ngram: list column, 67.8k rows. Each of the six sample rows below holds overlapping n-gram windows over one source file; each row is stitched back into the underlying file here.
Row 1: test suite for a Flask proxy that logs MLflow artifacts (repo: elna4os/mlflow_flask_artifacts_logger).

```python
import unittest

import requests
from mlflow.exceptions import RestException
from mlflow.tracking import MlflowClient


class TestMLFlowArtifactsProxy(unittest.TestCase):
    def testLogArtifact(self):
        # Open three sample artifacts to upload in one multipart request.
        with open('artifacts/foo.txt', 'rb') as f1, \
                open('artifacts/image.png', 'rb') as f2, \
                open('artifacts/animation.gif', 'rb') as f3:
            client = MlflowClient(tracking_uri="http://localhost:5000")
            try:
                experiment_id = client.create_experiment("foo")
            except RestException:
                # The experiment already exists; look it up instead.
                experiment = client.get_experiment_by_name("foo")
                experiment_id = experiment.experiment_id
            run = client.create_run(experiment_id)
            run_id = run.info.run_id
            print(experiment_id + ":" + run_id)
            files = {'file1': f1, 'file2': f2, 'file3': f3}
            data = {'run_id': run_id}
            r = requests.post('http://localhost:5001/log_artifact',
                              files=files, data=data)
            print(r.text)


if __name__ == '__main__':
    unittest.main()
```
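The proxy the test posts to is not part of this dump. A minimal sketch of what such an endpoint might look like, assuming Flask and the route, port, and field names the test implies:

```python
# Hypothetical server side of the test above (an assumption, not the repo's code):
# accept multipart files plus a run_id form field and forward each file to MLflow.
import os
import tempfile

from flask import Flask, request
from mlflow.tracking import MlflowClient

app = Flask(__name__)
client = MlflowClient(tracking_uri="http://localhost:5000")


@app.route('/log_artifact', methods=['POST'])
def log_artifact():
    run_id = request.form['run_id']
    with tempfile.TemporaryDirectory() as tmpdir:
        for upload in request.files.values():
            local_path = os.path.join(tmpdir, upload.filename)
            upload.save(local_path)                  # spool the upload to disk first
            client.log_artifact(run_id, local_path)  # then hand it to the tracking server
    return 'logged %d file(s) to run %s' % (len(request.files), run_id)


if __name__ == '__main__':
    app.run(port=5001)
```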
Row 2: pytest conftest wiring up the sphinx.testing fixtures.

```python
import os.path as osp
import sys

import pytest
from sphinx.testing.path import path

pytest_plugins = 'sphinx.testing.fixtures'

sphinx_supp = osp.abspath(osp.join(osp.dirname(__file__), "tests"))


@pytest.fixture(scope='session')
def rootdir():
    return path(sphinx_supp)


sys.path.insert(0, osp.join(sphinx_supp, "test-root"))
```
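A test module consuming this conftest might look roughly like the following sketch (the testroot name and the assertion are assumptions, not part of the dump); with sphinx.testing, `testroot='root'` resolves to the `tests/test-root` directory the conftest also puts on `sys.path`:

```python
# Hypothetical consumer of the rootdir fixture above.
import pytest


@pytest.mark.sphinx('html', testroot='root')
def test_build_html(app):
    app.build()
    assert (app.outdir / 'index.html').exists()
```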
Row 3: entry-point script; inline comments translated from Chinese.

```python
from process import *  # data structures
from out import *      # UI window
# from config import originate, target
import time
import os

if __name__ == '__main__':
    run()
```
Row 4: errbot plugin listing the bot rules (plugins/newbie.py); the original's `re_cmd_name_help='newbiw'` typo is corrected.

```python
import re

from errbot import BotPlugin, re_botcmd


class Newbie(BotPlugin):
    """
    List the bot rules
    """

    @re_botcmd(pattern=r'newbie', re_cmd_name_help='newbie',
               flags=re.IGNORECASE, template='newbie.jinja2')
    def newbie(self, msg, args):
        """
        Show the bot rules.
        """
        return {'rules': True}
```
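errbot ships a test backend that can drive this command end to end; a sketch of such a test (the plugin directory and command prefix are assumptions):

```python
# Hypothetical test for the plugin above using errbot's bundled test backend.
pytest_plugins = ["errbot.backends.test"]

extra_plugin_dir = "plugins"  # assumed location of newbie.py


def test_newbie(testbot):
    testbot.push_message("!newbie")
    # The reply is newbie.jinja2 rendered with {'rules': True}; just check
    # that the command answered at all.
    assert testbot.pop_message()
```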
Row 5: Region enum and an int-or-list coercion helper.

```python
from enum import Enum
from typing import List, Union


class Region(Enum):
    NA = 'com'
    EU = 'eu'
    RU = 'ru'
    AS = 'asia'


def lst_of_int(id_, name):
    if id_ is None:
        return None
    if not isinstance(id_, int) and any(not isinstance(x, int) for x in id_):
        raise ValueError('{} must be an int or a list of ints'.format(name))
    return ','.join([str(i) for i in id_]) if isinstance(id_, list) else id_


l_int = Union[int, List[int]]
```
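A quick usage sketch of `lst_of_int` as defined above: `None` and ints pass through unchanged, lists of ints collapse to a comma-separated string, and anything else raises:

```python
assert lst_of_int(None, 'account_id') is None
assert lst_of_int(42, 'account_id') == 42
assert lst_of_int([1, 2, 3], 'account_id') == '1,2,3'
try:
    lst_of_int(['a', 'b'], 'account_id')
except ValueError as err:
    print(err)  # account_id must be an int or a list of ints
```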
[
"%(option,finish,note)) elif strt == BASE: print (\"\\t[%d] Base -> Start %s\" %(option,note)) else:",
"get out if dude == BASE: if (die == 1 or die ==",
"=\" %(color,die),response) return SortMoves(response) # # IsWinner(color) # # Determine if color has",
"magic circle, you get an upper case # letter if i in MagicCircle:",
"HOMESIZE: if not ValidMove(dude, HOME+homeloc, die, color): assert False response.append([dude, HOME+homeloc, \"[Home]\", 0])",
"base # def Bonk(space): if space == CENTER: deadGuy = CenterSpace else: deadGuy",
"# homeloc is (potential) home space # Move into Home if homeloc >=",
"Players: print (\"%s\\t\" %p, end=\"\") print (\"Base:\\t\", end=\"\") for b in Base[p]: if",
"= ccode[CenterSpace]+CenterSpace[0].upper()+creset else: #cen = \"-\" #cen = chr(216) cen = chr(0x00A7) #",
"for i in range(1,die+1): testloc = marble+i if testloc >= myStart: # testloc",
"if homeloc >= 0 and homeloc < HOMESIZE: if not ValidMove(dude, HOME+homeloc, die,",
"my marbles? All your base are belong to us. Marbles[c] = [BASE, BASE,",
"((dude+die)%BOARDSIZE - Start[color]) % BOARDSIZE response.append([dude, (dude+die)%BOARDSIZE, note, distance]) # Done! # print",
"finalspot, note, distance]) # MOVEMENT INTO HOME # NB: Add special cases for",
"= chr(216) cen = chr(0x00A7) # Hurricane print(\"\\t%s\\t %s\\t %s\" %(output[104-i],cen,output[i])) else: print(\"\\t\"+output[104-i],\"\\t\\t",
"\"\" moveDesc += \"Home[\" + str(source-HOME+1) + \"] -> \" else: assert Board[source]",
"# [magic];Attrib;FG;BGm \"Blue\": \"\\033[1;97;44m\", \"Red\": \"\\033[1;97;41m\", \"Cyan\": \"\\033[1;97;46m\", \"Purple\": \"\\033[1;97;45m\", \"Green\": \"\\033[1;35;42m\", \"White\":",
"if not moves: print (\"No moves available.\") continue GotInput = 0 selection =",
"NumPlayers == -2: Players.append(\"Blue\") Players.append(\"Purple\") elif NumPlayers == 3 or NumPlayers == -3:",
"(Board[Start[color]]!=color): return True return False assert marble != BASE # CENTER SPACE HANDLING",
"Can't pass teammate return False # Checked all intermediate spaces, and destination space",
"CenterSpace: print (\"Bonk! %s hits %s!\" %(color, CenterSpace)) moveDesc += \"Bonk \" +",
"= CenterSpace else: deadGuy = Board[space] Board[space] = \"\" Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy) #",
"# def Move(color, source, destination): global CenterSpace moveDesc = color + \": \"",
"land on myself if mc == 6: pass else: badMove = 1 #",
"GameOver = 0 # Is the game over turnNum = 0 robotMode =",
"somebody in the way badMove = 1 else: # Still on the main",
"A human is needed numPlayers = Setup() if numPlayers <= 0: robotMode =",
"# circleNum is the index of where we are in the magic #",
"loc < myStart: if Board[loc]: note = \"[Bonk \" + Board[loc] + \"]\"",
"chr(0x00B7) # A nice dot #print (\"-\", end=\"\") # Occupied space else: #",
"!= BASE # CENTER SPACE HANDLING # If my roll can take me",
"(\"That's not an option. Try again.\") GotInput = 0 except ValueError: if len(moves)",
"\"Red\": \"\\033[1;97;41m\", \"Cyan\": \"\\033[1;97;46m\", \"Purple\": \"\\033[1;97;45m\", \"Green\": \"\\033[1;35;42m\", \"White\": \"\\033[1;31;47m\", } # ANSI",
"Home[%d] %s\" \\ %(option, strt-HOME+1, finish-HOME+1, note)) else: print(\"\\t[%d] %d -> Home[%d] %s\"",
"then continue the normal # track, or hope 2 magic circle space and",
"== destination and die == 6 and marble in MagicCircle: return True #",
"game over turnNum = 0 robotMode = 0 # A human is needed",
"Marbles[c] = [BASE, BASE, BASE, BASE ] robotMode = 0 Setup = 0",
"to do # a full revolution if dude != MoveToCheck: # If it",
"destination is that color's start destination = Start[color] moveDesc += \"[Base] -> \"",
"True # Catch all assert False return False # # SortMoves(myList) # #",
"assert CenterSpace == color CenterSpace = \"\" moveDesc += \"[Center] -> \" elif",
"if not selfPass: note = \"\" if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note",
"should probably remove # most of thie duplicate logic from GetMoves and have",
"all the magic spaces between where I entered # and where I exited",
"return False return True assert marble not in MagicCircle # MOVEMENT INTO HOME",
"Home return False elif Home[color][testloc]: # somebody in the way return False else:",
"track, or hope 2 magic circle space and then continue the # normal",
"is a replicant! if robotMode and pColor == \"Blue\": selection = 1 GotInput",
"# Move into Home if homeloc >= 0 and homeloc < HOMESIZE: return",
"take the first option selection = 1 GotInput = 1 elif pColor ==",
"continue if circleBlock: continue if not badMove: # Add this to the list",
"not an option. Try again.\") GotInput = 0 except ValueError: if len(moves) ==",
"else: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") Players.append(\"Green\") return NumPlayers # # Bonk! #",
"# Movement WITHIN Home if marble >= HOME: assert marble < HOME+HOMESIZE assert",
"# List that we'll be returning with ALL valid moves response = []",
"firstStart=0 continue else: continue # # Handle \"regular\" motion starting here: # #",
"I # can enter the Center. dude+die-1 is equal to MagicCircle+1 if dude+die-1",
"in moves[i][2]: selection = i+1 print (\"Kill!\", moves[i]) break if not selection: selection",
"Home[color] hp = marble-HOME # hp means Home Position for i in range(1,die+1):",
"< myStart and marble+die >= myStart: # Test the spaces between here and",
"and Board[finalspot] and not special: note += \" & \" if Board[finalspot] and",
"# # Determine if color has won. Returns True/False # def IsWinner(color): win=1",
"assert marble+die-1 in MagicCircle if CenterSpace == color: return False for i in",
"if dude != MoveToCheck: # If it is not me, then it is",
">= HOMESIZE): valid=0 continue if hp+i > HOMESIZE or hm[hp+i] == color: valid=0",
"the magic circle spaces Base = {} # Dict of each color's base",
"-> Home[%d] %s\" %(option, strt, finish-HOME+1, note)) elif strt == CENTER: print(\"\\t[%d] Center",
"to destination. # def Move(color, source, destination): global CenterSpace moveDesc = color +",
"color=%s)\" %(marble, destination, die, color)) assert die > 0 and die < 7",
"marble, figure out all possible moves firstStart=1 # Only want to add Start",
"BOARDSIZE: for i in range(1,die): if Board[(marble+i)%BOARDSIZE] == color: return False return True",
"by .sorted to return lists in order def SortMoves(sub_li): sub_li.sort(key = lambda x:",
"nearest magic circle space, checking # that walk. if Board[destination-i] == color: return",
"\"[Start\" if Board[Start[color]]: note += \" & Bonk \" + Board[Start[color]] note +=",
"in order def SortMoves(sub_li): sub_li.sort(key = lambda x: x[3]) #sub_li.sort(reverse=True,key = lambda x:",
"of modulo problems. elif (dude < Start[color] and (dude+die)%BOARDSIZE >= Start[color]) or \\",
"in Marbles[color]: # print (\"[] GetMoves(color=%s die=%d) - Check %d\" %(color,die,dude)) note =\"\"",
"color): assert False distance = BOARDSIZE - (finalspot - Start[color]) % BOARDSIZE response.append([dude,",
"players # Marbles[color] : { location0, location1, location2, location3 } # Start[color] :",
"the Center space if destination == CENTER: assert marble+die-1 in MagicCircle if CenterSpace",
"# Take a random option selection = randint(1,len(moves)) GotInput = 1 while not",
"note += \"]\" if not ValidMove(dude, i, die, color): assert False distance =",
"in MagicCircle # MOVEMENT INTO HOME myStart = Start[color] if myStart == 0:",
"All your base are belong to us. Marbles[c] = [BASE, BASE, BASE, BASE",
"hp = marble-HOME # hp means Home Position for i in range(1,die+1): if(hp+i",
"%s!\" %(color,Board[destination])) Bonk(destination) Board[destination] = color Marbles[color].remove(source) Marbles[color].append(destination) return moveDesc # # ValidMove",
"NumPlayers >= -6 and NumPlayers <= -2: print (\"Like tears in rain.\") robotMode",
"selfPass: note = \"\" if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"[\"",
"Start[color]) % BOARDSIZE response.append([dude, loc, note, distance]) # Movement WITHIN Home elif dude",
"MagicCircle and Board[(dude+die)%BOARDSIZE]: note += \" & \" if Board[(dude+die)%BOARDSIZE]: note += \"Bonk",
"that list circleNum = MagicCircle.index(dude) # Lots of permutations for magic circle... for",
"= 0 # Is the game over turnNum = 0 robotMode = 0",
"in the magic # circle list, so we can bop around by adding",
"for i in MagicCircle: if Board[i] != color: note = \"[Magic Circle\" if",
"if not ValidMove(dude, Start[color], die, color): assert False response.append([dude, Start[color], note, BOARDSIZE]) firstStart=0",
"via die is valid # Returns True / False # # This is",
"= ccode[space]+space[0].upper()+creset #print (ccode[space]+space[0].upper()+creset, end=\"\") else: output[i] = ccode[space]+space[0].lower()+creset #print (ccode[space]+space[0].lower()+creset, end=\"\") for",
">= -6 and NumPlayers <= -2: print (\"Like tears in rain.\") robotMode =",
"CENTER: assert CenterSpace == color CenterSpace = \"\" moveDesc += \"[Center] -> \"",
"42, \"Green\": 56, \"White\": 70 } # # Roll(): # # Roll a",
"in Base[p]: if b == \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else:",
"way badMove = 1 else: # Still on the main board if Board[testloc%BOARDSIZE]",
"== color: return False return True # \"NORMAL\" MOVEMENT if marble not in",
"moveDesc += \"Bonk \" + Board[destination] + \"!\" print (\"Bonk! %s hits %s!\"",
"GotInput = 1 elif pColor == \"Green\": # Take a random option selection",
"space homeloc = destination - HOME # homeloc is (potential) home space #",
"1: selection = 1 GotInput = 1 else: print (\"Bad input\") GotInput =",
"if Board[destination-i] == color: return False return True assert marble not in MagicCircle",
"hm[hp+i] == color: valid=0 continue if valid: if not ValidMove(dude, dude+die, die, color):",
"backwards print(\"\\n\") for p in Players: print (\"%s\\t\" %p, end=\"\") print (\"Base:\\t\", end=\"\")",
"from source to destination. # def Move(color, source, destination): global CenterSpace moveDesc =",
"\"Red\" or pColor == \"Purple\": # Blood shall flow GotInput = 1 for",
"assert marble != CENTER assert destination != CENTER # Special case of 6",
"+= \"]\" if not ValidMove(dude, Start[color], die, color): assert False response.append([dude, Start[color], note,",
"# Ran off the end of Home return False elif Home[color][testloc]: # somebody",
"Home? if testloc >= HOMESIZE: # Ran off the end of Home badMove",
"x in range(0,BOARDSIZE)] CenterSpace = \"\" MagicCircle = [ 7, 21, 35, 49,",
"# If my roll can take me to one past the MagicCircle, then",
"# def ValidMove(marble, destination, die, color): # print (\"[Entering] ValidMove(src=%d, dest=%d, die=%d, color=%s)\"",
"a die roll # def GetMoves(color,die): assert die > 0 and die <",
"is in the Home zone testloc -= myStart # How many spaces into",
"default creset=\"\\033[m\" output = [\"-\" for x in range(0,BOARDSIZE)] for i in range(0,BOARDSIZE):",
"If my roll can take me to one past the MagicCircle, then I",
"% BOARDSIZE response.append([dude, i, note, distance]) continue assert dude != CENTER # MAGIC",
"] # Locations for the magic circle spaces Base = {} # Dict",
"Marbles[color].remove(source) Marbles[color].append(destination) return moveDesc # # ValidMove (marble, destination, die) # # Check",
"else: #output[i] = (\"-\") output[i] = chr(0x00B7) # A nice dot #print (\"-\",",
"if selection < 1 or selection > len(moves): print (\"That's not an option.",
"Circle\" if Board[i]: note += \" & Bonk \" + Board[i] note +=",
"in the Home zone testloc -= myStart # How many spaces into Home?",
"Setup() if numPlayers <= 0: robotMode = 1 numPlayers *= -1 # TkSetup()",
"= marble-HOME # hp means Home Position for i in range(1,die+1): if(hp+i >=",
"# # Gets the board ready for a new game, and assigns player",
"figure out all possible moves firstStart=1 # Only want to add Start once",
"can roll out to any # magic circle space if dude == CENTER:",
"Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") else: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") Players.append(\"Green\") return NumPlayers #",
"a # to indicate start spaces elif i in Start.values(): # What's this?",
"finish >= HOME: if strt >= HOME: print(\"\\t[%d] Home[%d] -> Home[%d] %s\" \\",
"the color given the # value. So here's a bunch of casting black",
"destination < BOARDSIZE: for i in range(1,die): if Board[(marble+i)%BOARDSIZE] == color: return False",
"the magic spaces between where I entered # and where I exited for",
"if not ValidMove(dude, CENTER, die, color): assert False distance = BOARDSIZE - 8",
"indicate magic circle if i in MagicCircle: #output[i] = \"*\" #print (\"*\", end=\"\")",
"circleBlock = 1 continue if circleBlock: continue if not badMove: # Add this",
"# \"Location\" for base spots. All are 99. HOME=100 # \"Location\" for home",
"ValueError: print (\"Please enter a number between 2 and 6.\") Setup = 0",
"- 100, 101, 102, 103 HOMESIZE=4 # How big is your home? Colors",
"\"[Center] -> \" elif source == BASE: # Remove the marble from the",
"BOARDSIZE response.append([dude, (dude+die)%BOARDSIZE, note, distance]) # Done! # print (\"[Leaving] GetMoves(color=%s die=%d) =\"",
"die != 6: return False # If this marble is in Base, see",
"# Handle \"regular\" motion starting here: # # CENTER SPACE HANDLING # If",
"marble < BOARDSIZE and destination < BOARDSIZE: for i in range(1,die): if Board[(marble+i)%BOARDSIZE]",
"magicDestination = MagicCircle.index(destination-i) # Check all the magic spaces between where I entered",
"assert CenterSpace != color moveDesc += \"[Center] \" if CenterSpace: print (\"Bonk! %s",
"SortMoves(response) # # IsWinner(color) # # Determine if color has won. Returns True/False",
">= HOME: Home[color][source-HOME] = \"\" moveDesc += \"Home[\" + str(source-HOME+1) + \"] ->",
"check because I was having problems. :) I should probably remove # most",
"< 7 assert color # Quick check to see if there's a teammate",
"marble of color color from source to destination. # def Move(color, source, destination):",
"CenterSpace: note += \" & Bonk \" + CenterSpace note += \"]\" if",
"ValidMove(dude, finalspot, die, color): assert False distance = BOARDSIZE - (finalspot - Start[color])",
"is that color's start destination = Start[color] moveDesc += \"[Base] -> \" elif",
"moveDesc = color + \": \" # Remove marble from source if source",
"moves = GetMoves(pColor, myRoll) if not moves: print (\"No moves available.\") continue GotInput",
"note += \"]\" if not ValidMove(dude, (dude+die)%BOARDSIZE, die, color): assert False distance =",
"print (\"[Entering] GetMoves(color=%s die=%d)\" %(color,die)) # List that we'll be returning with ALL",
"board if Board[testloc%BOARDSIZE] == color: # Can't pass teammate badMove = 1 #",
"Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] == color: # Passed through teammate # 6 in magic circle means",
"0 Setup = 0 # Has the game been setup? while not Setup:",
"if CenterSpace: print (\"Bonk! %s hits %s!\" %(color, CenterSpace)) moveDesc += \"Bonk \"",
"print (\"[Leaving] GetMoves(color=%s die=%d) =\" %(color,die),response) return SortMoves(response) # # IsWinner(color) # #",
"= GetMoves(pColor, myRoll) if not moves: print (\"No moves available.\") continue GotInput =",
"#print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[h]+h[0].lower()+creset, end=\"\") print() # #",
"!= color: selfPass = 0 for i in range(1,die): if Board[(dude+i)%BOARDSIZE] == color:",
"6) and (Board[Start[color]]!=color): return True return False assert marble != BASE # CENTER",
"pColor == \"White\": # Always take the first option selection = 1 GotInput",
"else: print (ccode[b]+b[0].lower()+creset, end=\"\") print (\"\\tHome:\\t\", end=\"\") for h in Home[p]: if h",
"and marble != destination and die != 6: return False # If this",
"Marbles[color] : { location0, location1, location2, location3 } # Start[color] : space# Start",
"0, # because of modulo problems. elif (dude < Start[color] and (dude+die)%BOARDSIZE >=",
"\"Blue\": selection = 1 GotInput = 1 if pColor == \"Red\" or pColor",
"#print (ccode[space]+space[0].lower()+creset, end=\"\") for i in range(0,BOARDSIZE): if i >=0 and i <",
"are my marbles? All your base are belong to us. Marbles[c] = [BASE,",
"distance]) # If I'm in the center and I got a one, I",
"note += \"]\" if not ValidMove(dude, Start[color], die, color): assert False response.append([dude, Start[color],",
"special: note += \" & \" if Board[finalspot] and not special: note +=",
"print (\"Bonk! %s hits %s!\" %(color, CenterSpace)) moveDesc += \"Bonk \" + CenterSpace",
"in MagicCircle: print (\"\\t[%d] %d -> %d %s\" %(option, strt, finish,note)) else: print",
"in range(0, HOMESIZE): if Home[color][i] != color: win=0 break return bool(win) def TkSetup():",
"canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop() # # Main # def Main(): GameOver = 0 # Is",
"Players = [] # List of active players # Marbles[color] : { location0,",
"not ValidMove(src,dst,myRoll,pColor): print (\"ERROR: ValidMove(%d, %d, %d, %s)\" %(src,dst,myRoll,pColor)) return False response =",
"# def Display(): # Color! # ANSI color codes for the marbles ccode={",
"for Blue, with start space of 0, # because of modulo problems. elif",
">= Start[color]+BOARDSIZE): badMove = 0 myStart = Start[color] if myStart == 0: #",
"move in moves: strt, finish, note, distance = move if finish >= HOME:",
"roll # def GetMoves(color,die): assert die > 0 and die < 7 assert",
"== \"White\": # Always take the first option selection = 1 GotInput =",
"The magic circle is poisoned from # here on out.. circleBlock = 1",
"elif Board[(dude+die)%BOARDSIZE] != color: selfPass = 0 for i in range(1,die): if Board[(dude+i)%BOARDSIZE]",
"walk. if Board[destination-i] == color: return False return True assert marble not in",
"Board[MagicCircle[j]] == color: if marble == destination and die == 6: return False",
"not Setup: try: Setup=1 NumPlayers = int(input(\"How many players? \")) if NumPlayers ==",
"if not badMove: # Valid moves only loc = dude+die # loc is",
"NumPlayers == 5 or NumPlayers == -5: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") else:",
"ValidMove(dude, dude+die, die, color): assert False response.append([dude, dude+die, \"[Home]\", 0]) # \"NORMAL\" MOVEMENT",
"range(1,die) if not badMove: # Valid moves only loc = dude+die # loc",
"and out badMove=0 circleBlock=0 # Check magic circle spots I traversed for mc",
"35, 49, 63, 77 ] # Locations for the magic circle spaces Base",
"False response.append([dude, Start[color], note, BOARDSIZE]) firstStart=0 continue else: continue # # Handle \"regular\"",
"special=1 note = \"\" if (finalspot in MagicCircle) or (Board[finalspot]): note += \"[\"",
"circle, so walk # back to the nearest magic circle space, checking #",
"1 numPlayers *= -1 # TkSetup() Display() # Show the initial game board",
"\"Green\": \"\\033[1;32;40m\", \"White\": \"\\033[1;37;40m\", } # Reset the color to default creset=\"\\033[m\" output",
"Returns True/False # def IsWinner(color): win=1 for i in range(0, HOMESIZE): if Home[color][i]",
"Locations for the magic circle spaces Base = {} # Dict of each",
"Red always goes for the kill # White tried to be optimal, but",
"if (dude+die)%BOARDSIZE in MagicCircle: note += \"Magic Circle\" if (dude+die)%BOARDSIZE in MagicCircle and",
"NumPlayers <= -2: print (\"Like tears in rain.\") robotMode = 1 elif NumPlayers",
"dude-HOME # hp means Home Position valid=1 for i in range(1,die+1): if(hp+i >=",
"== \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[h]+h[0].lower()+creset, end=\"\") print()",
"Base, see if it can get out if marble == BASE: assert destination",
"Colors = [ \"Blue\", \"Red\", \"Cyan\", \"Purple\", \"Green\", \"White\" ] Board = [\"\"",
"for i in range(1,die+1): testloc = dude+i if not badMove and testloc >=",
"or NumPlayers == -5: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") else: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\")",
"end=\"\") output[i] = chr(0x00A4) # cool circle thing # Use a # to",
"of the board. # XXX: This could be replaced with Tk or something",
"sub_li.sort(key = lambda x: x[3]) #sub_li.sort(reverse=True,key = lambda x: x[0]) return sub_li #",
"== \"Red\" or pColor == \"Purple\": # Blood shall flow GotInput = 1",
"\"regular\" motion starting here: # # CENTER SPACE HANDLING # If my roll",
"%(option, strt, finish, note)) option+=1 try: selection = int(input(pColor + \": Please select",
"# Prints out the state of the board. # XXX: This could be",
"int(input(pColor + \": Please select an option: \")) GotInput = 1 if selection",
"+ t)%BOARDSIZE if Board[MoveToCheck] == color: # Handle case where I roll a",
"# normal track, or ... if dude in MagicCircle: # circleNum is the",
"CenterSpace != color moveDesc += \"[Center] \" if CenterSpace: print (\"Bonk! %s hits",
"i in range(1,die+1): if Board[dude+i] == color: yep=0 if yep: note = \"[Center\"",
"#print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[b]+b[0].lower()+creset, end=\"\") print (\"\\tHome:\\t\", end=\"\")",
"# Board destination is not the center or Home assert Board[destination] != color",
"selection = randint(1,len(moves)) GotInput = 1 while not GotInput: option=1 # Counter for",
"GotInput = 1 else: print (\"Bad input\") GotInput = 0 except TypeError: print",
"and testloc >= myStart: # testloc is in the Home zone testloc -=",
"Base[color].remove(color) # The destination is that color's start destination = Start[color] moveDesc +=",
"between 1 and 6 # def Roll(): return randint(1,6) # # Display(): #",
"assert Board[source] == color Board[source] = \"\" moveDesc += \"\" + str(source) +",
"1 elif Home[color][testloc]: # somebody in the way badMove = 1 else: #",
"0 and dude < Start[color]+BOARDSIZE and dude+die >= Start[color]+BOARDSIZE): badMove = 0 myStart",
"NumPlayers == -4: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") elif NumPlayers == 5 or NumPlayers",
"# value. So here's a bunch of casting black magic to # do",
"if hp+i > HOMESIZE or hm[hp+i] == color: return False return True #",
"and dude < Start[color]+BOARDSIZE and dude+die >= Start[color]+BOARDSIZE): badMove = 0 myStart =",
"\" + Board[loc] + \"]\" if not ValidMove(dude, loc, die, color): assert False",
"%d -> %d %s\" %(option, strt, finish,note)) else: print (\"\\t[%d] %d -> %d",
"dude < Start[color]+BOARDSIZE and dude+die >= Start[color]+BOARDSIZE): badMove = 0 myStart = Start[color]",
"range(0,len(moves)): if \"Bonk\" in moves[i][2]: selection = i+1 print (\"Kill!\", moves[i]) break if",
"(\"Please enter a number between 2 and 6.\") Setup = 0 print (\"Preparing",
"die == 6: return False return True else: # The destination is not",
"# # Main # def Main(): GameOver = 0 # Is the game",
"response.append([dude, CENTER, note, distance]) # If I'm in the center and I got",
"i in Start.values(): # What's this? I need to get the color given",
"(\"The only way to win is not to play.\") NumPlayers = -6 robotMode",
"all assert False return False # # SortMoves(myList) # # Used by .sorted",
"Ran off the end of Home badMove = 1 elif Home[color][testloc]: # somebody",
"< HOME+HOMESIZE assert destination >= HOME hm = Home[color] # hm means Home[color]",
"or die == 6) and (Board[Start[color]]!=color) and (1==firstStart): note = \"[Start\" if Board[Start[color]]:",
"< myStart: if Board[loc]: note = \"[Bonk \" + Board[loc] + \"]\" if",
"KeyError: print (\"Please enter a number between 2 and 6.\") Setup = 0",
"+ \"!\" print (\"Bonk! %s hits %s!\" %(color,Board[destination])) Bonk(destination) Board[destination] = color Marbles[color].remove(source)",
"circle, I can continue normal track, or # hop one magic circle space",
"not selection: selection = 1 elif pColor == \"Cyan\" or pColor == \"Purple\"",
"Board[(dude+die)%BOARDSIZE]: note += \"]\" if not ValidMove(dude, (dude+die)%BOARDSIZE, die, color): assert False distance",
"myStart: # Test the spaces between here and my final location for #",
"return False return True # Leaving the Center space if marble == CENTER:",
"in range(1,die+1): if Board[(marble+i)%BOARDSIZE] == color: return False return True # Leaving the",
"assert destination == Start[color] if (die == 1 or die == 6) and",
"a 6 while again: again=0 pColor = Players[p] myRoll = Roll() print (\"\\n%s",
"Color! # ANSI color codes for the marbles ccode={ # [magic];Attrib;FG;BGm \"Blue\": \"\\033[1;97;44m\",",
"\"White\" ] Board = [\"\" for x in range(0,BOARDSIZE)] CenterSpace = \"\" MagicCircle",
"through teammate # 6 in magic circle means I can land on myself",
"0: print (\"The only way to win is not to play.\") NumPlayers =",
"deadGuy = Board[space] Board[space] = \"\" Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy) # # Move(color, source,",
"myRoll = Roll() print (\"\\n%s rolled: %d\\n\" %(pColor, myRoll)) moves = GetMoves(pColor, myRoll)",
"for the user input menu for move in moves: strt, finish, note, distance",
"i in range(1,die+1): if Board[(marble+i)%BOARDSIZE] == color: return False return True # Leaving",
"\"\\033[1;97;41m\", \"Cyan\": \"\\033[1;97;46m\", \"Purple\": \"\\033[1;97;45m\", \"Green\": \"\\033[1;35;42m\", \"White\": \"\\033[1;31;47m\", } # ANSI color",
"that. thiscolor = list(Start.keys())[list(Start.values()).index(i)] #output[i] = startColor[thiscolor]+\"#\"+creset output[i] = startColor[thiscolor]+chr(0x00BB)+creset output[i] = startColor[thiscolor]+chr(0x033F)+creset",
"False if hp+i > HOMESIZE or hm[hp+i] == color: return False return True",
"= 0 # Has the game been setup? while not Setup: try: Setup=1",
"of hops out of circle MoveToCheck = (circleExit + t)%BOARDSIZE if Board[MoveToCheck] ==",
"= Start[color] if myStart == 0: # HACK for Blue with start of",
"# Return a list of the valid player options with a die roll",
"# somebody in the way badMove = 1 else: # Still on the",
"range(1,die): if Board[(dude+i)%BOARDSIZE] == color: selfPass = 1 continue if not selfPass: note",
"i in range(0, die+1): circleExit = MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot = (circleExit + (die-i))%BOARDSIZE #",
"i in range(0, HOMESIZE): if Home[color][i] != color: win=0 break return bool(win) def",
"while not GotInput: option=1 # Counter for the user input menu for move",
"Center space if marble == CENTER: if die==1 and Board[destination] != color: return",
"i, die, color): assert False distance = BOARDSIZE - (i - Start[color]) %",
"continue the normal # track, or hope 2 magic circle space and then",
"# to the index in that list circleNum = MagicCircle.index(dude) # Lots of",
"i in range(1,die+1): if(hp+i >= HOMESIZE): return False if hp+i > HOMESIZE or",
"if (finalspot in MagicCircle) or (Board[finalspot]): note += \"[\" if finalspot in MagicCircle:",
"base status Home = {} # Dict of each color's home status Marbles",
"Board[dude+i] == color: yep=0 if yep: note = \"[Center\" if CenterSpace: note +=",
"finish, note)) option+=1 try: selection = int(input(pColor + \": Please select an option:",
"i, note, distance]) continue assert dude != CENTER # MAGIC CIRCLE HANDLING #",
"else: if finish == CENTER: print (\"\\t[%d] %d -> Center %s\" %(option,strt,note)) elif",
"Position valid=1 for i in range(1,die+1): if(hp+i >= HOMESIZE): valid=0 continue if hp+i",
"# Handle case where I roll a 6 and want to do #",
"number between 2 and 6.\") Setup = 0 except TypeError: print (\"Please enter",
"\")) if NumPlayers == 0: print (\"The only way to win is not",
"%s!\" %(color, CenterSpace)) moveDesc += \"Bonk \" + CenterSpace + \"!\" Bonk(CENTER) CenterSpace",
"= 1 # End of for i in range(1,die) if not badMove: #",
"= 1 continue if not selfPass: note = \"\" if (dude+die)%BOARDSIZE in MagicCircle",
"i >= 21 and i < 42: if i == 31: if CenterSpace:",
"marble from the base assert Base[color].count(color) > 0 Base[color].remove(color) # The destination is",
"%(option,strt,note)) elif finish in MagicCircle: print (\"\\t[%d] %d -> %d %s\" %(option, strt,",
"Please select an option: \")) GotInput = 1 if selection < 1 or",
"adding die values # to the index in that list magicStart = MagicCircle.index(marble)",
"a number between 2 and 6.\") Setup=0 except KeyError: print (\"Please enter a",
"the MagicCircle, then I # can enter the Center. marble+die-1 is equal to",
"I should probably remove # most of thie duplicate logic from GetMoves and",
"space, checking # that walk. if Board[destination-i] == color: return False return True",
"assert marble not in MagicCircle # MOVEMENT INTO HOME myStart = Start[color] if",
"selfPass = 0 for i in range(1,die): if Board[(dude+i)%BOARDSIZE] == color: selfPass =",
"if destination is not empty if Board[destination]: moveDesc += \"Bonk \" + Board[destination]",
"+= 1 for p in range(0,numPlayers): again=1 # Flag for when a player",
"and my final location for # teammates for i in range(1,die+1): testloc =",
"{ \"Blue\": 0, \"Red\": 14, \"Cyan\": 28, \"Purple\": 42, \"Green\": 56, \"White\": 70",
"end=\"\") if i == 20: print() elif i >= 21 and i <",
"# How big is your home? Colors = [ \"Blue\", \"Red\", \"Cyan\", \"Purple\",",
"# HACK for Blue with start of 0 myStart = BOARDSIZE for i",
"1 if selection < 1 or selection > len(moves): print (\"That's not an",
"ValidMove (marble, destination, die) # # Check if the move from marble to",
"case where I roll a 6 and want to do # a full",
"HOME: if strt >= HOME: print(\"\\t[%d] Home[%d] -> Home[%d] %s\" \\ %(option, strt-HOME+1,",
"of color color from source to destination. # def Move(color, source, destination): global",
"Marbles[color]: # print (\"[] GetMoves(color=%s die=%d) - Check %d\" %(color,die,dude)) note =\"\" #",
"= 0 except ValueError: if len(moves) == 1: selection = 1 GotInput =",
"== 0: output[i] = str(i // 10) else: #output[i] = (\"-\") output[i] =",
"\"[Center\" if CenterSpace: note += \" & Bonk \" + CenterSpace note +=",
"\"]\" if not ValidMove(dude, CENTER, die, color): assert False distance = BOARDSIZE -",
"= tk.Canvas(root, width=200, height=200, borderwidth=0, bg=\"black\") canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop() # # Main #",
"moveDesc += \"\" + str(destination) + \" \" # Deal with bonking if",
"%pColor) again=1 if IsWinner(pColor): print (\"%s wins in %d turns!\" %(pColor, turnNum)) GameOver",
"MagicCircle: # circleNum is the index of where we are in the magic",
"again: again=0 pColor = Players[p] myRoll = Roll() print (\"\\n%s rolled: %d\\n\" %(pColor,",
"b == \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[b]+b[0].lower()+creset, end=\"\")",
"startColor={ \"Blue\": \"\\033[1;34;40m\", \"Red\": \"\\033[1;31;40m\", \"Cyan\": \"\\033[1;36;40m\", \"Purple\": \"\\033[1;35;40m\", \"Green\": \"\\033[1;32;40m\", \"White\": \"\\033[1;37;40m\",",
"print (\"\\t[%d] %d -> %d %s\" %(option, strt, finish, note)) option+=1 try: selection",
"assert color in Colors assert color in Players # print (\"[Entering] GetMoves(color=%s die=%d)\"",
"a guy back to base # def Bonk(space): if space == CENTER: deadGuy",
"loop turnNum += 1 for p in range(0,numPlayers): again=1 # Flag for when",
"rolls a 6 while again: again=0 pColor = Players[p] myRoll = Roll() print",
"of Home badMove = 1 elif Home[color][testloc]: # somebody in the way badMove",
"it backwards print(\"\\n\") for p in Players: print (\"%s\\t\" %p, end=\"\") print (\"Base:\\t\",",
"# # ValidMove (marble, destination, die) # # Check if the move from",
"continue the # normal track, or ... if dude in MagicCircle: # circleNum",
"Start[color] if myStart == 0: # I have grown to hate Blue in",
"range(0, die+1): circleExit = MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot = (circleExit + (die-i))%BOARDSIZE # Now verify",
"(\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[h]+h[0].lower()+creset, end=\"\") print() # # Setup():",
"and die != 6: return False # If this marble is in Base,",
"# Print it backwards print(\"\\n\") for p in Players: print (\"%s\\t\" %p, end=\"\")",
"initial game board while not GameOver: # Main game loop turnNum += 1",
">= HOME hm = Home[color] # hm means Home[color] hp = marble-HOME #",
"kill # White tried to be optimal, but sucked so now takes 1",
"badMove and testloc >= myStart: # testloc is in the Home zone testloc",
"so now takes 1 # Cyan takes option 1 # Purple kills #",
"myStart # homeloc is home space # Move into Home if homeloc >=",
"for i in range(1,die+1): if(hp+i >= HOMESIZE): valid=0 continue if hp+i > HOMESIZE",
"or hm[hp+i] == color: return False return True # \"NORMAL\" MOVEMENT if marble",
"The destination is that color's start destination = Start[color] moveDesc += \"[Base] ->",
"-> %d %s\" %(option, strt, finish,note)) else: print (\"\\t[%d] %d -> %d %s\"",
"return False # Checked all intermediate spaces, and destination space homeloc = destination",
"moves: strt, finish, note, distance = move if finish >= HOME: if strt",
"optimal, but sucked so now takes 1 # Cyan takes option 1 #",
"or she chooses 1 # Deckard is a replicant! if robotMode and pColor",
"destination < BOARDSIZE: if Board[destination] == color and marble != destination and die",
"CENTER: assert CenterSpace != color moveDesc += \"[Center] \" if CenterSpace: print (\"Bonk!",
"myStart = BOARDSIZE if marble < myStart and marble+die >= myStart: # Test",
"99. HOME=100 # \"Location\" for home spots - 100, 101, 102, 103 HOMESIZE=4",
"die) # # Return a list of the valid player options with a",
"replaced with Tk or something else. # def Display(): # Color! # ANSI",
"color): # print (\"[Entering] ValidMove(src=%d, dest=%d, die=%d, color=%s)\" %(marble, destination, die, color)) assert",
"= \"[Center\" if CenterSpace: note += \" & Bonk \" + CenterSpace note",
"if die==1 and Board[destination] != color: return True else: return False assert marble",
"Hurricane print(\"\\t%s\\t %s\\t %s\" %(output[104-i],cen,output[i])) else: print(\"\\t\"+output[104-i],\"\\t\\t \",output[i]) elif i >= 42 and",
"\"\", \"\"] # Where are my marbles? All your base are belong to",
"Players # print (\"[Entering] GetMoves(color=%s die=%d)\" %(color,die)) # List that we'll be returning",
"Main game loop turnNum += 1 for p in range(0,numPlayers): again=1 # Flag",
"2 and 6.\") Setup = 0 except TypeError: print (\"Please enter a number",
"wins in %d turns!\" %(pColor, turnNum)) GameOver = 1 return # We're out",
"destination): global CenterSpace moveDesc = color + \": \" # Remove marble from",
"many spaces into Home? if testloc >= HOMESIZE: # Ran off the end",
"return False # Something insane happened? # Movement WITHIN Home if marble >=",
"the roll is a 6 in magic # circle, that isn't bonking because",
"of each color's marble locations Players = [] # List of active players",
"Marbles! # # An implementation of the classic marble board game that can",
"space == \"\": # Use a * to indicate magic circle if i",
"if homeloc >= 0 and homeloc < HOMESIZE: return True assert False return",
"distance = move if finish >= HOME: if strt >= HOME: print(\"\\t[%d] Home[%d]",
"HOMESIZE or hm[hp+i] == color: valid=0 continue if valid: if not ValidMove(dude, dude+die,",
"Determine if color has won. Returns True/False # def IsWinner(color): win=1 for i",
"i in range(1,die+1): if(hp+i >= HOMESIZE): valid=0 continue if hp+i > HOMESIZE or",
"bonking because it is me. special=0 if dude == finalspot: # End where",
"if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"]\" if not ValidMove(dude, (dude+die)%BOARDSIZE,",
"# Check if the move from marble to destination via die is valid",
"is in Base, see if it can get out if marble == BASE:",
"else: print(\"\\t[%d] %d -> Home[%d] %s\" %(option, strt, finish-HOME+1, note)) elif strt ==",
"takes 1 # Cyan takes option 1 # Purple kills # Green picks",
"if i in MagicCircle: output[i] = ccode[space]+space[0].upper()+creset #print (ccode[space]+space[0].upper()+creset, end=\"\") else: output[i] =",
"colors. # Returns: Number of Players # def Setup(): # Initialize the bases",
"a number between 2 and 6.\") Setup = 0 except ValueError: print (\"Please",
"are in the magic # circle list, so we can bop around by",
"or die == 6) and (Board[Start[color]]!=color): return True return False assert marble !=",
"print (\"\\n%s rolled: %d\\n\" %(pColor, myRoll)) moves = GetMoves(pColor, myRoll) if not moves:",
"not moves: print (\"No moves available.\") continue GotInput = 0 selection = 0",
"pass else: badMove = 1 # Check regular spots after I left circle",
"option 1 # Purple kills # Green picks randomly from choices # Blue",
"the user input menu for move in moves: strt, finish, note, distance =",
"# here on out.. circleBlock = 1 continue if circleBlock: continue if not",
"\"Red\": 14, \"Cyan\": 28, \"Purple\": 42, \"Green\": 56, \"White\": 70 } # #",
"%(output[104-i],cen,output[i])) else: print(\"\\t\"+output[104-i],\"\\t\\t \",output[i]) elif i >= 42 and i < 63: if",
"# TkSetup() Display() # Show the initial game board while not GameOver: #",
"previous note # If this marble is in Base, see if it can",
"marble locations Players = [] # List of active players # Marbles[color] :",
"ValidMove(%d, %d, %d, %s)\" %(src,dst,myRoll,pColor)) return False response = Move(pColor, src, dst) Display()",
"BOARDSIZE - 8 response.append([dude, CENTER, note, distance]) # If I'm in the center",
"\"Magic Circle\" if (dude+die)%BOARDSIZE in MagicCircle and Board[(dude+die)%BOARDSIZE]: note += \" & \"",
"# MOVEMENT INTO HOME # NB: Add special cases for Blue, with start",
"for Blue with start of 0 myStart = BOARDSIZE for i in range(1,die+1):",
"For each marble, figure out all possible moves firstStart=1 # Only want to",
"\"\\033[1;35;42m\", \"White\": \"\\033[1;31;47m\", } # ANSI color codes for start startColor={ \"Blue\": \"\\033[1;34;40m\",",
"color: return True else: return False assert marble != CENTER assert destination !=",
"False return True else: # The destination is not in the magic circle,",
"circleExit = MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot = (circleExit + (die-i))%BOARDSIZE # Now verify that I",
"If this marble is in Base, see if it can get out if",
"destination == CENTER: assert CenterSpace != color moveDesc += \"[Center] \" if CenterSpace:",
"color): assert False response.append([dude, HOME+homeloc, \"[Home]\", 0]) # Still on the Board elif",
"= startColor[thiscolor]+chr(0x033F)+creset #print (startColor[thiscolor]+\"#\"+creset, end=\"\") elif i % 10 == 0: output[i] =",
"= chr(0x00B7) # A nice dot #print (\"-\", end=\"\") # Occupied space else:",
"= [] # For each marble, figure out all possible moves firstStart=1 #",
"= 1 GotInput = 1 if pColor == \"Red\" or pColor == \"Purple\":",
"magicDestination+1): if Board[MagicCircle[j]] == color: if marble == destination and die == 6:",
"got a one, I can roll out to any # magic circle space",
"a list of the valid player options with a die roll # def",
"# because of modulo problems. elif (dude < Start[color] and (dude+die)%BOARDSIZE >= Start[color])",
"False return True # Catch all assert False return False # # SortMoves(myList)",
"strt-HOME+1, finish-HOME+1, note)) else: print(\"\\t[%d] %d -> Home[%d] %s\" %(option, strt, finish-HOME+1, note))",
"# Move into Home if homeloc >= 0 and homeloc < HOMESIZE: if",
"# But, you know, this is working. # def ValidMove(marble, destination, die, color):",
"players? \")) if NumPlayers == 0: print (\"The only way to win is",
"(die == 1 or die == 6) and (Board[Start[color]]!=color): return True return False",
"circle space, checking # that walk. if Board[destination-i] == color: return False return",
"#print (ccode[space]+space[0].upper()+creset, end=\"\") else: output[i] = ccode[space]+space[0].lower()+creset #print (ccode[space]+space[0].lower()+creset, end=\"\") for i in",
"== CENTER: deadGuy = CenterSpace else: deadGuy = Board[space] Board[space] = \"\" Marbles[deadGuy].append(BASE)",
"print (\"Bonk! %s hits %s!\" %(color,Board[destination])) Bonk(destination) Board[destination] = color Marbles[color].remove(source) Marbles[color].append(destination) return",
"SPACE HANDLING # If my roll can take me to one past the",
"dude+i if not badMove and testloc >= myStart: # testloc is in the",
"# Returns True / False # # This is pretty much a duplicate",
"# Green picks randomly from choices # Blue is the player .. or",
"for a new game, and assigns player colors. # Returns: Number of Players",
"== 5 or NumPlayers == -5: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") else: Players.append(\"Blue\")",
"# Has the game been setup? while not Setup: try: Setup=1 NumPlayers =",
"\" + Board[i] note += \"]\" if not ValidMove(dude, i, die, color): assert",
"in range(0,die+1): if destination-i in MagicCircle: magicDestination = MagicCircle.index(destination-i) # Check all the",
"in range(0,BOARDSIZE): space = Board[i] if space == \"\": # Use a *",
"it is not me, then it is someone else badMove = 1 if",
"to play.\") NumPlayers = -6 robotMode = 1 elif NumPlayers >= -6 and",
"end of Home return False elif Home[color][testloc]: # somebody in the way return",
".sorted to return lists in order def SortMoves(sub_li): sub_li.sort(key = lambda x: x[3])",
"kills # Green picks randomly from choices # Blue is the player ..",
"this to the list # Special processing: If the roll is a 6",
"in case, clear out any previous note # If this marble is in",
"want to do # a full revolution if dude != MoveToCheck: # If",
"i % 10 == 0: output[i] = str(i // 10) else: #output[i] =",
"the Center. marble+die-1 is equal to MagicCircle+1 # Entering the Center space if",
"# Deckard is a replicant! if robotMode and pColor == \"Blue\": selection =",
"GetMoves and have it here only. # But, you know, this is working.",
"= \"\" moveDesc += \"\" + str(source) + \" -> \" # Deal",
"Gets the board ready for a new game, and assigns player colors. #",
"# check because I was having problems. :) I should probably remove #",
"= lambda x: x[0]) return sub_li # # GetMoves (color, die) # #",
"in range(0,die-i+1): # t is number of hops out of circle MoveToCheck =",
"root.mainloop() # # Main # def Main(): GameOver = 0 # Is the",
"Green picks randomly from choices # Blue is the player .. or she",
"in range(1,die+1): if Board[dude+i] == color: yep=0 if yep: note = \"[Center\" if",
"Setup(): # Initialize the bases and colors for c in Colors: Base[c] =",
"Home[color][destination-HOME] != color Home[color][destination-HOME] = color moveDesc += \"Home[\" + str(destination-HOME+1) + \"]\"",
"and pColor == \"Blue\": selection = 1 GotInput = 1 if pColor ==",
"What's this? I need to get the color given the # value. So",
"for dude in Marbles[color]: # print (\"[] GetMoves(color=%s die=%d) - Check %d\" %(color,die,dude))",
"destination and die == 6: return False return True else: # The destination",
"= 1 GotInput = 1 elif pColor == \"Green\": # Take a random",
"duplicate logic from GetMoves and have it here only. # But, you know,",
"= 0 except TypeError: print (\"Please enter a number between 2 and 6.\")",
"Players # def Setup(): # Initialize the bases and colors for c in",
"destination >= HOME hm = Home[color] # hm means Home[color] hp = marble-HOME",
"tkinter.Tk() root.title(\"Marbles!\") canvas = tk.Canvas(root, width=200, height=200, borderwidth=0, bg=\"black\") canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop() #",
"that we'll be returning with ALL valid moves response = [] # For",
"a 6 and want to do # a full revolution if dude !=",
"# # Handle \"regular\" motion starting here: # # CENTER SPACE HANDLING #",
"# # CENTER SPACE HANDLING # If my roll can take me to",
"that isn't bonking because it is me. special=0 if dude == finalspot: #",
"chr(0x00A4) # cool circle thing # Use a # to indicate start spaces",
"Center space if destination == CENTER: assert marble+die-1 in MagicCircle if CenterSpace ==",
"7, 21, 35, 49, 63, 77 ] # Locations for the magic circle",
"in the way return False else: # Still on the main board if",
"# # Roll(): # # Roll a die. # Returns an int between",
"4 or NumPlayers == -4: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") elif NumPlayers == 5",
"%s hits %s!\" %(color, CenterSpace)) moveDesc += \"Bonk \" + CenterSpace + \"!\"",
"HOME=100 # \"Location\" for home spots - 100, 101, 102, 103 HOMESIZE=4 #",
"h in Home[p]: if h == \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\")",
"die==1: for i in MagicCircle: if Board[i] != color: note = \"[Magic Circle\"",
"Movement WITHIN Home if marble >= HOME: assert marble < HOME+HOMESIZE assert destination",
"CIRCLE HANDLING # If I'm in the magic circle, I can continue normal",
"Board[finalspot] and not special: note += \" & \" if Board[finalspot] and not",
"robotMode = 1 elif NumPlayers < 2 or NumPlayers > 6: print (\"Please",
"CENTER assert destination != CENTER # Special case of 6 in the magic",
"to the list # Special processing: If the roll is a 6 in",
"= lambda x: x[3]) #sub_li.sort(reverse=True,key = lambda x: x[0]) return sub_li # #",
"with ALL valid moves response = [] # For each marble, figure out",
"+= \"Bonk \" + Board[destination] + \"!\" print (\"Bonk! %s hits %s!\" %(color,Board[destination]))",
"color: # Passed through teammate # 6 in magic circle means I can",
"False for i in range(1,die+1): if Board[(marble+i)%BOARDSIZE] == color: return False return True",
"Display() # Show the initial game board while not GameOver: # Main game",
"BOARDSIZE: if Board[destination] == color and marble != destination and die != 6:",
"84 # Number of space around the main track CENTER=98 # \"Location\" of",
"print (\"Preparing a %d player game.\" %NumPlayers) if NumPlayers == 2 or NumPlayers",
"if Board[testloc] == color: # Can't pass teammate return False # Checked all",
"assert False return False # # SortMoves(myList) # # Used by .sorted to",
"True # MAGIC CIRCLE HANDLING if marble in MagicCircle: # magicStart is the",
"note += \"Bonk \" + Board[finalspot] if finalspot in MagicCircle or Board[finalspot]: note",
"die=%d) =\" %(color,die),response) return SortMoves(response) # # IsWinner(color) # # Determine if color",
"if i == 42: print (\"\\t\", end=\"\") print (output[104-i], end=\"\") # Print it",
"Colors assert color in Players # print (\"[Entering] GetMoves(color=%s die=%d)\" %(color,die)) # List",
"home status Marbles = {} # Dict of each color's marble locations Players",
"print (\"[] GetMoves(color=%s die=%d) - Check %d\" %(color,die,dude)) note =\"\" # Just in",
"to the nearest magic circle space, checking # that walk. if Board[destination-i] ==",
"ValidMove(dude, Start[color], die, color): assert False response.append([dude, Start[color], note, BOARDSIZE]) firstStart=0 continue else:",
"Valid moves only loc = dude+die # loc is destination space homeloc =",
"# CENTER SPACE HANDLING # If my roll can take me to one",
"<= 0: robotMode = 1 numPlayers *= -1 # TkSetup() Display() # Show",
"root = tkinter.Tk() root.title(\"Marbles!\") canvas = tk.Canvas(root, width=200, height=200, borderwidth=0, bg=\"black\") canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4)",
"in MagicCircle: return True # MAGIC CIRCLE HANDLING if marble in MagicCircle: #",
"else: deadGuy = Board[space] Board[space] = \"\" Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy) # # Move(color,",
"thie duplicate logic from GetMoves and have it here only. # But, you",
"the way return False else: # Still on the main board if Board[testloc]",
"we can bop around by adding die values # to the index in",
"to the index in that list magicStart = MagicCircle.index(marble) for i in range(0,die+1):",
"hope 2 magic circle space and then continue the # normal track, or",
"GotInput = 1 if pColor == \"Red\" or pColor == \"Purple\": # Blood",
"classic marble board game that can be very # frustrating! # from random",
"\": \" # Remove marble from source if source == CENTER: assert CenterSpace",
"the nearest magic circle space, checking # that walk. if Board[destination-i] == color:",
"if (die == 1 or die == 6) and (Board[Start[color]]!=color) and (1==firstStart): note",
"== \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[b]+b[0].lower()+creset, end=\"\") print",
"CenterSpace == color CenterSpace = \"\" moveDesc += \"[Center] -> \" elif source",
"in that list magicStart = MagicCircle.index(marble) for i in range(0,die+1): if destination-i in",
"MagicCircle: magicDestination = MagicCircle.index(destination-i) # Check all the magic spaces between where I",
"if Board[(marble+i)%BOARDSIZE] == color: return False return True # Catch all assert False",
"!= color: note = \"[Magic Circle\" if Board[i]: note += \" & Bonk",
"= list(Start.keys())[list(Start.values()).index(i)] #output[i] = startColor[thiscolor]+\"#\"+creset output[i] = startColor[thiscolor]+chr(0x00BB)+creset output[i] = startColor[thiscolor]+chr(0x033F)+creset #print (startColor[thiscolor]+\"#\"+creset,",
"if not ValidMove(dude, (dude+die)%BOARDSIZE, die, color): assert False distance = BOARDSIZE - ((dude+die)%BOARDSIZE",
"\"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[h]+h[0].lower()+creset, end=\"\") print() #",
"= Start[color] if myStart == 0: # I have grown to hate Blue",
"print (chr(0x00B7), end=\"\") else: print (ccode[b]+b[0].lower()+creset, end=\"\") print (\"\\tHome:\\t\", end=\"\") for h in",
"Entering the Center space if destination == CENTER: assert marble+die-1 in MagicCircle if",
"destination >= HOME: assert Home[color][destination-HOME] != color Home[color][destination-HOME] = color moveDesc += \"Home[\"",
"source == BASE: # Remove the marble from the base assert Base[color].count(color) >",
"turnNum = 0 robotMode = 0 # A human is needed numPlayers =",
"deadGuy = CenterSpace else: deadGuy = Board[space] Board[space] = \"\" Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy)",
"note += \"Magic Circle\" if finalspot in MagicCircle and Board[finalspot] and not special:",
"# # Check if the move from marble to destination via die is",
"magic # circle, that isn't bonking because it is me. special=0 if dude",
"circle space if dude == CENTER: if die==1: for i in MagicCircle: if",
"== 2 or NumPlayers == -2: Players.append(\"Blue\") Players.append(\"Purple\") elif NumPlayers == 3 or",
"Home if marble >= HOME: assert marble < HOME+HOMESIZE assert destination >= HOME",
"0 selection = 0 # Red always goes for the kill # White",
"Blue, with start space of 0, # because of modulo problems. elif (dude",
"%NumPlayers) if NumPlayers == 2 or NumPlayers == -2: Players.append(\"Blue\") Players.append(\"Purple\") elif NumPlayers",
"BASE: # Remove the marble from the base assert Base[color].count(color) > 0 Base[color].remove(color)",
"output = [\"-\" for x in range(0,BOARDSIZE)] for i in range(0,BOARDSIZE): space =",
"or ... if dude in MagicCircle: # circleNum is the index of where",
"print(\"\\t\"+output[104-i],\"\\t\\t \",output[i]) elif i >= 42 and i < 63: if i ==",
"# Only want to add Start once for dude in Marbles[color]: # print",
"then continue the # normal track, or ... if dude in MagicCircle: #",
"21 and i < 42: if i == 31: if CenterSpace: cen =",
"can enter the Center. marble+die-1 is equal to MagicCircle+1 # Entering the Center",
"if Board[i]: note += \" & Bonk \" + Board[i] note += \"]\"",
">= HOME: hm = Home[color] # hm means Home[color] hp = dude-HOME #",
"HOME myStart = Start[color] if myStart == 0: # I have grown to",
"because I was having problems. :) I should probably remove # most of",
"return False if hp+i > HOMESIZE or hm[hp+i] == color: return False return",
"dude != MoveToCheck: # If it is not me, then it is someone",
"not GotInput: option=1 # Counter for the user input menu for move in",
"move if finish >= HOME: if strt >= HOME: print(\"\\t[%d] Home[%d] -> Home[%d]",
"active players # Marbles[color] : { location0, location1, location2, location3 } # Start[color]",
"NumPlayers == 4 or NumPlayers == -4: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") elif NumPlayers",
"new game, and assigns player colors. # Returns: Number of Players # def",
"# Deal with bonking if destination is not empty if Board[destination]: moveDesc +=",
"into Home? if testloc >= HOMESIZE: # Ran off the end of Home",
"because of modulo problems. elif (dude < Start[color] and (dude+die)%BOARDSIZE >= Start[color]) or",
"hops out of circle MoveToCheck = (circleExit + t)%BOARDSIZE if Board[MoveToCheck] == color:",
"def Main(): GameOver = 0 # Is the game over turnNum = 0",
"CENTER: deadGuy = CenterSpace else: deadGuy = Board[space] Board[space] = \"\" Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space)",
"and colors for c in Colors: Base[c] = [ c, c, c, c]",
"Start.values(): # What's this? I need to get the color given the #",
"ALL valid moves response = [] # For each marble, figure out all",
"of permutations for magic circle... for i in range(0, die+1): circleExit = MagicCircle[(circleNum+i)%len(MagicCircle)]",
"I roll a 6 and want to do # a full revolution if",
"in range(0,numPlayers): again=1 # Flag for when a player rolls a 6 while",
"ending where you start if marble == destination and die == 6 and",
"in MagicCircle if CenterSpace == color: return False for i in range(1,die+1): if",
"in Base, see if it can get out if marble == BASE: assert",
"list circleNum = MagicCircle.index(dude) # Lots of permutations for magic circle... for i",
"False return True # Leaving the Center space if marble == CENTER: if",
"== \"Blue\": selection = 1 GotInput = 1 if pColor == \"Red\" or",
"color given the # value. So here's a bunch of casting black magic",
"\"Magic Circle\" if finalspot in MagicCircle and Board[finalspot] and not special: note +=",
"die values # to the index in that list magicStart = MagicCircle.index(marble) for",
"-2: Players.append(\"Blue\") Players.append(\"Purple\") elif NumPlayers == 3 or NumPlayers == -3: Players.append(\"Blue\") Players.append(\"Cyan\")",
"marble+i if testloc >= myStart: # testloc is in the Home zone testloc",
"die < 7 assert color # Quick check to see if there's a",
"random import randint #import tkinter # GLOBALS BOARDSIZE = 84 # Number of",
"# The destination is that color's start destination = Start[color] moveDesc += \"[Base]",
"are belong to us. Marbles[c] = [BASE, BASE, BASE, BASE ] robotMode =",
"BASE=99 # \"Location\" for base spots. All are 99. HOME=100 # \"Location\" for",
"revolution if dude != MoveToCheck: # If it is not me, then it",
"\"\" moveDesc += \"[Center] -> \" elif source == BASE: # Remove the",
"(\"\\t[%d] %d -> %d %s\" %(option, strt, finish,note)) else: print (\"\\t[%d] %d ->",
"# XXX: This could be replaced with Tk or something else. # def",
"(\"\\t[%d] %d -> %d %s\" %(option, strt, finish, note)) option+=1 try: selection =",
"If I'm in the magic circle, I can continue normal track, or #",
"color's home status Marbles = {} # Dict of each color's marble locations",
"6.\") Setup = 0 print (\"Preparing a %d player game.\" %NumPlayers) if NumPlayers",
"= 0 except TypeError: print (\"Bad input\") GotInput = 0 src,dst,note,distance = moves[selection-1]",
"marbles ccode={ # [magic];Attrib;FG;BGm \"Blue\": \"\\033[1;97;44m\", \"Red\": \"\\033[1;97;41m\", \"Cyan\": \"\\033[1;97;46m\", \"Purple\": \"\\033[1;97;45m\", \"Green\":",
"thing # Use a # to indicate start spaces elif i in Start.values():",
"in moves: strt, finish, note, distance = move if finish >= HOME: if",
"Start[color] moveDesc += \"[Base] -> \" elif source >= HOME: Home[color][source-HOME] = \"\"",
"\"!\" print (\"Bonk! %s hits %s!\" %(color,Board[destination])) Bonk(destination) Board[destination] = color Marbles[color].remove(source) Marbles[color].append(destination)",
"now takes 1 # Cyan takes option 1 # Purple kills # Green",
"if i == 20: print() elif i >= 21 and i < 42:",
"- Start[color]) % BOARDSIZE response.append([dude, loc, note, distance]) # Movement WITHIN Home elif",
"Center %s\" %(option,strt,note)) elif finish in MagicCircle: print (\"\\t[%d] %d -> %d %s\"",
"myStart: if Board[loc]: note = \"[Bonk \" + Board[loc] + \"]\" if not",
"not me, then it is someone else badMove = 1 if t==0: #",
"to see if there's a teammate at the destination if destination < BOARDSIZE:",
"do # a full revolution if dude != MoveToCheck: # If it is",
"if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"[\" if (dude+die)%BOARDSIZE in MagicCircle:",
"] robotMode = 0 Setup = 0 # Has the game been setup?",
"(ccode[space]+space[0].upper()+creset, end=\"\") else: output[i] = ccode[space]+space[0].lower()+creset #print (ccode[space]+space[0].lower()+creset, end=\"\") for i in range(0,BOARDSIZE):",
"elif i >= 21 and i < 42: if i == 31: if",
"Board[destination]: moveDesc += \"Bonk \" + Board[destination] + \"!\" print (\"Bonk! %s hits",
"Board[destination] == color and marble != destination and die != 6: return False",
"each color's marble locations Players = [] # List of active players #",
"if dude == finalspot: # End where I started special=1 note = \"\"",
"< 2 or NumPlayers > 6: print (\"Please enter a number between 2",
"return True # MAGIC CIRCLE HANDLING if marble in MagicCircle: # magicStart is",
"Print it backwards print(\"\\n\") for p in Players: print (\"%s\\t\" %p, end=\"\") print",
"finish-HOME+1, note)) elif strt == CENTER: print(\"\\t[%d] Center -> %d %s\" %(option,finish,note)) elif",
"% 10 == 0: output[i] = str(i // 10) else: #output[i] = (\"-\")",
"that list magicStart = MagicCircle.index(marble) for i in range(0,die+1): if destination-i in MagicCircle:",
"yep: note = \"[Center\" if CenterSpace: note += \" & Bonk \" +",
"and have it here only. # But, you know, this is working. #",
"(\"Preparing a %d player game.\" %NumPlayers) if NumPlayers == 2 or NumPlayers ==",
"badMove = 1 if t==0: # The magic circle is poisoned from #",
"Check magic circle spots I traversed for mc in range(1,i+1): if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] ==",
"if finish == CENTER: print (\"\\t[%d] %d -> Center %s\" %(option,strt,note)) elif finish",
"CENTER: assert marble+die-1 in MagicCircle if CenterSpace == color: return False for i",
"in Base, see if it can get out if dude == BASE: if",
"0 for i in range(1,die): if Board[(dude+i)%BOARDSIZE] == color: selfPass = 1 continue",
"magic circle space, checking # that walk. if Board[destination-i] == color: return False",
"+= \"Bonk \" + CenterSpace + \"!\" Bonk(CENTER) CenterSpace = color elif destination",
"enter a number between 2 and 6.\") Setup=0 except KeyError: print (\"Please enter",
"if marble >= HOME: assert marble < HOME+HOMESIZE assert destination >= HOME hm",
"\"Purple\": \"\\033[1;35;40m\", \"Green\": \"\\033[1;32;40m\", \"White\": \"\\033[1;37;40m\", } # Reset the color to default",
"out the state of the board. # XXX: This could be replaced with",
"% BOARDSIZE response.append([dude, (dude+die)%BOARDSIZE, note, distance]) # Done! # print (\"[Leaving] GetMoves(color=%s die=%d)",
"and homeloc < HOMESIZE: if not ValidMove(dude, HOME+homeloc, die, color): assert False response.append([dude,",
"return True # Catch all assert False return False # # SortMoves(myList) #",
"Roll(): return randint(1,6) # # Display(): # # Prints out the state of",
"is valid # Returns True / False # # This is pretty much",
"of thie duplicate logic from GetMoves and have it here only. # But,",
"\"White\": \"\\033[1;31;47m\", } # ANSI color codes for start startColor={ \"Blue\": \"\\033[1;34;40m\", \"Red\":",
"# Check regular spots after I left circle for t in range(0,die-i+1): #",
"here's a bunch of casting black magic to # do that. thiscolor =",
"end=\"\") for i in range(0,BOARDSIZE): if i >=0 and i < 21: if",
"Dict of each color's marble locations Players = [] # List of active",
"hm means Home[color] hp = marble-HOME # hp means Home Position for i",
"== color: if marble == destination and die == 6: return False return",
"= 0 myStart = Start[color] if myStart == 0: # HACK for Blue",
"# # send a guy back to base # def Bonk(space): if space",
"end=\"\") # Occupied space else: # If you're on the magic circle, you",
"%d %s\" %(option,finish,note)) elif strt == BASE: print (\"\\t[%d] Base -> Start %s\"",
"# Something insane happened? # Movement WITHIN Home if marble >= HOME: assert",
"if dude+die-1 in MagicCircle and CenterSpace != color: yep=1 for i in range(1,die+1):",
"* to indicate magic circle if i in MagicCircle: #output[i] = \"*\" #print",
"index in that list magicStart = MagicCircle.index(marble) for i in range(0,die+1): if destination-i",
"and (Board[Start[color]]!=color): return True return False assert marble != BASE # CENTER SPACE",
"sucked so now takes 1 # Cyan takes option 1 # Purple kills",
"- HOME # homeloc is (potential) home space # Move into Home if",
"remove # most of thie duplicate logic from GetMoves and have it here",
"note += \" & Bonk \" + CenterSpace note += \"]\" if not",
"around by adding die values # to the index in that list circleNum",
"\"Cyan\": 28, \"Purple\": 42, \"Green\": 56, \"White\": 70 } # # Roll(): #",
"# Marbles! # # An implementation of the classic marble board game that",
"rain.\") robotMode = 1 elif NumPlayers < 2 or NumPlayers > 6: print",
"Start once for dude in Marbles[color]: # print (\"[] GetMoves(color=%s die=%d) - Check",
"So here's a bunch of casting black magic to # do that. thiscolor",
"If the roll is a 6 in magic # circle, that isn't bonking",
"when a player rolls a 6 while again: again=0 pColor = Players[p] myRoll",
"enter the Center. marble+die-1 is equal to MagicCircle+1 # Entering the Center space",
"== BASE: # Remove the marble from the base assert Base[color].count(color) > 0",
"Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy) # # Move(color, source, destination): # # Move marble of",
"= {} # Dict of each color's base status Home = {} #",
"dude in MagicCircle: # circleNum is the index of where we are in",
"end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[b]+b[0].lower()+creset, end=\"\") print (\"\\tHome:\\t\", end=\"\") for h",
"where I entered # and where I exited for j in range(magicStart, magicDestination+1):",
"= randint(1,len(moves)) GotInput = 1 while not GotInput: option=1 # Counter for the",
"2 and 6.\") Setup = 0 except ValueError: print (\"Please enter a number",
"%(pColor, myRoll)) moves = GetMoves(pColor, myRoll) if not moves: print (\"No moves available.\")",
"Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") else: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") Players.append(\"Green\") return",
"has won. Returns True/False # def IsWinner(color): win=1 for i in range(0, HOMESIZE):",
"start startColor={ \"Blue\": \"\\033[1;34;40m\", \"Red\": \"\\033[1;31;40m\", \"Cyan\": \"\\033[1;36;40m\", \"Purple\": \"\\033[1;35;40m\", \"Green\": \"\\033[1;32;40m\", \"White\":",
"if not ValidMove(dude, finalspot, die, color): assert False distance = BOARDSIZE - (finalspot",
"ccode[CenterSpace]+CenterSpace[0].upper()+creset else: #cen = \"-\" #cen = chr(216) cen = chr(0x00A7) # Hurricane",
"\"\", \"\", \"\"] # Where are my marbles? All your base are belong",
"marble == destination and die == 6: return False return True else: #",
"# ANSI color codes for the marbles ccode={ # [magic];Attrib;FG;BGm \"Blue\": \"\\033[1;97;44m\", \"Red\":",
"range(1,die+1): if Board[dude+i] == color: yep=0 if yep: note = \"[Center\" if CenterSpace:",
"very # frustrating! # from random import randint #import tkinter # GLOBALS BOARDSIZE",
"GetMoves(color=%s die=%d) =\" %(color,die),response) return SortMoves(response) # # IsWinner(color) # # Determine if",
"\"Purple\", \"Green\", \"White\" ] Board = [\"\" for x in range(0,BOARDSIZE)] CenterSpace =",
"win=1 for i in range(0, HOMESIZE): if Home[color][i] != color: win=0 break return",
"pColor == \"Purple\" or pColor == \"White\": # Always take the first option",
"see if there's a teammate at the destination if destination < BOARDSIZE: if",
"HOME hm = Home[color] # hm means Home[color] hp = marble-HOME # hp",
"end=\"\") else: print (ccode[b]+b[0].lower()+creset, end=\"\") print (\"\\tHome:\\t\", end=\"\") for h in Home[p]: if",
"home? Colors = [ \"Blue\", \"Red\", \"Cyan\", \"Purple\", \"Green\", \"White\" ] Board =",
"Players.append(\"Cyan\") Players.append(\"Green\") elif NumPlayers == 4 or NumPlayers == -4: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\")",
"color: return False return True # \"NORMAL\" MOVEMENT if marble not in MagicCircle",
"6.\") Setup=0 except KeyError: print (\"Please enter a number between 2 and 6.\")",
"Board[destination] != color moveDesc += \"\" + str(destination) + \" \" # Deal",
"== 4 or NumPlayers == -4: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") elif NumPlayers ==",
"# # GetMoves (color, die) # # Return a list of the valid",
"return False return True # \"NORMAL\" MOVEMENT if marble not in MagicCircle and",
"elif i >= 42 and i < 63: if i == 42: print",
"Setup = 0 # Has the game been setup? while not Setup: try:",
"Return a list of the valid player options with a die roll #",
"finish, note, distance = move if finish >= HOME: if strt >= HOME:",
"moves[i][2]: selection = i+1 print (\"Kill!\", moves[i]) break if not selection: selection =",
"in magic circle means I can land on myself if mc == 6:",
"chr(216) cen = chr(0x00A7) # Hurricane print(\"\\t%s\\t %s\\t %s\" %(output[104-i],cen,output[i])) else: print(\"\\t\"+output[104-i],\"\\t\\t \",output[i])",
"or NumPlayers > 6: print (\"Please enter a number between 2 and 6.\")",
"moveDesc += \"Bonk \" + CenterSpace + \"!\" Bonk(CENTER) CenterSpace = color elif",
"shall flow GotInput = 1 for i in range(0,len(moves)): if \"Bonk\" in moves[i][2]:",
"check to see if there's a teammate at the destination if destination <",
"end=\"\") # Print it backwards print(\"\\n\") for p in Players: print (\"%s\\t\" %p,",
"special=0 if dude == finalspot: # End where I started special=1 note =",
"again=0 pColor = Players[p] myRoll = Roll() print (\"\\n%s rolled: %d\\n\" %(pColor, myRoll))",
"destination): # # Move marble of color color from source to destination. #",
"# If you're on the magic circle, you get an upper case #",
"if not ValidMove(dude, HOME+homeloc, die, color): assert False response.append([dude, HOME+homeloc, \"[Home]\", 0]) #",
"= 1 if selection < 1 or selection > len(moves): print (\"That's not",
"# Still on the main board if Board[testloc%BOARDSIZE] == color: # Can't pass",
"if Board[(dude+i)%BOARDSIZE] == color: selfPass = 1 continue if not selfPass: note =",
"number of hops out of circle MoveToCheck = (circleExit + t)%BOARDSIZE if Board[MoveToCheck]",
"this? I need to get the color given the # value. So here's",
"for the kill # White tried to be optimal, but sucked so now",
"else: # Still on the main board if Board[testloc%BOARDSIZE] == color: # Can't",
"# GLOBALS BOARDSIZE = 84 # Number of space around the main track",
"the normal # track, or hope 2 magic circle space and then continue",
"SortMoves(sub_li): sub_li.sort(key = lambda x: x[3]) #sub_li.sort(reverse=True,key = lambda x: x[0]) return sub_li",
"in the way badMove = 1 else: # Still on the main board",
"note, distance]) continue assert dude != CENTER # MAGIC CIRCLE HANDLING # If",
"hp+i > HOMESIZE or hm[hp+i] == color: valid=0 continue if valid: if not",
"hp means Home Position for i in range(1,die+1): if(hp+i >= HOMESIZE): return False",
"CENTER SPACE HANDLING # If my roll can take me to one past",
"moves firstStart=1 # Only want to add Start once for dude in Marbles[color]:",
"I didn't pass a teammate between dude # and out badMove=0 circleBlock=0 #",
"where I roll a 6 and want to do # a full revolution",
"break return bool(win) def TkSetup(): root = tkinter.Tk() root.title(\"Marbles!\") canvas = tk.Canvas(root, width=200,",
"be returning with ALL valid moves response = [] # For each marble,",
"- (loc - Start[color]) % BOARDSIZE response.append([dude, loc, note, distance]) # Movement WITHIN",
"White tried to be optimal, but sucked so now takes 1 # Cyan",
"= BOARDSIZE - (loc - Start[color]) % BOARDSIZE response.append([dude, loc, note, distance]) #",
"1 for i in range(0,len(moves)): if \"Bonk\" in moves[i][2]: selection = i+1 print",
"Board[i] note += \"]\" if not ValidMove(dude, i, die, color): assert False distance",
"from random import randint #import tkinter # GLOBALS BOARDSIZE = 84 # Number",
"MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"]\" if not ValidMove(dude, (dude+die)%BOARDSIZE, die, color): assert",
"# a full revolution if dude != MoveToCheck: # If it is not",
"game, and assigns player colors. # Returns: Number of Players # def Setup():",
"lambda x: x[0]) return sub_li # # GetMoves (color, die) # # Return",
"something else. # def Display(): # Color! # ANSI color codes for the",
"End where I started special=1 note = \"\" if (finalspot in MagicCircle) or",
"homeloc >= 0 and homeloc < HOMESIZE: return True assert False return False",
"distance]) # Done! # print (\"[Leaving] GetMoves(color=%s die=%d) =\" %(color,die),response) return SortMoves(response) #",
"a %d player game.\" %NumPlayers) if NumPlayers == 2 or NumPlayers == -2:",
"Check all the magic spaces between where I entered # and where I",
"pass a teammate between dude # and out badMove=0 circleBlock=0 # Check magic",
"end=\"\") print (output[104-i], end=\"\") # Print it backwards print(\"\\n\") for p in Players:",
"if myStart == 0: # HACK for Blue with start of 0 myStart",
"isn't bonking because it is me. special=0 if dude == finalspot: # End",
"between here and my final location for # teammates for i in range(1,die+1):",
"option: \")) GotInput = 1 if selection < 1 or selection > len(moves):",
"range(1,die+1): if Board[(marble+i)%BOARDSIZE] == color: return False return True # Leaving the Center",
"for i in range(1,die+1): if Board[(marble+i)%BOARDSIZE] == color: return False return True #",
"Board[finalspot] and not special: note += \"Bonk \" + Board[finalspot] if finalspot in",
"(\"Like tears in rain.\") robotMode = 1 elif NumPlayers < 2 or NumPlayers",
"IsWinner(color): win=1 for i in range(0, HOMESIZE): if Home[color][i] != color: win=0 break",
"the index of where we are in the magic # circle list, so",
"or \\ (Start[color] == 0 and dude < Start[color]+BOARDSIZE and dude+die >= Start[color]+BOARDSIZE):",
"CenterSpace = \"\" moveDesc += \"[Center] -> \" elif source == BASE: #",
"[] # For each marble, figure out all possible moves firstStart=1 # Only",
"in range(1,die+1): testloc = dude+i if not badMove and testloc >= myStart: #",
"(\"[Entering] GetMoves(color=%s die=%d)\" %(color,die)) # List that we'll be returning with ALL valid",
"return randint(1,6) # # Display(): # # Prints out the state of the",
"i in range(1,die+1): testloc = marble+i if testloc >= myStart: # testloc is",
"note)) elif strt == CENTER: print(\"\\t[%d] Center -> %d %s\" %(option,finish,note)) elif strt",
"in range(1,i+1): if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] == color: # Passed through teammate # 6 in",
"rolled a 6! Take another turn.\" %pColor) again=1 if IsWinner(pColor): print (\"%s wins",
"could be replaced with Tk or something else. # def Display(): # Color!",
"center of death. BASE=99 # \"Location\" for base spots. All are 99. HOME=100",
"assert Board[destination] != color moveDesc += \"\" + str(destination) + \" \" #",
"CENTER # Special case of 6 in the magic circle ending where you",
"and destination space homeloc = destination - HOME # homeloc is (potential) home",
"MagicCircle: print (\"\\t[%d] %d -> %d %s\" %(option, strt, finish,note)) else: print (\"\\t[%d]",
"== 0: print (\"The only way to win is not to play.\") NumPlayers",
"return False else: # Still on the main board if Board[testloc] == color:",
"+= \"Bonk \" + Board[(dude+die)%BOARDSIZE] if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note +=",
"in magic # circle, that isn't bonking because it is me. special=0 if",
"(circleExit + t)%BOARDSIZE if Board[MoveToCheck] == color: # Handle case where I roll",
"or Board[(dude+die)%BOARDSIZE]: note += \"[\" if (dude+die)%BOARDSIZE in MagicCircle: note += \"Magic Circle\"",
"circle space and then continue the normal # track, or hope 2 magic",
"BOARDSIZE and destination < BOARDSIZE: for i in range(1,die): if Board[(marble+i)%BOARDSIZE] == color:",
"= str(i // 10) else: #output[i] = (\"-\") output[i] = chr(0x00B7) # A",
"else: output[i] = ccode[space]+space[0].lower()+creset #print (ccode[space]+space[0].lower()+creset, end=\"\") for i in range(0,BOARDSIZE): if i",
"i < 63: if i == 42: print (\"\\t\", end=\"\") print (output[104-i], end=\"\")",
"\"\\033[1;35;40m\", \"Green\": \"\\033[1;32;40m\", \"White\": \"\\033[1;37;40m\", } # Reset the color to default creset=\"\\033[m\"",
"-2: print (\"Like tears in rain.\") robotMode = 1 elif NumPlayers < 2",
"Display() print (response) if myRoll == 6: print(\"%s rolled a 6! Take another",
"print (\"No moves available.\") continue GotInput = 0 selection = 0 # Red",
"!= color: yep=1 for i in range(1,die+1): if Board[dude+i] == color: yep=0 if",
"if Board[dude+i] == color: yep=0 if yep: note = \"[Center\" if CenterSpace: note",
"%s)\" %(src,dst,myRoll,pColor)) return False response = Move(pColor, src, dst) Display() print (response) if",
"to indicate start spaces elif i in Start.values(): # What's this? I need",
"= Board[i] if space == \"\": # Use a * to indicate magic",
"+= \"]\" if not ValidMove(dude, i, die, color): assert False distance = BOARDSIZE",
"canvas = tk.Canvas(root, width=200, height=200, borderwidth=0, bg=\"black\") canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop() # # Main",
"= Setup() if numPlayers <= 0: robotMode = 1 numPlayers *= -1 #",
"\"Home[\" + str(source-HOME+1) + \"] -> \" else: assert Board[source] == color Board[source]",
"+= \"Magic Circle\" if finalspot in MagicCircle and Board[finalspot] and not special: note",
"note, distance = move if finish >= HOME: if strt >= HOME: print(\"\\t[%d]",
"if i in MagicCircle: #output[i] = \"*\" #print (\"*\", end=\"\") output[i] = chr(0x00A4)",
"circle spots I traversed for mc in range(1,i+1): if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] == color: #",
"= {} # Dict of each color's home status Marbles = {} #",
"} # ANSI color codes for start startColor={ \"Blue\": \"\\033[1;34;40m\", \"Red\": \"\\033[1;31;40m\", \"Cyan\":",
"response.append([dude, finalspot, note, distance]) # MOVEMENT INTO HOME # NB: Add special cases",
"if not ValidMove(src,dst,myRoll,pColor): print (\"ERROR: ValidMove(%d, %d, %d, %s)\" %(src,dst,myRoll,pColor)) return False response",
"elif NumPlayers == 4 or NumPlayers == -4: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") elif",
"or NumPlayers == -3: Players.append(\"Blue\") Players.append(\"Cyan\") Players.append(\"Green\") elif NumPlayers == 4 or NumPlayers",
"option=1 # Counter for the user input menu for move in moves: strt,",
"on the Board elif loc < myStart: if Board[loc]: note = \"[Bonk \"",
"I got a one, I can roll out to any # magic circle",
"# hop one magic circle space and then continue the normal # track,",
"ValidMove(dude, CENTER, die, color): assert False distance = BOARDSIZE - 8 response.append([dude, CENTER,",
"Board[(dude+die)%BOARDSIZE]: note += \"[\" if (dude+die)%BOARDSIZE in MagicCircle: note += \"Magic Circle\" if",
"numPlayers = Setup() if numPlayers <= 0: robotMode = 1 numPlayers *= -1",
"# somebody in the way return False else: # Still on the main",
"str(i // 10) else: #output[i] = (\"-\") output[i] = chr(0x00B7) # A nice",
"== color CenterSpace = \"\" moveDesc += \"[Center] -> \" elif source ==",
"not badMove: # Valid moves only loc = dude+die # loc is destination",
"circleBlock: continue if not badMove: # Add this to the list # Special",
"-> Center %s\" %(option,strt,note)) elif finish in MagicCircle: print (\"\\t[%d] %d -> %d",
"1 elif pColor == \"Cyan\" or pColor == \"Purple\" or pColor == \"White\":",
"BOARDSIZE - (finalspot - Start[color]) % BOARDSIZE response.append([dude, finalspot, note, distance]) # MOVEMENT",
"NumPlayers = int(input(\"How many players? \")) if NumPlayers == 0: print (\"The only",
"# can enter the Center. dude+die-1 is equal to MagicCircle+1 if dude+die-1 in",
"else: print (ccode[h]+h[0].lower()+creset, end=\"\") print() # # Setup(): # # Gets the board",
"Home Position for i in range(1,die+1): if(hp+i >= HOMESIZE): return False if hp+i",
"die == 6) and (Board[Start[color]]!=color) and (1==firstStart): note = \"[Start\" if Board[Start[color]]: note",
"\" + Board[Start[color]] note += \"]\" if not ValidMove(dude, Start[color], die, color): assert",
"GotInput = 1 while not GotInput: option=1 # Counter for the user input",
"location for # teammates for i in range(1,die+1): testloc = marble+i if testloc",
"color from source to destination. # def Move(color, source, destination): global CenterSpace moveDesc",
"because it is me. special=0 if dude == finalspot: # End where I",
"def Setup(): # Initialize the bases and colors for c in Colors: Base[c]",
"False distance = BOARDSIZE - ((dude+die)%BOARDSIZE - Start[color]) % BOARDSIZE response.append([dude, (dude+die)%BOARDSIZE, note,",
"output[i] = chr(0x00B7) # A nice dot #print (\"-\", end=\"\") # Occupied space",
"# Move marble of color color from source to destination. # def Move(color,",
"the center and I got a one, I can roll out to any",
"and i < 42: if i == 31: if CenterSpace: cen = ccode[CenterSpace]+CenterSpace[0].upper()+creset",
"if finalspot in MagicCircle: note += \"Magic Circle\" if finalspot in MagicCircle and",
"HACK for Blue with start of 0 myStart = BOARDSIZE for i in",
"elif pColor == \"Green\": # Take a random option selection = randint(1,len(moves)) GotInput",
"teammate at the destination if destination < BOARDSIZE: if Board[destination] == color and",
"-1 # TkSetup() Display() # Show the initial game board while not GameOver:",
"Players.append(\"Blue\") Players.append(\"Purple\") elif NumPlayers == 3 or NumPlayers == -3: Players.append(\"Blue\") Players.append(\"Cyan\") Players.append(\"Green\")",
"player game.\" %NumPlayers) if NumPlayers == 2 or NumPlayers == -2: Players.append(\"Blue\") Players.append(\"Purple\")",
"or Board[finalspot]: note += \"]\" if not ValidMove(dude, finalspot, die, color): assert False",
"Players[p] myRoll = Roll() print (\"\\n%s rolled: %d\\n\" %(pColor, myRoll)) moves = GetMoves(pColor,",
"} # Reset the color to default creset=\"\\033[m\" output = [\"-\" for x",
"# Move(color, source, destination): # # Move marble of color color from source",
"p in range(0,numPlayers): again=1 # Flag for when a player rolls a 6",
"this game myStart = BOARDSIZE if marble < myStart and marble+die >= myStart:",
"# Roll a die. # Returns an int between 1 and 6 #",
"%(color, CenterSpace)) moveDesc += \"Bonk \" + CenterSpace + \"!\" Bonk(CENTER) CenterSpace =",
"color: # Can't pass teammate return False # Checked all intermediate spaces, and",
"# hm means Home[color] hp = marble-HOME # hp means Home Position for",
"return False assert marble != CENTER assert destination != CENTER # Special case",
"NB: Add special cases for Blue, with start space of 0, # because",
"# Quick check to see if there's a teammate at the destination if",
"marble != CENTER assert destination != CENTER # Special case of 6 in",
"index in that list circleNum = MagicCircle.index(dude) # Lots of permutations for magic",
"color codes for start startColor={ \"Blue\": \"\\033[1;34;40m\", \"Red\": \"\\033[1;31;40m\", \"Cyan\": \"\\033[1;36;40m\", \"Purple\": \"\\033[1;35;40m\",",
"big is your home? Colors = [ \"Blue\", \"Red\", \"Cyan\", \"Purple\", \"Green\", \"White\"",
"any # magic circle space if dude == CENTER: if die==1: for i",
"False distance = BOARDSIZE - (finalspot - Start[color]) % BOARDSIZE response.append([dude, finalspot, note,",
"in the magic circle, I can continue normal track, or # hop one",
"homeloc = destination - HOME # homeloc is (potential) home space # Move",
"} # # Roll(): # # Roll a die. # Returns an int",
"source to destination. # def Move(color, source, destination): global CenterSpace moveDesc = color",
"Board[loc] + \"]\" if not ValidMove(dude, loc, die, color): assert False distance =",
"on the magic circle, you get an upper case # letter if i",
"Circle\" if finalspot in MagicCircle and Board[finalspot] and not special: note += \"",
"output[i] = ccode[space]+space[0].lower()+creset #print (ccode[space]+space[0].lower()+creset, end=\"\") for i in range(0,BOARDSIZE): if i >=0",
"-6 and NumPlayers <= -2: print (\"Like tears in rain.\") robotMode = 1",
"the magic # circle list, so we can bop around by adding die",
"print() # # Setup(): # # Gets the board ready for a new",
"Board[i] != color: note = \"[Magic Circle\" if Board[i]: note += \" &",
"= 0 for i in range(1,die): if Board[(dude+i)%BOARDSIZE] == color: selfPass = 1",
"moves only loc = dude+die # loc is destination space homeloc = loc",
"But, you know, this is working. # def ValidMove(marble, destination, die, color): #",
"%s\\t %s\" %(output[104-i],cen,output[i])) else: print(\"\\t\"+output[104-i],\"\\t\\t \",output[i]) elif i >= 42 and i <",
"31: if CenterSpace: cen = ccode[CenterSpace]+CenterSpace[0].upper()+creset else: #cen = \"-\" #cen = chr(216)",
"Start[color]) % BOARDSIZE response.append([dude, finalspot, note, distance]) # MOVEMENT INTO HOME # NB:",
"is poisoned from # here on out.. circleBlock = 1 continue if circleBlock:",
"# Purple kills # Green picks randomly from choices # Blue is the",
"GLOBALS BOARDSIZE = 84 # Number of space around the main track CENTER=98",
"+= \"[Center] -> \" elif source == BASE: # Remove the marble from",
"response.append([dude, dude+die, \"[Home]\", 0]) # \"NORMAL\" MOVEMENT elif Board[(dude+die)%BOARDSIZE] != color: selfPass =",
"if Board[(dude+die)%BOARDSIZE]: note += \"Bonk \" + Board[(dude+die)%BOARDSIZE] if (dude+die)%BOARDSIZE in MagicCircle or",
"< Start[color]+BOARDSIZE and dude+die >= Start[color]+BOARDSIZE): badMove = 0 myStart = Start[color] if",
"HOME: print(\"\\t[%d] Home[%d] -> Home[%d] %s\" \\ %(option, strt-HOME+1, finish-HOME+1, note)) else: print(\"\\t[%d]",
"random option selection = randint(1,len(moves)) GotInput = 1 while not GotInput: option=1 #",
"color Marbles[color].remove(source) Marbles[color].append(destination) return moveDesc # # ValidMove (marble, destination, die) # #",
"Returns True / False # # This is pretty much a duplicate of",
"I left circle for t in range(0,die-i+1): # t is number of hops",
"circle for t in range(0,die-i+1): # t is number of hops out of",
"location3 } # Start[color] : space# Start = { \"Blue\": 0, \"Red\": 14,",
"empty if Board[destination]: moveDesc += \"Bonk \" + Board[destination] + \"!\" print (\"Bonk!",
"startColor[thiscolor]+chr(0x00BB)+creset output[i] = startColor[thiscolor]+chr(0x033F)+creset #print (startColor[thiscolor]+\"#\"+creset, end=\"\") elif i % 10 == 0:",
"destination = Start[color] moveDesc += \"[Base] -> \" elif source >= HOME: Home[color][source-HOME]",
"I was having problems. :) I should probably remove # most of thie",
"\" -> \" # Deal with possible destinations if destination == CENTER: assert",
"(\"\\tHome:\\t\", end=\"\") for h in Home[p]: if h == \"\": #print (\"-\", end=\"\")",
"ValueError: if len(moves) == 1: selection = 1 GotInput = 1 else: print",
"end=\"\") for h in Home[p]: if h == \"\": #print (\"-\", end=\"\") print",
"Base[p]: if b == \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print",
"die, color): assert False distance = BOARDSIZE - (finalspot - Start[color]) % BOARDSIZE",
"where we are in the magic # circle list, so we can bop",
"%(marble, destination, die, color)) assert die > 0 and die < 7 assert",
"a number between 2 and 6.\") Setup = 0 except TypeError: print (\"Please",
"print(\"\\n\") for p in Players: print (\"%s\\t\" %p, end=\"\") print (\"Base:\\t\", end=\"\") for",
"is the player .. or she chooses 1 # Deckard is a replicant!",
"# magicStart is the index of where we are in the magic #",
"return True else: # The destination is not in the magic circle, so",
"HOME+homeloc, \"[Home]\", 0]) # Still on the Board elif loc < myStart: if",
"except ValueError: if len(moves) == 1: selection = 1 GotInput = 1 else:",
"MagicCircle = [ 7, 21, 35, 49, 63, 77 ] # Locations for",
"(\"Bonk! %s hits %s!\" %(color,Board[destination])) Bonk(destination) Board[destination] = color Marbles[color].remove(source) Marbles[color].append(destination) return moveDesc",
"(i - Start[color]) % BOARDSIZE response.append([dude, i, note, distance]) continue assert dude !=",
"= 1 # Check regular spots after I left circle for t in",
"%d player game.\" %NumPlayers) if NumPlayers == 2 or NumPlayers == -2: Players.append(\"Blue\")",
"homeloc is home space # Move into Home if homeloc >= 0 and",
"HOME # NB: Add special cases for Blue, with start space of 0,",
"Catch all assert False return False # # SortMoves(myList) # # Used by",
"color color from source to destination. # def Move(color, source, destination): global CenterSpace",
"pass teammate badMove = 1 # End of for i in range(1,die) if",
"= 1 elif pColor == \"Green\": # Take a random option selection =",
"+ CenterSpace note += \"]\" if not ValidMove(dude, CENTER, die, color): assert False",
"0 robotMode = 0 # A human is needed numPlayers = Setup() if",
"== finalspot: # End where I started special=1 note = \"\" if (finalspot",
"turnNum += 1 for p in range(0,numPlayers): again=1 # Flag for when a",
"== 6) and (Board[Start[color]]!=color): return True return False assert marble != BASE #",
"Home[color] # hm means Home[color] hp = dude-HOME # hp means Home Position",
"= 1 for i in range(0,len(moves)): if \"Bonk\" in moves[i][2]: selection = i+1",
"base spots. All are 99. HOME=100 # \"Location\" for home spots - 100,",
"the end of Home badMove = 1 elif Home[color][testloc]: # somebody in the",
"source if source == CENTER: assert CenterSpace == color CenterSpace = \"\" moveDesc",
"< HOMESIZE: if not ValidMove(dude, HOME+homeloc, die, color): assert False response.append([dude, HOME+homeloc, \"[Home]\",",
"in range(1,die+1): if(hp+i >= HOMESIZE): return False if hp+i > HOMESIZE or hm[hp+i]",
"+= \" & Bonk \" + Board[i] note += \"]\" if not ValidMove(dude,",
"do that. thiscolor = list(Start.keys())[list(Start.values()).index(i)] #output[i] = startColor[thiscolor]+\"#\"+creset output[i] = startColor[thiscolor]+chr(0x00BB)+creset output[i] =",
"randint(1,6) # # Display(): # # Prints out the state of the board.",
"game been setup? while not Setup: try: Setup=1 NumPlayers = int(input(\"How many players?",
"+ \"!\" Bonk(CENTER) CenterSpace = color elif destination >= HOME: assert Home[color][destination-HOME] !=",
"= 1 else: # Still on the main board if Board[testloc%BOARDSIZE] == color:",
"def IsWinner(color): win=1 for i in range(0, HOMESIZE): if Home[color][i] != color: win=0",
"selection = 1 GotInput = 1 else: print (\"Bad input\") GotInput = 0",
"0 except TypeError: print (\"Bad input\") GotInput = 0 src,dst,note,distance = moves[selection-1] if",
"# print (\"[Entering] GetMoves(color=%s die=%d)\" %(color,die)) # List that we'll be returning with",
"or (Board[finalspot]): note += \"[\" if finalspot in MagicCircle: note += \"Magic Circle\"",
"This could be replaced with Tk or something else. # def Display(): #",
"%p, end=\"\") print (\"Base:\\t\", end=\"\") for b in Base[p]: if b == \"\":",
"is home space # Move into Home if homeloc >= 0 and homeloc",
"is number of hops out of circle MoveToCheck = (circleExit + t)%BOARDSIZE if",
"between 2 and 6.\") Setup = 0 except ValueError: print (\"Please enter a",
"hm means Home[color] hp = dude-HOME # hp means Home Position valid=1 for",
"%d, %d, %s)\" %(src,dst,myRoll,pColor)) return False response = Move(pColor, src, dst) Display() print",
"# Gets the board ready for a new game, and assigns player colors.",
"c, c] Home[c] = [ \"\", \"\", \"\", \"\"] # Where are my",
"CENTER # MAGIC CIRCLE HANDLING # If I'm in the magic circle, I",
"or hope 2 magic circle space and then continue the # normal track,",
"Board = [\"\" for x in range(0,BOARDSIZE)] CenterSpace = \"\" MagicCircle = [",
"each color's base status Home = {} # Dict of each color's home",
"Home[p]: if h == \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print",
"# End of for i in range(1,die) if not badMove: # Valid moves",
"# Blue is the player .. or she chooses 1 # Deckard is",
"space if dude == CENTER: if die==1: for i in MagicCircle: if Board[i]",
"Still on the main board if Board[testloc] == color: # Can't pass teammate",
"if NumPlayers == 0: print (\"The only way to win is not to",
"Board[testloc%BOARDSIZE] == color: # Can't pass teammate badMove = 1 # End of",
"range(0, HOMESIZE): if Home[color][i] != color: win=0 break return bool(win) def TkSetup(): root",
"1 # Check regular spots after I left circle for t in range(0,die-i+1):",
"# most of thie duplicate logic from GetMoves and have it here only.",
"\"White\": 70 } # # Roll(): # # Roll a die. # Returns",
"+= \"Home[\" + str(source-HOME+1) + \"] -> \" else: assert Board[source] == color",
"# print (\"[Entering] ValidMove(src=%d, dest=%d, die=%d, color=%s)\" %(marble, destination, die, color)) assert die",
"roll a 6 and want to do # a full revolution if dude",
"for base spots. All are 99. HOME=100 # \"Location\" for home spots -",
"Ran off the end of Home return False elif Home[color][testloc]: # somebody in",
"Home[color] # hm means Home[color] hp = marble-HOME # hp means Home Position",
"destinations if destination == CENTER: assert CenterSpace != color moveDesc += \"[Center] \"",
"having problems. :) I should probably remove # most of thie duplicate logic",
"(dude+die)%BOARDSIZE in MagicCircle and Board[(dude+die)%BOARDSIZE]: note += \" & \" if Board[(dude+die)%BOARDSIZE]: note",
"GetMoves(color=%s die=%d) - Check %d\" %(color,die,dude)) note =\"\" # Just in case, clear",
"space around the main track CENTER=98 # \"Location\" of the center of death.",
"42: print (\"\\t\", end=\"\") print (output[104-i], end=\"\") # Print it backwards print(\"\\n\") for",
"color: # Handle case where I roll a 6 and want to do",
"BOARDSIZE for i in range(1,die+1): testloc = dude+i if not badMove and testloc",
"== 20: print() elif i >= 21 and i < 42: if i",
"A nice dot #print (\"-\", end=\"\") # Occupied space else: # If you're",
"die > 0 and die < 7 assert color in Colors assert color",
"and die == 6 and marble in MagicCircle: return True # MAGIC CIRCLE",
"TkSetup(): root = tkinter.Tk() root.title(\"Marbles!\") canvas = tk.Canvas(root, width=200, height=200, borderwidth=0, bg=\"black\") canvas.grid()",
"b in Base[p]: if b == \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\")",
"MagicCircle or Board[finalspot]: note += \"]\" if not ValidMove(dude, finalspot, die, color): assert",
">= HOME: assert Home[color][destination-HOME] != color Home[color][destination-HOME] = color moveDesc += \"Home[\" +",
"# # SortMoves(myList) # # Used by .sorted to return lists in order",
"# Dict of each color's home status Marbles = {} # Dict of",
"== \"Green\": # Take a random option selection = randint(1,len(moves)) GotInput = 1",
"= chr(0x00A7) # Hurricane print(\"\\t%s\\t %s\\t %s\" %(output[104-i],cen,output[i])) else: print(\"\\t\"+output[104-i],\"\\t\\t \",output[i]) elif i",
"color's marble locations Players = [] # List of active players # Marbles[color]",
"the main board if Board[testloc] == color: # Can't pass teammate return False",
"board ready for a new game, and assigns player colors. # Returns: Number",
"print(output[i], end=\"\") if i == 20: print() elif i >= 21 and i",
"destination != CENTER # Special case of 6 in the magic circle ending",
"(\"ERROR: ValidMove(%d, %d, %d, %s)\" %(src,dst,myRoll,pColor)) return False response = Move(pColor, src, dst)",
"not ValidMove(dude, i, die, color): assert False distance = BOARDSIZE - (i -",
"Take another turn.\" %pColor) again=1 if IsWinner(pColor): print (\"%s wins in %d turns!\"",
"a 6! Take another turn.\" %pColor) again=1 if IsWinner(pColor): print (\"%s wins in",
"you start if marble == destination and die == 6 and marble in",
"print (\"Please enter a number between 2 and 6.\") Setup = 0 except",
"for start startColor={ \"Blue\": \"\\033[1;34;40m\", \"Red\": \"\\033[1;31;40m\", \"Cyan\": \"\\033[1;36;40m\", \"Purple\": \"\\033[1;35;40m\", \"Green\": \"\\033[1;32;40m\",",
"that color's start destination = Start[color] moveDesc += \"[Base] -> \" elif source",
"# back to the nearest magic circle space, checking # that walk. if",
"spots - 100, 101, 102, 103 HOMESIZE=4 # How big is your home?",
"elif i in Start.values(): # What's this? I need to get the color",
"\"Bonk \" + Board[destination] + \"!\" print (\"Bonk! %s hits %s!\" %(color,Board[destination])) Bonk(destination)",
"or pColor == \"White\": # Always take the first option selection = 1",
"Board elif loc < myStart: if Board[loc]: note = \"[Bonk \" + Board[loc]",
"marble == destination and die == 6 and marble in MagicCircle: return True",
"print (\"Please enter a number between 2 and 6.\") Setup=0 except KeyError: print",
"player options with a die roll # def GetMoves(color,die): assert die > 0",
"# def Roll(): return randint(1,6) # # Display(): # # Prints out the",
"Home elif dude >= HOME: hm = Home[color] # hm means Home[color] hp",
"else: print (\"\\t[%d] %d -> %d %s\" %(option, strt, finish, note)) option+=1 try:",
"#sub_li.sort(reverse=True,key = lambda x: x[0]) return sub_li # # GetMoves (color, die) #",
"Bonk \" + Board[Start[color]] note += \"]\" if not ValidMove(dude, Start[color], die, color):",
"the end of Home return False elif Home[color][testloc]: # somebody in the way",
"with possible destinations if destination == CENTER: assert CenterSpace != color moveDesc +=",
"enter a number between 2 and 6.\") Setup = 0 print (\"Preparing a",
"%d -> Home[%d] %s\" %(option, strt, finish-HOME+1, note)) elif strt == CENTER: print(\"\\t[%d]",
"# that walk. if Board[destination-i] == color: return False return True assert marble",
"strt, finish, note)) option+=1 try: selection = int(input(pColor + \": Please select an",
"False assert marble != CENTER assert destination != CENTER # Special case of",
"8 response.append([dude, CENTER, note, distance]) # If I'm in the center and I",
"%s\" %(option,note)) else: if finish == CENTER: print (\"\\t[%d] %d -> Center %s\"",
"continue if valid: if not ValidMove(dude, dude+die, die, color): assert False response.append([dude, dude+die,",
"and CenterSpace != color: yep=1 for i in range(1,die+1): if Board[dude+i] == color:",
"selection = 0 # Red always goes for the kill # White tried",
"Test the spaces between here and my final location for # teammates for",
"to win is not to play.\") NumPlayers = -6 robotMode = 1 elif",
"0 and homeloc < HOMESIZE: if not ValidMove(dude, HOME+homeloc, die, color): assert False",
"means Home Position valid=1 for i in range(1,die+1): if(hp+i >= HOMESIZE): valid=0 continue",
"can enter the Center. dude+die-1 is equal to MagicCircle+1 if dude+die-1 in MagicCircle",
"102, 103 HOMESIZE=4 # How big is your home? Colors = [ \"Blue\",",
"= color moveDesc += \"Home[\" + str(destination-HOME+1) + \"]\" else: # Board destination",
"== 1 or die == 6) and (Board[Start[color]]!=color) and (1==firstStart): note = \"[Start\"",
"value. So here's a bunch of casting black magic to # do that.",
"guy back to base # def Bonk(space): if space == CENTER: deadGuy =",
"continue if not badMove: # Add this to the list # Special processing:",
"0 except ValueError: if len(moves) == 1: selection = 1 GotInput = 1",
"print (\"The only way to win is not to play.\") NumPlayers = -6",
"started special=1 note = \"\" if (finalspot in MagicCircle) or (Board[finalspot]): note +=",
"my final location for # teammates for i in range(1,die+1): testloc = marble+i",
"means I can land on myself if mc == 6: pass else: badMove",
"case # letter if i in MagicCircle: output[i] = ccode[space]+space[0].upper()+creset #print (ccode[space]+space[0].upper()+creset, end=\"\")",
"game board while not GameOver: # Main game loop turnNum += 1 for",
"us. Marbles[c] = [BASE, BASE, BASE, BASE ] robotMode = 0 Setup =",
"HOME: assert Home[color][destination-HOME] != color Home[color][destination-HOME] = color moveDesc += \"Home[\" + str(destination-HOME+1)",
"i == 42: print (\"\\t\", end=\"\") print (output[104-i], end=\"\") # Print it backwards",
"hate Blue in this game myStart = BOARDSIZE if marble < myStart and",
"die, color): assert False distance = BOARDSIZE - (i - Start[color]) % BOARDSIZE",
"%(option, strt, finish-HOME+1, note)) elif strt == CENTER: print(\"\\t[%d] Center -> %d %s\"",
"# Dict of each color's marble locations Players = [] # List of",
"yep=0 if yep: note = \"[Center\" if CenterSpace: note += \" & Bonk",
"to us. Marbles[c] = [BASE, BASE, BASE, BASE ] robotMode = 0 Setup",
"\"Purple\" or pColor == \"White\": # Always take the first option selection =",
"or NumPlayers == -2: Players.append(\"Blue\") Players.append(\"Purple\") elif NumPlayers == 3 or NumPlayers ==",
"dude+die-1 in MagicCircle and CenterSpace != color: yep=1 for i in range(1,die+1): if",
"color): assert False distance = BOARDSIZE - ((dude+die)%BOARDSIZE - Start[color]) % BOARDSIZE response.append([dude,",
"Setup = 0 except TypeError: print (\"Please enter a number between 2 and",
"bunch of casting black magic to # do that. thiscolor = list(Start.keys())[list(Start.values()).index(i)] #output[i]",
"== color: # Can't pass teammate return False # Checked all intermediate spaces,",
"1 elif NumPlayers >= -6 and NumPlayers <= -2: print (\"Like tears in",
"(\"[] GetMoves(color=%s die=%d) - Check %d\" %(color,die,dude)) note =\"\" # Just in case,",
"elif dude >= HOME: hm = Home[color] # hm means Home[color] hp =",
"%(color,die),response) return SortMoves(response) # # IsWinner(color) # # Determine if color has won.",
"loc is destination space homeloc = loc - myStart # homeloc is home",
"1 or selection > len(moves): print (\"That's not an option. Try again.\") GotInput",
"moves: print (\"No moves available.\") continue GotInput = 0 selection = 0 #",
"(\"\\t\", end=\"\") print(output[i], end=\"\") if i == 20: print() elif i >= 21",
"(dude+die)%BOARDSIZE, note, distance]) # Done! # print (\"[Leaving] GetMoves(color=%s die=%d) =\" %(color,die),response) return",
"win is not to play.\") NumPlayers = -6 robotMode = 1 elif NumPlayers",
"if it can get out if marble == BASE: assert destination == Start[color]",
"testloc >= HOMESIZE: # Ran off the end of Home badMove = 1",
"True/False # def IsWinner(color): win=1 for i in range(0, HOMESIZE): if Home[color][i] !=",
"# A nice dot #print (\"-\", end=\"\") # Occupied space else: # If",
"the center or Home assert Board[destination] != color moveDesc += \"\" + str(destination)",
"False else: # Still on the main board if Board[testloc] == color: #",
"loc, die, color): assert False distance = BOARDSIZE - (loc - Start[color]) %",
"the index in that list circleNum = MagicCircle.index(dude) # Lots of permutations for",
"she chooses 1 # Deckard is a replicant! if robotMode and pColor ==",
"a replicant! if robotMode and pColor == \"Blue\": selection = 1 GotInput =",
"output[i] = ccode[space]+space[0].upper()+creset #print (ccode[space]+space[0].upper()+creset, end=\"\") else: output[i] = ccode[space]+space[0].lower()+creset #print (ccode[space]+space[0].lower()+creset, end=\"\")",
"see if it can get out if dude == BASE: if (die ==",
"only way to win is not to play.\") NumPlayers = -6 robotMode =",
"%s\" %(option, strt, finish, note)) option+=1 try: selection = int(input(pColor + \": Please",
"destination is not the center or Home assert Board[destination] != color moveDesc +=",
"== CENTER: print(\"\\t[%d] Center -> %d %s\" %(option,finish,note)) elif strt == BASE: print",
"marble >= HOME: assert marble < HOME+HOMESIZE assert destination >= HOME hm =",
"mc in range(1,i+1): if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] == color: # Passed through teammate # 6",
"True return False assert marble != BASE # CENTER SPACE HANDLING # If",
"# A human is needed numPlayers = Setup() if numPlayers <= 0: robotMode",
"# testloc is in the Home zone testloc -= myStart # How many",
"print (output[104-i], end=\"\") # Print it backwards print(\"\\n\") for p in Players: print",
"if testloc >= HOMESIZE: # Ran off the end of Home return False",
"1 # Purple kills # Green picks randomly from choices # Blue is",
"Use a # to indicate start spaces elif i in Start.values(): # What's",
"NumPlayers == 0: print (\"The only way to win is not to play.\")",
"if myStart == 0: # I have grown to hate Blue in this",
"return SortMoves(response) # # IsWinner(color) # # Determine if color has won. Returns",
"chooses 1 # Deckard is a replicant! if robotMode and pColor == \"Blue\":",
"Board[loc]: note = \"[Bonk \" + Board[loc] + \"]\" if not ValidMove(dude, loc,",
"dude == CENTER: if die==1: for i in MagicCircle: if Board[i] != color:",
"1 elif NumPlayers < 2 or NumPlayers > 6: print (\"Please enter a",
"if Board[(marble+i)%BOARDSIZE] == color: return False return True # Leaving the Center space",
"die==1 and Board[destination] != color: return True else: return False assert marble !=",
"color): assert False distance = BOARDSIZE - (i - Start[color]) % BOARDSIZE response.append([dude,",
"an upper case # letter if i in MagicCircle: output[i] = ccode[space]+space[0].upper()+creset #print",
"assert destination != CENTER # Special case of 6 in the magic circle",
"+= \" & Bonk \" + Board[Start[color]] note += \"]\" if not ValidMove(dude,",
"\" if Board[finalspot] and not special: note += \"Bonk \" + Board[finalspot] if",
"the move from marble to destination via die is valid # Returns True",
"dude+die # loc is destination space homeloc = loc - myStart # homeloc",
"1 if pColor == \"Red\" or pColor == \"Purple\": # Blood shall flow",
"exited for j in range(magicStart, magicDestination+1): if Board[MagicCircle[j]] == color: if marble ==",
"\"Bonk \" + Board[finalspot] if finalspot in MagicCircle or Board[finalspot]: note += \"]\"",
"selection = int(input(pColor + \": Please select an option: \")) GotInput = 1",
"%s\" \\ %(option, strt-HOME+1, finish-HOME+1, note)) else: print(\"\\t[%d] %d -> Home[%d] %s\" %(option,",
"there's a teammate at the destination if destination < BOARDSIZE: if Board[destination] ==",
"Board[Start[color]] note += \"]\" if not ValidMove(dude, Start[color], die, color): assert False response.append([dude,",
"moves[selection-1] if not ValidMove(src,dst,myRoll,pColor): print (\"ERROR: ValidMove(%d, %d, %d, %s)\" %(src,dst,myRoll,pColor)) return False",
"\"Bonk \" + Board[(dude+die)%BOARDSIZE] if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"]\"",
"location1, location2, location3 } # Start[color] : space# Start = { \"Blue\": 0,",
"How big is your home? Colors = [ \"Blue\", \"Red\", \"Cyan\", \"Purple\", \"Green\",",
"elif destination >= HOME: assert Home[color][destination-HOME] != color Home[color][destination-HOME] = color moveDesc +=",
"for i in range(0,BOARDSIZE): space = Board[i] if space == \"\": # Use",
"+= \"Magic Circle\" if (dude+die)%BOARDSIZE in MagicCircle and Board[(dude+die)%BOARDSIZE]: note += \" &",
"values # to the index in that list magicStart = MagicCircle.index(marble) for i",
">= Start[color]) or \\ (Start[color] == 0 and dude < Start[color]+BOARDSIZE and dude+die",
"+ \"]\" else: # Board destination is not the center or Home assert",
"not in the magic circle, so walk # back to the nearest magic",
"(\"[Leaving] GetMoves(color=%s die=%d) =\" %(color,die),response) return SortMoves(response) # # IsWinner(color) # # Determine",
"so we can bop around by adding die values # to the index",
"Main # def Main(): GameOver = 0 # Is the game over turnNum",
"needed numPlayers = Setup() if numPlayers <= 0: robotMode = 1 numPlayers *=",
"False return True assert marble not in MagicCircle # MOVEMENT INTO HOME myStart",
"if not badMove: # Add this to the list # Special processing: If",
"of active players # Marbles[color] : { location0, location1, location2, location3 } #",
"spots I traversed for mc in range(1,i+1): if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] == color: # Passed",
"assert Base[color].count(color) > 0 Base[color].remove(color) # The destination is that color's start destination",
"Cyan takes option 1 # Purple kills # Green picks randomly from choices",
"Base[deadGuy].append(deadGuy) # # Move(color, source, destination): # # Move marble of color color",
"#output[i] = startColor[thiscolor]+\"#\"+creset output[i] = startColor[thiscolor]+chr(0x00BB)+creset output[i] = startColor[thiscolor]+chr(0x033F)+creset #print (startColor[thiscolor]+\"#\"+creset, end=\"\") elif",
">= HOMESIZE): return False if hp+i > HOMESIZE or hm[hp+i] == color: return",
"enter the Center. dude+die-1 is equal to MagicCircle+1 if dude+die-1 in MagicCircle and",
"# \"NORMAL\" MOVEMENT if marble not in MagicCircle and marble < BOARDSIZE and",
"70 } # # Roll(): # # Roll a die. # Returns an",
"end=\"\") elif i % 10 == 0: output[i] = str(i // 10) else:",
"= 1 elif Home[color][testloc]: # somebody in the way badMove = 1 else:",
"Move(color, source, destination): global CenterSpace moveDesc = color + \": \" # Remove",
"lambda x: x[3]) #sub_li.sort(reverse=True,key = lambda x: x[0]) return sub_li # # GetMoves",
"if(hp+i >= HOMESIZE): return False if hp+i > HOMESIZE or hm[hp+i] == color:",
"if i == 0: print (\"\\t\", end=\"\") print(output[i], end=\"\") if i == 20:",
"if robotMode and pColor == \"Blue\": selection = 1 GotInput = 1 if",
"!= color: win=0 break return bool(win) def TkSetup(): root = tkinter.Tk() root.title(\"Marbles!\") canvas",
"+ str(source) + \" -> \" # Deal with possible destinations if destination",
"destination == Start[color] if (die == 1 or die == 6) and (Board[Start[color]]!=color):",
"in Colors assert color in Players # print (\"[Entering] GetMoves(color=%s die=%d)\" %(color,die)) #",
"Start[color]+BOARDSIZE and dude+die >= Start[color]+BOARDSIZE): badMove = 0 myStart = Start[color] if myStart",
"# frustrating! # from random import randint #import tkinter # GLOBALS BOARDSIZE =",
"color: return False return True # Catch all assert False return False #",
"in MagicCircle: # magicStart is the index of where we are in the",
"!= CENTER # MAGIC CIRCLE HANDLING # If I'm in the magic circle,",
"tears in rain.\") robotMode = 1 elif NumPlayers < 2 or NumPlayers >",
"Bonk(space): if space == CENTER: deadGuy = CenterSpace else: deadGuy = Board[space] Board[space]",
"Roll(): # # Roll a die. # Returns an int between 1 and",
"if hp+i > HOMESIZE or hm[hp+i] == color: valid=0 continue if valid: if",
"from # here on out.. circleBlock = 1 continue if circleBlock: continue if",
"+ Board[(dude+die)%BOARDSIZE] if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"]\" if not",
"CenterSpace else: deadGuy = Board[space] Board[space] = \"\" Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy) # #",
"for i in range(0, HOMESIZE): if Home[color][i] != color: win=0 break return bool(win)",
"HOMESIZE): if Home[color][i] != color: win=0 break return bool(win) def TkSetup(): root =",
"%d -> %d %s\" %(option, strt, finish, note)) option+=1 try: selection = int(input(pColor",
"1 else: print (\"Bad input\") GotInput = 0 except TypeError: print (\"Bad input\")",
"False assert marble != BASE # CENTER SPACE HANDLING # If my roll",
"# hp means Home Position valid=1 for i in range(1,die+1): if(hp+i >= HOMESIZE):",
"\" & Bonk \" + Board[Start[color]] note += \"]\" if not ValidMove(dude, Start[color],",
"add Start once for dude in Marbles[color]: # print (\"[] GetMoves(color=%s die=%d) -",
"# hm means Home[color] hp = dude-HOME # hp means Home Position valid=1",
"circleNum is the index of where we are in the magic # circle",
"len(moves) == 1: selection = 1 GotInput = 1 else: print (\"Bad input\")",
"(marble, destination, die) # # Check if the move from marble to destination",
"print (ccode[h]+h[0].lower()+creset, end=\"\") print() # # Setup(): # # Gets the board ready",
"== -2: Players.append(\"Blue\") Players.append(\"Purple\") elif NumPlayers == 3 or NumPlayers == -3: Players.append(\"Blue\")",
"t is number of hops out of circle MoveToCheck = (circleExit + t)%BOARDSIZE",
"full revolution if dude != MoveToCheck: # If it is not me, then",
"and die < 7 assert color # Quick check to see if there's",
"NumPlayers == 2 or NumPlayers == -2: Players.append(\"Blue\") Players.append(\"Purple\") elif NumPlayers == 3",
"marble-HOME # hp means Home Position for i in range(1,die+1): if(hp+i >= HOMESIZE):",
"(response) if myRoll == 6: print(\"%s rolled a 6! Take another turn.\" %pColor)",
"print (ccode[b]+b[0].lower()+creset, end=\"\") print (\"\\tHome:\\t\", end=\"\") for h in Home[p]: if h ==",
"for i in range(0,len(moves)): if \"Bonk\" in moves[i][2]: selection = i+1 print (\"Kill!\",",
"player .. or she chooses 1 # Deckard is a replicant! if robotMode",
"tkinter # GLOBALS BOARDSIZE = 84 # Number of space around the main",
"# IsWinner(color) # # Determine if color has won. Returns True/False # def",
"it is someone else badMove = 1 if t==0: # The magic circle",
"replicant! if robotMode and pColor == \"Blue\": selection = 1 GotInput = 1",
"False response.append([dude, dude+die, \"[Home]\", 0]) # \"NORMAL\" MOVEMENT elif Board[(dude+die)%BOARDSIZE] != color: selfPass",
"BASE ] robotMode = 0 Setup = 0 # Has the game been",
"False distance = BOARDSIZE - (loc - Start[color]) % BOARDSIZE response.append([dude, loc, note,",
"GotInput = 0 selection = 0 # Red always goes for the kill",
"of GetMoves() but it serves as a # check because I was having",
"for mc in range(1,i+1): if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] == color: # Passed through teammate #",
"return NumPlayers # # Bonk! # # send a guy back to base",
"get an upper case # letter if i in MagicCircle: output[i] = ccode[space]+space[0].upper()+creset",
"#cen = \"-\" #cen = chr(216) cen = chr(0x00A7) # Hurricane print(\"\\t%s\\t %s\\t",
"hm = Home[color] # hm means Home[color] hp = dude-HOME # hp means",
"magic circle is poisoned from # here on out.. circleBlock = 1 continue",
"If I'm in the center and I got a one, I can roll",
"not selfPass: note = \"\" if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note +=",
"CENTER: print (\"\\t[%d] %d -> Center %s\" %(option,strt,note)) elif finish in MagicCircle: print",
"always goes for the kill # White tried to be optimal, but sucked",
"dude+die >= Start[color]+BOARDSIZE): badMove = 0 myStart = Start[color] if myStart == 0:",
"1 or die == 6) and (Board[Start[color]]!=color): return True return False assert marble",
"import randint #import tkinter # GLOBALS BOARDSIZE = 84 # Number of space",
"def Roll(): return randint(1,6) # # Display(): # # Prints out the state",
"is the index of where we are in the magic # circle list,",
"Board[(dude+i)%BOARDSIZE] == color: selfPass = 1 continue if not selfPass: note = \"\"",
"the marbles ccode={ # [magic];Attrib;FG;BGm \"Blue\": \"\\033[1;97;44m\", \"Red\": \"\\033[1;97;41m\", \"Cyan\": \"\\033[1;97;46m\", \"Purple\": \"\\033[1;97;45m\",",
"marble != BASE # CENTER SPACE HANDLING # If my roll can take",
"Home[color][testloc]: # somebody in the way return False else: # Still on the",
"robotMode = 1 numPlayers *= -1 # TkSetup() Display() # Show the initial",
"each color's home status Marbles = {} # Dict of each color's marble",
"finalspot in MagicCircle and Board[finalspot] and not special: note += \" & \"",
"response.append([dude, i, note, distance]) continue assert dude != CENTER # MAGIC CIRCLE HANDLING",
"if finalspot in MagicCircle and Board[finalspot] and not special: note += \" &",
"circle if i in MagicCircle: #output[i] = \"*\" #print (\"*\", end=\"\") output[i] =",
"(die-i))%BOARDSIZE # Now verify that I didn't pass a teammate between dude #",
"end=\"\") print(output[i], end=\"\") if i == 20: print() elif i >= 21 and",
"# def GetMoves(color,die): assert die > 0 and die < 7 assert color",
"# If it is not me, then it is someone else badMove =",
"homeloc = loc - myStart # homeloc is home space # Move into",
"pColor == \"Red\" or pColor == \"Purple\": # Blood shall flow GotInput =",
"> len(moves): print (\"That's not an option. Try again.\") GotInput = 0 except",
"elif pColor == \"Cyan\" or pColor == \"Purple\" or pColor == \"White\": #",
"Players.append(\"White\") Players.append(\"Cyan\") elif NumPlayers == 5 or NumPlayers == -5: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\")",
"a one, I can roll out to any # magic circle space if",
"is not empty if Board[destination]: moveDesc += \"Bonk \" + Board[destination] + \"!\"",
"circle list, so we can bop around by adding die values # to",
"+ Board[Start[color]] note += \"]\" if not ValidMove(dude, Start[color], die, color): assert False",
"I exited for j in range(magicStart, magicDestination+1): if Board[MagicCircle[j]] == color: if marble",
"marble < myStart and marble+die >= myStart: # Test the spaces between here",
"myRoll) if not moves: print (\"No moves available.\") continue GotInput = 0 selection",
"list(Start.keys())[list(Start.values()).index(i)] #output[i] = startColor[thiscolor]+\"#\"+creset output[i] = startColor[thiscolor]+chr(0x00BB)+creset output[i] = startColor[thiscolor]+chr(0x033F)+creset #print (startColor[thiscolor]+\"#\"+creset, end=\"\")",
"around by adding die values # to the index in that list magicStart",
"me, then it is someone else badMove = 1 if t==0: # The",
"(output[104-i], end=\"\") # Print it backwards print(\"\\n\") for p in Players: print (\"%s\\t\"",
"1 elif pColor == \"Green\": # Take a random option selection = randint(1,len(moves))",
"ccode[space]+space[0].lower()+creset #print (ccode[space]+space[0].lower()+creset, end=\"\") for i in range(0,BOARDSIZE): if i >=0 and i",
"not badMove: # Add this to the list # Special processing: If the",
"option. Try again.\") GotInput = 0 except ValueError: if len(moves) == 1: selection",
"testloc >= myStart: # testloc is in the Home zone testloc -= myStart",
"start of 0 myStart = BOARDSIZE for i in range(1,die+1): testloc = dude+i",
"(ccode[space]+space[0].lower()+creset, end=\"\") for i in range(0,BOARDSIZE): if i >=0 and i < 21:",
"selection = 1 elif pColor == \"Cyan\" or pColor == \"Purple\" or pColor",
"happened? # Movement WITHIN Home if marble >= HOME: assert marble < HOME+HOMESIZE",
"and want to do # a full revolution if dude != MoveToCheck: #",
"Players.append(\"Blue\") Players.append(\"Cyan\") Players.append(\"Green\") elif NumPlayers == 4 or NumPlayers == -4: Players.append(\"Blue\") Players.append(\"Purple\")",
"!= color Home[color][destination-HOME] = color moveDesc += \"Home[\" + str(destination-HOME+1) + \"]\" else:",
"i in range(1,die+1): testloc = dude+i if not badMove and testloc >= myStart:",
"- myStart # homeloc is home space # Move into Home if homeloc",
"= Board[space] Board[space] = \"\" Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy) # # Move(color, source, destination):",
"\"-\" #cen = chr(216) cen = chr(0x00A7) # Hurricane print(\"\\t%s\\t %s\\t %s\" %(output[104-i],cen,output[i]))",
"this marble is in Base, see if it can get out if dude",
"% BOARDSIZE response.append([dude, loc, note, distance]) # Movement WITHIN Home elif dude >=",
"6: print (\"Please enter a number between 2 and 6.\") Setup=0 except KeyError:",
"and assigns player colors. # Returns: Number of Players # def Setup(): #",
"(\"%s\\t\" %p, end=\"\") print (\"Base:\\t\", end=\"\") for b in Base[p]: if b ==",
"nice dot #print (\"-\", end=\"\") # Occupied space else: # If you're on",
"destination. # def Move(color, source, destination): global CenterSpace moveDesc = color + \":",
"6 and marble in MagicCircle: return True # MAGIC CIRCLE HANDLING if marble",
"# # Move(color, source, destination): # # Move marble of color color from",
"cen = chr(0x00A7) # Hurricane print(\"\\t%s\\t %s\\t %s\" %(output[104-i],cen,output[i])) else: print(\"\\t\"+output[104-i],\"\\t\\t \",output[i]) elif",
"elif finish in MagicCircle: print (\"\\t[%d] %d -> %d %s\" %(option, strt, finish,note))",
"all intermediate spaces, and destination space homeloc = destination - HOME # homeloc",
"If it is not me, then it is someone else badMove = 1",
"INTO HOME # NB: Add special cases for Blue, with start space of",
"and 6.\") Setup = 0 print (\"Preparing a %d player game.\" %NumPlayers) if",
"GetMoves (color, die) # # Return a list of the valid player options",
"list, so we can bop around by adding die values # to the",
"and I got a one, I can roll out to any # magic",
"loc - myStart # homeloc is home space # Move into Home if",
"\"\\033[1;36;40m\", \"Purple\": \"\\033[1;35;40m\", \"Green\": \"\\033[1;32;40m\", \"White\": \"\\033[1;37;40m\", } # Reset the color to",
"goes for the kill # White tried to be optimal, but sucked so",
"= 0 except ValueError: print (\"Please enter a number between 2 and 6.\")",
">= HOMESIZE: # Ran off the end of Home badMove = 1 elif",
"a 6 in magic # circle, that isn't bonking because it is me.",
"\" # Remove marble from source if source == CENTER: assert CenterSpace ==",
"+ \": \" # Remove marble from source if source == CENTER: assert",
"I # can enter the Center. marble+die-1 is equal to MagicCircle+1 # Entering",
"= \"\" if (finalspot in MagicCircle) or (Board[finalspot]): note += \"[\" if finalspot",
"BASE: if (die == 1 or die == 6) and (Board[Start[color]]!=color) and (1==firstStart):",
"\"\\033[1;97;46m\", \"Purple\": \"\\033[1;97;45m\", \"Green\": \"\\033[1;35;42m\", \"White\": \"\\033[1;31;47m\", } # ANSI color codes for",
"else: badMove = 1 # Check regular spots after I left circle for",
"for i in range(0,BOARDSIZE): if i >=0 and i < 21: if i",
"continue if hp+i > HOMESIZE or hm[hp+i] == color: valid=0 continue if valid:",
"GetMoves(color=%s die=%d)\" %(color,die)) # List that we'll be returning with ALL valid moves",
"is equal to MagicCircle+1 # Entering the Center space if destination == CENTER:",
"Number of Players # def Setup(): # Initialize the bases and colors for",
"# Leaving the Center space if marble == CENTER: if die==1 and Board[destination]",
"1 or die == 6) and (Board[Start[color]]!=color) and (1==firstStart): note = \"[Start\" if",
"Home[%d] -> Home[%d] %s\" \\ %(option, strt-HOME+1, finish-HOME+1, note)) else: print(\"\\t[%d] %d ->",
"28, \"Purple\": 42, \"Green\": 56, \"White\": 70 } # # Roll(): # #",
"not special: note += \"Bonk \" + Board[finalspot] if finalspot in MagicCircle or",
"if Board[testloc%BOARDSIZE] == color: # Can't pass teammate badMove = 1 # End",
"= 1 elif pColor == \"Cyan\" or pColor == \"Purple\" or pColor ==",
"return True assert marble not in MagicCircle # MOVEMENT INTO HOME myStart =",
"# Show the initial game board while not GameOver: # Main game loop",
"user input menu for move in moves: strt, finish, note, distance = move",
"badMove = 1 else: # Still on the main board if Board[testloc%BOARDSIZE] ==",
"# Test the spaces between here and my final location for # teammates",
"= {} # Dict of each color's marble locations Players = [] #",
"Quick check to see if there's a teammate at the destination if destination",
"\")) GotInput = 1 if selection < 1 or selection > len(moves): print",
"numPlayers <= 0: robotMode = 1 numPlayers *= -1 # TkSetup() Display() #",
"range(0,die-i+1): # t is number of hops out of circle MoveToCheck = (circleExit",
"%s\" %(option, strt, finish-HOME+1, note)) elif strt == CENTER: print(\"\\t[%d] Center -> %d",
"== 42: print (\"\\t\", end=\"\") print (output[104-i], end=\"\") # Print it backwards print(\"\\n\")",
"roll is a 6 in magic # circle, that isn't bonking because it",
"space = Board[i] if space == \"\": # Use a * to indicate",
"+ Board[destination] + \"!\" print (\"Bonk! %s hits %s!\" %(color,Board[destination])) Bonk(destination) Board[destination] =",
"MagicCircle.index(destination-i) # Check all the magic spaces between where I entered # and",
"= 1 if pColor == \"Red\" or pColor == \"Purple\": # Blood shall",
"= MagicCircle.index(destination-i) # Check all the magic spaces between where I entered #",
"if (dude+die)%BOARDSIZE in MagicCircle and Board[(dude+die)%BOARDSIZE]: note += \" & \" if Board[(dude+die)%BOARDSIZE]:",
"== BASE: if (die == 1 or die == 6) and (Board[Start[color]]!=color) and",
"#print (\"-\", end=\"\") # Occupied space else: # If you're on the magic",
"output[i] = chr(0x00A4) # cool circle thing # Use a # to indicate",
"return False assert marble != BASE # CENTER SPACE HANDLING # If my",
"%d\\n\" %(pColor, myRoll)) moves = GetMoves(pColor, myRoll) if not moves: print (\"No moves",
"here and my final location for # teammates for i in range(1,die+1): testloc",
"\"NORMAL\" MOVEMENT if marble not in MagicCircle and marble < BOARDSIZE and destination",
"again=1 if IsWinner(pColor): print (\"%s wins in %d turns!\" %(pColor, turnNum)) GameOver =",
"spots. All are 99. HOME=100 # \"Location\" for home spots - 100, 101,",
"== color: valid=0 continue if valid: if not ValidMove(dude, dude+die, die, color): assert",
"myStart = Start[color] if myStart == 0: # I have grown to hate",
"myStart and marble+die >= myStart: # Test the spaces between here and my",
"myStart == 0: # I have grown to hate Blue in this game",
"= -6 robotMode = 1 elif NumPlayers >= -6 and NumPlayers <= -2:",
"destination if destination < BOARDSIZE: if Board[destination] == color and marble != destination",
"not ValidMove(dude, dude+die, die, color): assert False response.append([dude, dude+die, \"[Home]\", 0]) # \"NORMAL\"",
"input menu for move in moves: strt, finish, note, distance = move if",
"been setup? while not Setup: try: Setup=1 NumPlayers = int(input(\"How many players? \"))",
"die == 6 and marble in MagicCircle: return True # MAGIC CIRCLE HANDLING",
"Board[finalspot]: note += \"]\" if not ValidMove(dude, finalspot, die, color): assert False distance",
"assert die > 0 and die < 7 assert color # Quick check",
"\"White\": \"\\033[1;37;40m\", } # Reset the color to default creset=\"\\033[m\" output = [\"-\"",
"len(moves): print (\"That's not an option. Try again.\") GotInput = 0 except ValueError:",
"and where I exited for j in range(magicStart, magicDestination+1): if Board[MagicCircle[j]] == color:",
"if Board[MagicCircle[j]] == color: if marble == destination and die == 6: return",
"Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") elif NumPlayers == 5 or NumPlayers == -5: Players.append(\"Blue\")",
"& \" if Board[(dude+die)%BOARDSIZE]: note += \"Bonk \" + Board[(dude+die)%BOARDSIZE] if (dude+die)%BOARDSIZE in",
"Players.append(\"Green\") elif NumPlayers == 4 or NumPlayers == -4: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\")",
"if CenterSpace: cen = ccode[CenterSpace]+CenterSpace[0].upper()+creset else: #cen = \"-\" #cen = chr(216) cen",
"output[i] = startColor[thiscolor]+chr(0x00BB)+creset output[i] = startColor[thiscolor]+chr(0x033F)+creset #print (startColor[thiscolor]+\"#\"+creset, end=\"\") elif i % 10",
"spaces elif i in Start.values(): # What's this? I need to get the",
"it serves as a # check because I was having problems. :) I",
"\"Red\", \"Cyan\", \"Purple\", \"Green\", \"White\" ] Board = [\"\" for x in range(0,BOARDSIZE)]",
"in range(magicStart, magicDestination+1): if Board[MagicCircle[j]] == color: if marble == destination and die",
"try: selection = int(input(pColor + \": Please select an option: \")) GotInput =",
"= BOARDSIZE - (finalspot - Start[color]) % BOARDSIZE response.append([dude, finalspot, note, distance]) #",
"way return False else: # Still on the main board if Board[testloc] ==",
"\"Cyan\" or pColor == \"Purple\" or pColor == \"White\": # Always take the",
"a * to indicate magic circle if i in MagicCircle: #output[i] = \"*\"",
"a number between 2 and 6.\") Setup = 0 print (\"Preparing a %d",
"CenterSpace note += \"]\" if not ValidMove(dude, CENTER, die, color): assert False distance",
"+= \"]\" if not ValidMove(dude, finalspot, die, color): assert False distance = BOARDSIZE",
"- (finalspot - Start[color]) % BOARDSIZE response.append([dude, finalspot, note, distance]) # MOVEMENT INTO",
"die, color): assert False distance = BOARDSIZE - (loc - Start[color]) % BOARDSIZE",
"# # This is pretty much a duplicate of GetMoves() but it serves",
"The destination is not in the magic circle, so walk # back to",
"# def IsWinner(color): win=1 for i in range(0, HOMESIZE): if Home[color][i] != color:",
"= 0 print (\"Preparing a %d player game.\" %NumPlayers) if NumPlayers == 2",
"loc, note, distance]) # Movement WITHIN Home elif dude >= HOME: hm =",
"past the MagicCircle, then I # can enter the Center. marble+die-1 is equal",
">= myStart: # Test the spaces between here and my final location for",
"== color: # Can't pass teammate badMove = 1 # End of for",
"NumPlayers > 6: print (\"Please enter a number between 2 and 6.\") Setup=0",
"distance = BOARDSIZE - (loc - Start[color]) % BOARDSIZE response.append([dude, loc, note, distance])",
"\"] -> \" else: assert Board[source] == color Board[source] = \"\" moveDesc +=",
"else: # Board destination is not the center or Home assert Board[destination] !=",
"to destination via die is valid # Returns True / False # #",
"marble is in Base, see if it can get out if dude ==",
"= MagicCircle.index(dude) # Lots of permutations for magic circle... for i in range(0,",
"6: print(\"%s rolled a 6! Take another turn.\" %pColor) again=1 if IsWinner(pColor): print",
"CenterSpace == color: return False for i in range(1,die+1): if Board[(marble+i)%BOARDSIZE] == color:",
"GameOver: # Main game loop turnNum += 1 for p in range(0,numPlayers): again=1",
"on the main board if Board[testloc] == color: # Can't pass teammate return",
"center or Home assert Board[destination] != color moveDesc += \"\" + str(destination) +",
"return True # Leaving the Center space if marble == CENTER: if die==1",
"number between 2 and 6.\") Setup=0 except KeyError: print (\"Please enter a number",
"BOARDSIZE response.append([dude, finalspot, note, distance]) # MOVEMENT INTO HOME # NB: Add special",
"# loc is destination space homeloc = loc - myStart # homeloc is",
"c, c, c, c] Home[c] = [ \"\", \"\", \"\", \"\"] # Where",
"Bonk(CENTER) CenterSpace = color elif destination >= HOME: assert Home[color][destination-HOME] != color Home[color][destination-HOME]",
"Start[color]) or \\ (Start[color] == 0 and dude < Start[color]+BOARDSIZE and dude+die >=",
"elif i % 10 == 0: output[i] = str(i // 10) else: #output[i]",
"42 and i < 63: if i == 42: print (\"\\t\", end=\"\") print",
"center and I got a one, I can roll out to any #",
"and i < 21: if i == 0: print (\"\\t\", end=\"\") print(output[i], end=\"\")",
"have grown to hate Blue in this game myStart = BOARDSIZE if marble",
"== 6) and (Board[Start[color]]!=color) and (1==firstStart): note = \"[Start\" if Board[Start[color]]: note +=",
"this is working. # def ValidMove(marble, destination, die, color): # print (\"[Entering] ValidMove(src=%d,",
"if marble == BASE: assert destination == Start[color] if (die == 1 or",
"-> Home[%d] %s\" \\ %(option, strt-HOME+1, finish-HOME+1, note)) else: print(\"\\t[%d] %d -> Home[%d]",
"# Red always goes for the kill # White tried to be optimal,",
"bonking if destination is not empty if Board[destination]: moveDesc += \"Bonk \" +",
"Base -> Start %s\" %(option,note)) else: if finish == CENTER: print (\"\\t[%d] %d",
"return bool(win) def TkSetup(): root = tkinter.Tk() root.title(\"Marbles!\") canvas = tk.Canvas(root, width=200, height=200,",
"circle MoveToCheck = (circleExit + t)%BOARDSIZE if Board[MoveToCheck] == color: # Handle case",
"of the valid player options with a die roll # def GetMoves(color,die): assert",
"> 0 and die < 7 assert color in Colors assert color in",
"possible moves firstStart=1 # Only want to add Start once for dude in",
"2 or NumPlayers > 6: print (\"Please enter a number between 2 and",
"!= color moveDesc += \"\" + str(destination) + \" \" # Deal with",
"1 # Cyan takes option 1 # Purple kills # Green picks randomly",
"= int(input(pColor + \": Please select an option: \")) GotInput = 1 if",
"I can continue normal track, or # hop one magic circle space and",
"in range(1,die): if Board[(dude+i)%BOARDSIZE] == color: selfPass = 1 continue if not selfPass:",
"space and then continue the normal # track, or hope 2 magic circle",
"to MagicCircle+1 # Entering the Center space if destination == CENTER: assert marble+die-1",
"\"Blue\", \"Red\", \"Cyan\", \"Purple\", \"Green\", \"White\" ] Board = [\"\" for x in",
"== color Board[source] = \"\" moveDesc += \"\" + str(source) + \" ->",
"CenterSpace = color elif destination >= HOME: assert Home[color][destination-HOME] != color Home[color][destination-HOME] =",
"# 6 in magic circle means I can land on myself if mc",
"so walk # back to the nearest magic circle space, checking # that",
"distance]) # MOVEMENT INTO HOME # NB: Add special cases for Blue, with",
"int between 1 and 6 # def Roll(): return randint(1,6) # # Display():",
"if marble not in MagicCircle and marble < BOARDSIZE and destination < BOARDSIZE:",
"#output[i] = (\"-\") output[i] = chr(0x00B7) # A nice dot #print (\"-\", end=\"\")",
"TypeError: print (\"Bad input\") GotInput = 0 src,dst,note,distance = moves[selection-1] if not ValidMove(src,dst,myRoll,pColor):",
"} # Start[color] : space# Start = { \"Blue\": 0, \"Red\": 14, \"Cyan\":",
"Base = {} # Dict of each color's base status Home = {}",
"track, or ... if dude in MagicCircle: # circleNum is the index of",
"hits %s!\" %(color,Board[destination])) Bonk(destination) Board[destination] = color Marbles[color].remove(source) Marbles[color].append(destination) return moveDesc # #",
"< Start[color] and (dude+die)%BOARDSIZE >= Start[color]) or \\ (Start[color] == 0 and dude",
"for i in range(0, die+1): circleExit = MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot = (circleExit + (die-i))%BOARDSIZE",
"die, color): assert False response.append([dude, dude+die, \"[Home]\", 0]) # \"NORMAL\" MOVEMENT elif Board[(dude+die)%BOARDSIZE]",
"break if not selection: selection = 1 elif pColor == \"Cyan\" or pColor",
"MOVEMENT elif Board[(dude+die)%BOARDSIZE] != color: selfPass = 0 for i in range(1,die): if",
"%(option, strt-HOME+1, finish-HOME+1, note)) else: print(\"\\t[%d] %d -> Home[%d] %s\" %(option, strt, finish-HOME+1,",
"can be very # frustrating! # from random import randint #import tkinter #",
"die, color)) assert die > 0 and die < 7 assert color #",
"note, BOARDSIZE]) firstStart=0 continue else: continue # # Handle \"regular\" motion starting here:",
"that can be very # frustrating! # from random import randint #import tkinter",
"# Where are my marbles? All your base are belong to us. Marbles[c]",
"\"*\" #print (\"*\", end=\"\") output[i] = chr(0x00A4) # cool circle thing # Use",
"teammate # 6 in magic circle means I can land on myself if",
"CENTER: if die==1: for i in MagicCircle: if Board[i] != color: note =",
"List of active players # Marbles[color] : { location0, location1, location2, location3 }",
"spaces into Home? if testloc >= HOMESIZE: # Ran off the end of",
"WITHIN Home if marble >= HOME: assert marble < HOME+HOMESIZE assert destination >=",
"Base[color].count(color) > 0 Base[color].remove(color) # The destination is that color's start destination =",
"return True assert False return False # Something insane happened? # Movement WITHIN",
"CENTER, die, color): assert False distance = BOARDSIZE - 8 response.append([dude, CENTER, note,",
"the game over turnNum = 0 robotMode = 0 # A human is",
"marble to destination via die is valid # Returns True / False #",
"sub_li # # GetMoves (color, die) # # Return a list of the",
"Circle\" if (dude+die)%BOARDSIZE in MagicCircle and Board[(dude+die)%BOARDSIZE]: note += \" & \" if",
"# Checked all intermediate spaces, and destination space homeloc = destination - HOME",
"range(0,numPlayers): again=1 # Flag for when a player rolls a 6 while again:",
"roll out to any # magic circle space if dude == CENTER: if",
"\"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[b]+b[0].lower()+creset, end=\"\") print (\"\\tHome:\\t\",",
"MAGIC CIRCLE HANDLING # If I'm in the magic circle, I can continue",
"end=\"\") print (\"Base:\\t\", end=\"\") for b in Base[p]: if b == \"\": #print",
"space == CENTER: deadGuy = CenterSpace else: deadGuy = Board[space] Board[space] = \"\"",
"from marble to destination via die is valid # Returns True / False",
"Show the initial game board while not GameOver: # Main game loop turnNum",
"Counter for the user input menu for move in moves: strt, finish, note,",
"is your home? Colors = [ \"Blue\", \"Red\", \"Cyan\", \"Purple\", \"Green\", \"White\" ]",
"49, 63, 77 ] # Locations for the magic circle spaces Base =",
"have it here only. # But, you know, this is working. # def",
"if CenterSpace == color: return False for i in range(1,die+1): if Board[(marble+i)%BOARDSIZE] ==",
"select an option: \")) GotInput = 1 if selection < 1 or selection",
"= ccode[space]+space[0].lower()+creset #print (ccode[space]+space[0].lower()+creset, end=\"\") for i in range(0,BOARDSIZE): if i >=0 and",
"# and where I exited for j in range(magicStart, magicDestination+1): if Board[MagicCircle[j]] ==",
"color's base status Home = {} # Dict of each color's home status",
"def Move(color, source, destination): global CenterSpace moveDesc = color + \": \" #",
"any previous note # If this marble is in Base, see if it",
"if Board[finalspot] and not special: note += \"Bonk \" + Board[finalspot] if finalspot",
"Start[color]+BOARDSIZE): badMove = 0 myStart = Start[color] if myStart == 0: # HACK",
"be very # frustrating! # from random import randint #import tkinter # GLOBALS",
"badMove = 1 # End of for i in range(1,die) if not badMove:",
"badMove = 1 # Check regular spots after I left circle for t",
"(ccode[h]+h[0].lower()+creset, end=\"\") print() # # Setup(): # # Gets the board ready for",
"\"Home[\" + str(destination-HOME+1) + \"]\" else: # Board destination is not the center",
"HOMESIZE): valid=0 continue if hp+i > HOMESIZE or hm[hp+i] == color: valid=0 continue",
"a duplicate of GetMoves() but it serves as a # check because I",
"MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot = (circleExit + (die-i))%BOARDSIZE # Now verify that I didn't pass",
"dude != CENTER # MAGIC CIRCLE HANDLING # If I'm in the magic",
"# ValidMove (marble, destination, die) # # Check if the move from marble",
"cases for Blue, with start space of 0, # because of modulo problems.",
"strt >= HOME: print(\"\\t[%d] Home[%d] -> Home[%d] %s\" \\ %(option, strt-HOME+1, finish-HOME+1, note))",
"an int between 1 and 6 # def Roll(): return randint(1,6) # #",
"i in range(0,len(moves)): if \"Bonk\" in moves[i][2]: selection = i+1 print (\"Kill!\", moves[i])",
"with bonking if destination is not empty if Board[destination]: moveDesc += \"Bonk \"",
"(dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"]\" if not ValidMove(dude, (dude+die)%BOARDSIZE, die,",
"status Home = {} # Dict of each color's home status Marbles =",
"of Home return False elif Home[color][testloc]: # somebody in the way return False",
"== \"Purple\" or pColor == \"White\": # Always take the first option selection",
"creset=\"\\033[m\" output = [\"-\" for x in range(0,BOARDSIZE)] for i in range(0,BOARDSIZE): space",
"Blue is the player .. or she chooses 1 # Deckard is a",
"return False # # SortMoves(myList) # # Used by .sorted to return lists",
"special: note += \"Bonk \" + Board[finalspot] if finalspot in MagicCircle or Board[finalspot]:",
"homeloc < HOMESIZE: if not ValidMove(dude, HOME+homeloc, die, color): assert False response.append([dude, HOME+homeloc,",
"print(\"\\t[%d] %d -> Home[%d] %s\" %(option, strt, finish-HOME+1, note)) elif strt == CENTER:",
"= loc - myStart # homeloc is home space # Move into Home",
"# print (\"[] GetMoves(color=%s die=%d) - Check %d\" %(color,die,dude)) note =\"\" # Just",
"home space # Move into Home if homeloc >= 0 and homeloc <",
"= \"[Start\" if Board[Start[color]]: note += \" & Bonk \" + Board[Start[color]] note",
"Marbles = {} # Dict of each color's marble locations Players = []",
"False distance = BOARDSIZE - (i - Start[color]) % BOARDSIZE response.append([dude, i, note,",
"< 63: if i == 42: print (\"\\t\", end=\"\") print (output[104-i], end=\"\") #",
"order def SortMoves(sub_li): sub_li.sort(key = lambda x: x[3]) #sub_li.sort(reverse=True,key = lambda x: x[0])",
"flow GotInput = 1 for i in range(0,len(moves)): if \"Bonk\" in moves[i][2]: selection",
"Deal with bonking if destination is not empty if Board[destination]: moveDesc += \"Bonk",
"move from marble to destination via die is valid # Returns True /",
"\"Purple\": 42, \"Green\": 56, \"White\": 70 } # # Roll(): # # Roll",
"6: return False # If this marble is in Base, see if it",
"assert False distance = BOARDSIZE - 8 response.append([dude, CENTER, note, distance]) # If",
"hp means Home Position valid=1 for i in range(1,die+1): if(hp+i >= HOMESIZE): valid=0",
"= (circleExit + t)%BOARDSIZE if Board[MoveToCheck] == color: # Handle case where I",
"won. Returns True/False # def IsWinner(color): win=1 for i in range(0, HOMESIZE): if",
"the board ready for a new game, and assigns player colors. # Returns:",
"return False return True # Catch all assert False return False # #",
"0 # Red always goes for the kill # White tried to be",
"to be optimal, but sucked so now takes 1 # Cyan takes option",
"destination space homeloc = destination - HOME # homeloc is (potential) home space",
"!= CENTER assert destination != CENTER # Special case of 6 in the",
"\"\" moveDesc += \"\" + str(source) + \" -> \" # Deal with",
"Players.append(\"Cyan\") elif NumPlayers == 5 or NumPlayers == -5: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\")",
"Start[color], die, color): assert False response.append([dude, Start[color], note, BOARDSIZE]) firstStart=0 continue else: continue",
"destination space homeloc = loc - myStart # homeloc is home space #",
"if Home[color][i] != color: win=0 break return bool(win) def TkSetup(): root = tkinter.Tk()",
"me to one past the MagicCircle, then I # can enter the Center.",
"# def Main(): GameOver = 0 # Is the game over turnNum =",
"(\"*\", end=\"\") output[i] = chr(0x00A4) # cool circle thing # Use a #",
"BASE, BASE, BASE ] robotMode = 0 Setup = 0 # Has the",
"Number of space around the main track CENTER=98 # \"Location\" of the center",
"or NumPlayers == -4: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") elif NumPlayers == 5 or",
"the state of the board. # XXX: This could be replaced with Tk",
"robotMode and pColor == \"Blue\": selection = 1 GotInput = 1 if pColor",
"color): assert False response.append([dude, dude+die, \"[Home]\", 0]) # \"NORMAL\" MOVEMENT elif Board[(dude+die)%BOARDSIZE] !=",
"and Board[destination] != color: return True else: return False assert marble != CENTER",
"if marble < myStart and marble+die >= myStart: # Test the spaces between",
"# Use a * to indicate magic circle if i in MagicCircle: #output[i]",
"the magic circle ending where you start if marble == destination and die",
"get out if marble == BASE: assert destination == Start[color] if (die ==",
"i < 42: if i == 31: if CenterSpace: cen = ccode[CenterSpace]+CenterSpace[0].upper()+creset else:",
"canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop() # # Main # def Main(): GameOver = 0 #",
"print(\"\\t%s\\t %s\\t %s\" %(output[104-i],cen,output[i])) else: print(\"\\t\"+output[104-i],\"\\t\\t \",output[i]) elif i >= 42 and i",
"testloc = marble+i if testloc >= myStart: # testloc is in the Home",
"another turn.\" %pColor) again=1 if IsWinner(pColor): print (\"%s wins in %d turns!\" %(pColor,",
"return False response = Move(pColor, src, dst) Display() print (response) if myRoll ==",
"Marbles[color].append(destination) return moveDesc # # ValidMove (marble, destination, die) # # Check if",
"# # Prints out the state of the board. # XXX: This could",
"die < 7 assert color in Colors assert color in Players # print",
"# # An implementation of the classic marble board game that can be",
"CENTER: if die==1 and Board[destination] != color: return True else: return False assert",
"MagicCircle and marble < BOARDSIZE and destination < BOARDSIZE: for i in range(1,die):",
"in MagicCircle: note += \"Magic Circle\" if finalspot in MagicCircle and Board[finalspot] and",
"moveDesc # # ValidMove (marble, destination, die) # # Check if the move",
"into Home if homeloc >= 0 and homeloc < HOMESIZE: if not ValidMove(dude,",
"means Home[color] hp = dude-HOME # hp means Home Position valid=1 for i",
"if(hp+i >= HOMESIZE): valid=0 continue if hp+i > HOMESIZE or hm[hp+i] == color:",
"Reset the color to default creset=\"\\033[m\" output = [\"-\" for x in range(0,BOARDSIZE)]",
"# White tried to be optimal, but sucked so now takes 1 #",
"for h in Home[p]: if h == \"\": #print (\"-\", end=\"\") print (chr(0x00B7),",
"randint #import tkinter # GLOBALS BOARDSIZE = 84 # Number of space around",
"if h == \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[h]+h[0].lower()+creset,",
"root.title(\"Marbles!\") canvas = tk.Canvas(root, width=200, height=200, borderwidth=0, bg=\"black\") canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop() # #",
"color CenterSpace = \"\" moveDesc += \"[Center] -> \" elif source == BASE:",
"if not badMove and testloc >= myStart: # testloc is in the Home",
"MagicCircle, then I # can enter the Center. dude+die-1 is equal to MagicCircle+1",
"i >=0 and i < 21: if i == 0: print (\"\\t\", end=\"\")",
"CenterSpace = \"\" MagicCircle = [ 7, 21, 35, 49, 63, 77 ]",
"HANDLING # If my roll can take me to one past the MagicCircle,",
"Home[color][destination-HOME] = color moveDesc += \"Home[\" + str(destination-HOME+1) + \"]\" else: # Board",
"the Home zone testloc -= myStart # How many spaces into Home? if",
"in MagicCircle: if Board[i] != color: note = \"[Magic Circle\" if Board[i]: note",
"to indicate magic circle if i in MagicCircle: #output[i] = \"*\" #print (\"*\",",
"note = \"[Magic Circle\" if Board[i]: note += \" & Bonk \" +",
"number between 2 and 6.\") Setup = 0 print (\"Preparing a %d player",
"print (\"Kill!\", moves[i]) break if not selection: selection = 1 elif pColor ==",
"if color has won. Returns True/False # def IsWinner(color): win=1 for i in",
"< BOARDSIZE: for i in range(1,die): if Board[(marble+i)%BOARDSIZE] == color: return False return",
"ANSI color codes for start startColor={ \"Blue\": \"\\033[1;34;40m\", \"Red\": \"\\033[1;31;40m\", \"Cyan\": \"\\033[1;36;40m\", \"Purple\":",
"home spots - 100, 101, 102, 103 HOMESIZE=4 # How big is your",
"# Setup(): # # Gets the board ready for a new game, and",
"CenterSpace: cen = ccode[CenterSpace]+CenterSpace[0].upper()+creset else: #cen = \"-\" #cen = chr(216) cen =",
"# # Used by .sorted to return lists in order def SortMoves(sub_li): sub_li.sort(key",
"where you start if marble == destination and die == 6 and marble",
"#print (startColor[thiscolor]+\"#\"+creset, end=\"\") elif i % 10 == 0: output[i] = str(i //",
"Returns an int between 1 and 6 # def Roll(): return randint(1,6) #",
"color: if marble == destination and die == 6: return False return True",
"and i < 63: if i == 42: print (\"\\t\", end=\"\") print (output[104-i],",
"hop one magic circle space and then continue the normal # track, or",
"testloc = dude+i if not badMove and testloc >= myStart: # testloc is",
"in range(0,len(moves)): if \"Bonk\" in moves[i][2]: selection = i+1 print (\"Kill!\", moves[i]) break",
"for i in range(1,die+1): if(hp+i >= HOMESIZE): return False if hp+i > HOMESIZE",
"Move(pColor, src, dst) Display() print (response) if myRoll == 6: print(\"%s rolled a",
"marble in MagicCircle: return True # MAGIC CIRCLE HANDLING if marble in MagicCircle:",
"= \"*\" #print (\"*\", end=\"\") output[i] = chr(0x00A4) # cool circle thing #",
"# from random import randint #import tkinter # GLOBALS BOARDSIZE = 84 #",
"56, \"White\": 70 } # # Roll(): # # Roll a die. #",
"off the end of Home return False elif Home[color][testloc]: # somebody in the",
"is someone else badMove = 1 if t==0: # The magic circle is",
"t==0: # The magic circle is poisoned from # here on out.. circleBlock",
"and marble in MagicCircle: return True # MAGIC CIRCLE HANDLING if marble in",
"+ \" \" # Deal with bonking if destination is not empty if",
"List that we'll be returning with ALL valid moves response = [] #",
"= destination - HOME # homeloc is (potential) home space # Move into",
"Home[%d] %s\" %(option, strt, finish-HOME+1, note)) elif strt == CENTER: print(\"\\t[%d] Center ->",
"(potential) home space # Move into Home if homeloc >= 0 and homeloc",
"# # Setup(): # # Gets the board ready for a new game,",
"valid moves response = [] # For each marble, figure out all possible",
"Add this to the list # Special processing: If the roll is a",
"Setup = 0 print (\"Preparing a %d player game.\" %NumPlayers) if NumPlayers ==",
"- 8 response.append([dude, CENTER, note, distance]) # If I'm in the center and",
"+ Board[i] note += \"]\" if not ValidMove(dude, i, die, color): assert False",
"# Lots of permutations for magic circle... for i in range(0, die+1): circleExit",
"strt, finish-HOME+1, note)) elif strt == CENTER: print(\"\\t[%d] Center -> %d %s\" %(option,finish,note))",
"I entered # and where I exited for j in range(magicStart, magicDestination+1): if",
"bases and colors for c in Colors: Base[c] = [ c, c, c,",
"not badMove and testloc >= myStart: # testloc is in the Home zone",
"loc = dude+die # loc is destination space homeloc = loc - myStart",
"# Flag for when a player rolls a 6 while again: again=0 pColor",
"# If I'm in the magic circle, I can continue normal track, or",
"with start space of 0, # because of modulo problems. elif (dude <",
"False return False # # SortMoves(myList) # # Used by .sorted to return",
"circle means I can land on myself if mc == 6: pass else:",
"Home Position valid=1 for i in range(1,die+1): if(hp+i >= HOMESIZE): valid=0 continue if",
"if CenterSpace: note += \" & Bonk \" + CenterSpace note += \"]\"",
"distance = BOARDSIZE - (i - Start[color]) % BOARDSIZE response.append([dude, i, note, distance])",
"turn.\" %pColor) again=1 if IsWinner(pColor): print (\"%s wins in %d turns!\" %(pColor, turnNum))",
"color # Quick check to see if there's a teammate at the destination",
"%s hits %s!\" %(color,Board[destination])) Bonk(destination) Board[destination] = color Marbles[color].remove(source) Marbles[color].append(destination) return moveDesc #",
"Use a * to indicate magic circle if i in MagicCircle: #output[i] =",
"print (\"\\t[%d] %d -> %d %s\" %(option, strt, finish,note)) else: print (\"\\t[%d] %d",
"Where are my marbles? All your base are belong to us. Marbles[c] =",
"# Always take the first option selection = 1 GotInput = 1 elif",
"Remove marble from source if source == CENTER: assert CenterSpace == color CenterSpace",
"1 continue if not selfPass: note = \"\" if (dude+die)%BOARDSIZE in MagicCircle or",
"color: return False return True assert marble not in MagicCircle # MOVEMENT INTO",
"in Players # print (\"[Entering] GetMoves(color=%s die=%d)\" %(color,die)) # List that we'll be",
"the magic circle, so walk # back to the nearest magic circle space,",
"GetMoves(pColor, myRoll) if not moves: print (\"No moves available.\") continue GotInput = 0",
"startColor[thiscolor]+chr(0x033F)+creset #print (startColor[thiscolor]+\"#\"+creset, end=\"\") elif i % 10 == 0: output[i] = str(i",
"MagicCircle if CenterSpace == color: return False for i in range(1,die+1): if Board[(marble+i)%BOARDSIZE]",
"from GetMoves and have it here only. # But, you know, this is",
"else: print(\"\\t\"+output[104-i],\"\\t\\t \",output[i]) elif i >= 42 and i < 63: if i",
"the initial game board while not GameOver: # Main game loop turnNum +=",
"the color to default creset=\"\\033[m\" output = [\"-\" for x in range(0,BOARDSIZE)] for",
"input\") GotInput = 0 except TypeError: print (\"Bad input\") GotInput = 0 src,dst,note,distance",
"103 HOMESIZE=4 # How big is your home? Colors = [ \"Blue\", \"Red\",",
"homeloc is (potential) home space # Move into Home if homeloc >= 0",
"continue else: continue # # Handle \"regular\" motion starting here: # # CENTER",
"from choices # Blue is the player .. or she chooses 1 #",
"track CENTER=98 # \"Location\" of the center of death. BASE=99 # \"Location\" for",
"= BOARDSIZE if marble < myStart and marble+die >= myStart: # Test the",
"Bonk \" + CenterSpace note += \"]\" if not ValidMove(dude, CENTER, die, color):",
"True assert marble not in MagicCircle # MOVEMENT INTO HOME myStart = Start[color]",
"in range(1,die+1): testloc = marble+i if testloc >= myStart: # testloc is in",
"return False return True else: # The destination is not in the magic",
"in Players: print (\"%s\\t\" %p, end=\"\") print (\"Base:\\t\", end=\"\") for b in Base[p]:",
"Players.append(\"Red\") Players.append(\"Green\") return NumPlayers # # Bonk! # # send a guy back",
"GetMoves(color,die): assert die > 0 and die < 7 assert color in Colors",
"Dict of each color's base status Home = {} # Dict of each",
"pColor == \"Cyan\" or pColor == \"Purple\" or pColor == \"White\": # Always",
"color: selfPass = 1 continue if not selfPass: note = \"\" if (dude+die)%BOARDSIZE",
"upper case # letter if i in MagicCircle: output[i] = ccode[space]+space[0].upper()+creset #print (ccode[space]+space[0].upper()+creset,",
"{} # Dict of each color's home status Marbles = {} # Dict",
"# MOVEMENT INTO HOME myStart = Start[color] if myStart == 0: # I",
"implementation of the classic marble board game that can be very # frustrating!",
"%(option,note)) else: if finish == CENTER: print (\"\\t[%d] %d -> Center %s\" %(option,strt,note))",
"of each color's home status Marbles = {} # Dict of each color's",
"in Start.values(): # What's this? I need to get the color given the",
"ValidMove(marble, destination, die, color): # print (\"[Entering] ValidMove(src=%d, dest=%d, die=%d, color=%s)\" %(marble, destination,",
"insane happened? # Movement WITHIN Home if marble >= HOME: assert marble <",
"the # normal track, or ... if dude in MagicCircle: # circleNum is",
"magic circle spaces Base = {} # Dict of each color's base status",
"for i in range(1,die+1): if Board[dude+i] == color: yep=0 if yep: note =",
"(dude < Start[color] and (dude+die)%BOARDSIZE >= Start[color]) or \\ (Start[color] == 0 and",
"or # hop one magic circle space and then continue the normal #",
"space # Move into Home if homeloc >= 0 and homeloc < HOMESIZE:",
"finish == CENTER: print (\"\\t[%d] %d -> Center %s\" %(option,strt,note)) elif finish in",
"all possible moves firstStart=1 # Only want to add Start once for dude",
"# Special case of 6 in the magic circle ending where you start",
"game.\" %NumPlayers) if NumPlayers == 2 or NumPlayers == -2: Players.append(\"Blue\") Players.append(\"Purple\") elif",
"assert Home[color][destination-HOME] != color Home[color][destination-HOME] = color moveDesc += \"Home[\" + str(destination-HOME+1) +",
"# # Display(): # # Prints out the state of the board. #",
"+= \"[Center] \" if CenterSpace: print (\"Bonk! %s hits %s!\" %(color, CenterSpace)) moveDesc",
"the kill # White tried to be optimal, but sucked so now takes",
"magic circle, so walk # back to the nearest magic circle space, checking",
"Move into Home if homeloc >= 0 and homeloc < HOMESIZE: return True",
"color: # Can't pass teammate badMove = 1 # End of for i",
">= 0 and homeloc < HOMESIZE: if not ValidMove(dude, HOME+homeloc, die, color): assert",
"the bases and colors for c in Colors: Base[c] = [ c, c,",
"# Check all the magic spaces between where I entered # and where",
"here only. # But, you know, this is working. # def ValidMove(marble, destination,",
"range(1,die+1): if(hp+i >= HOMESIZE): return False if hp+i > HOMESIZE or hm[hp+i] ==",
"not ValidMove(dude, finalspot, die, color): assert False distance = BOARDSIZE - (finalspot -",
"output[i] = str(i // 10) else: #output[i] = (\"-\") output[i] = chr(0x00B7) #",
"Check regular spots after I left circle for t in range(0,die-i+1): # t",
"Done! # print (\"[Leaving] GetMoves(color=%s die=%d) =\" %(color,die),response) return SortMoves(response) # # IsWinner(color)",
"special cases for Blue, with start space of 0, # because of modulo",
"%s\" %(output[104-i],cen,output[i])) else: print(\"\\t\"+output[104-i],\"\\t\\t \",output[i]) elif i >= 42 and i < 63:",
"if myRoll == 6: print(\"%s rolled a 6! Take another turn.\" %pColor) again=1",
"Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") Players.append(\"Green\") return NumPlayers # # Bonk! # #",
"Home if homeloc >= 0 and homeloc < HOMESIZE: return True assert False",
"print (\"\\t\", end=\"\") print(output[i], end=\"\") if i == 20: print() elif i >=",
"one past the MagicCircle, then I # can enter the Center. marble+die-1 is",
"color: yep=1 for i in range(1,die+1): if Board[dude+i] == color: yep=0 if yep:",
"print (\"\\t[%d] Base -> Start %s\" %(option,note)) else: if finish == CENTER: print",
"elif NumPlayers == 3 or NumPlayers == -3: Players.append(\"Blue\") Players.append(\"Cyan\") Players.append(\"Green\") elif NumPlayers",
"dest=%d, die=%d, color=%s)\" %(marble, destination, die, color)) assert die > 0 and die",
"Remove the marble from the base assert Base[color].count(color) > 0 Base[color].remove(color) # The",
"where I exited for j in range(magicStart, magicDestination+1): if Board[MagicCircle[j]] == color: if",
"# End where I started special=1 note = \"\" if (finalspot in MagicCircle)",
"borderwidth=0, bg=\"black\") canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop() # # Main # def Main(): GameOver =",
"circle, that isn't bonking because it is me. special=0 if dude == finalspot:",
"die=%d) - Check %d\" %(color,die,dude)) note =\"\" # Just in case, clear out",
"HOME+HOMESIZE assert destination >= HOME hm = Home[color] # hm means Home[color] hp",
"i+1 print (\"Kill!\", moves[i]) break if not selection: selection = 1 elif pColor",
"== color: yep=0 if yep: note = \"[Center\" if CenterSpace: note += \"",
"list magicStart = MagicCircle.index(marble) for i in range(0,die+1): if destination-i in MagicCircle: magicDestination",
"Display(): # Color! # ANSI color codes for the marbles ccode={ # [magic];Attrib;FG;BGm",
"c, c, c] Home[c] = [ \"\", \"\", \"\", \"\"] # Where are",
"Base[c] = [ c, c, c, c] Home[c] = [ \"\", \"\", \"\",",
"-5: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") else: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") Players.append(\"Green\")",
"to any # magic circle space if dude == CENTER: if die==1: for",
"# Dict of each color's base status Home = {} # Dict of",
"# What's this? I need to get the color given the # value.",
"1 # End of for i in range(1,die) if not badMove: # Valid",
"note += \"]\" if not ValidMove(dude, finalspot, die, color): assert False distance =",
"== color: # Passed through teammate # 6 in magic circle means I",
"# Hurricane print(\"\\t%s\\t %s\\t %s\" %(output[104-i],cen,output[i])) else: print(\"\\t\"+output[104-i],\"\\t\\t \",output[i]) elif i >= 42",
"myStart = Start[color] if myStart == 0: # HACK for Blue with start",
"the classic marble board game that can be very # frustrating! # from",
"end=\"\") else: print (ccode[h]+h[0].lower()+creset, end=\"\") print() # # Setup(): # # Gets the",
"note, distance]) # Movement WITHIN Home elif dude >= HOME: hm = Home[color]",
"the index in that list magicStart = MagicCircle.index(marble) for i in range(0,die+1): if",
"# GetMoves (color, die) # # Return a list of the valid player",
"# Cyan takes option 1 # Purple kills # Green picks randomly from",
"Players.append(\"Green\") return NumPlayers # # Bonk! # # send a guy back to",
"7 assert color # Quick check to see if there's a teammate at",
"in range(0,BOARDSIZE)] for i in range(0,BOARDSIZE): space = Board[i] if space == \"\":",
"Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") Players.append(\"Green\") return NumPlayers # # Bonk! # # send",
"src, dst) Display() print (response) if myRoll == 6: print(\"%s rolled a 6!",
"color Home[color][destination-HOME] = color moveDesc += \"Home[\" + str(destination-HOME+1) + \"]\" else: #",
"return True return False assert marble != BASE # CENTER SPACE HANDLING #",
"# teammates for i in range(1,die+1): testloc = marble+i if testloc >= myStart:",
"-= myStart # How many spaces into Home? if testloc >= HOMESIZE: #",
"+= \"[\" if (dude+die)%BOARDSIZE in MagicCircle: note += \"Magic Circle\" if (dude+die)%BOARDSIZE in",
"BASE: print (\"\\t[%d] Base -> Start %s\" %(option,note)) else: if finish == CENTER:",
"not to play.\") NumPlayers = -6 robotMode = 1 elif NumPlayers >= -6",
"response = [] # For each marble, figure out all possible moves firstStart=1",
"in %d turns!\" %(pColor, turnNum)) GameOver = 1 return # We're out of",
"while again: again=0 pColor = Players[p] myRoll = Roll() print (\"\\n%s rolled: %d\\n\"",
"space else: # If you're on the magic circle, you get an upper",
"if marble == CENTER: if die==1 and Board[destination] != color: return True else:",
"strt, finish,note)) else: print (\"\\t[%d] %d -> %d %s\" %(option, strt, finish, note))",
"Bonk \" + Board[i] note += \"]\" if not ValidMove(dude, i, die, color):",
"# Still on the Board elif loc < myStart: if Board[loc]: note =",
"= 1 numPlayers *= -1 # TkSetup() Display() # Show the initial game",
"and dude+die >= Start[color]+BOARDSIZE): badMove = 0 myStart = Start[color] if myStart ==",
"False # Something insane happened? # Movement WITHIN Home if marble >= HOME:",
">= HOME: assert marble < HOME+HOMESIZE assert destination >= HOME hm = Home[color]",
"42: if i == 31: if CenterSpace: cen = ccode[CenterSpace]+CenterSpace[0].upper()+creset else: #cen =",
"range(0,BOARDSIZE)] for i in range(0,BOARDSIZE): space = Board[i] if space == \"\": #",
"#import tkinter # GLOBALS BOARDSIZE = 84 # Number of space around the",
"& Bonk \" + Board[i] note += \"]\" if not ValidMove(dude, i, die,",
"< 1 or selection > len(moves): print (\"That's not an option. Try again.\")",
"BOARDSIZE response.append([dude, loc, note, distance]) # Movement WITHIN Home elif dude >= HOME:",
"10 == 0: output[i] = str(i // 10) else: #output[i] = (\"-\") output[i]",
"== CENTER: assert CenterSpace == color CenterSpace = \"\" moveDesc += \"[Center] ->",
"& Bonk \" + CenterSpace note += \"]\" if not ValidMove(dude, CENTER, die,",
"\\ (Start[color] == 0 and dude < Start[color]+BOARDSIZE and dude+die >= Start[color]+BOARDSIZE): badMove",
"0 except TypeError: print (\"Please enter a number between 2 and 6.\") Setup",
"# \"NORMAL\" MOVEMENT elif Board[(dude+die)%BOARDSIZE] != color: selfPass = 0 for i in",
"distance = BOARDSIZE - 8 response.append([dude, CENTER, note, distance]) # If I'm in",
"HOME: Home[color][source-HOME] = \"\" moveDesc += \"Home[\" + str(source-HOME+1) + \"] -> \"",
"= 0 # A human is needed numPlayers = Setup() if numPlayers <=",
"the main track CENTER=98 # \"Location\" of the center of death. BASE=99 #",
"def GetMoves(color,die): assert die > 0 and die < 7 assert color in",
"at the destination if destination < BOARDSIZE: if Board[destination] == color and marble",
"the Board elif loc < myStart: if Board[loc]: note = \"[Bonk \" +",
"valid player options with a die roll # def GetMoves(color,die): assert die >",
": { location0, location1, location2, location3 } # Start[color] : space# Start =",
"%s\" %(option,finish,note)) elif strt == BASE: print (\"\\t[%d] Base -> Start %s\" %(option,note))",
"(\"\\t[%d] %d -> Center %s\" %(option,strt,note)) elif finish in MagicCircle: print (\"\\t[%d] %d",
"teammates for i in range(1,die+1): testloc = marble+i if testloc >= myStart: #",
"== \"Cyan\" or pColor == \"Purple\" or pColor == \"White\": # Always take",
"(color, die) # # Return a list of the valid player options with",
"marble not in MagicCircle and marble < BOARDSIZE and destination < BOARDSIZE: for",
"again.\") GotInput = 0 except ValueError: if len(moves) == 1: selection = 1",
"source, destination): global CenterSpace moveDesc = color + \": \" # Remove marble",
"+ (die-i))%BOARDSIZE # Now verify that I didn't pass a teammate between dude",
"Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") else: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") Players.append(\"Green\") return NumPlayers",
"color elif destination >= HOME: assert Home[color][destination-HOME] != color Home[color][destination-HOME] = color moveDesc",
"BASE # CENTER SPACE HANDLING # If my roll can take me to",
"MagicCircle and Board[finalspot] and not special: note += \" & \" if Board[finalspot]",
"board. # XXX: This could be replaced with Tk or something else. #",
"True # Leaving the Center space if marble == CENTER: if die==1 and",
"the magic circle, you get an upper case # letter if i in",
"# Still on the main board if Board[testloc] == color: # Can't pass",
"badMove: # Add this to the list # Special processing: If the roll",
"print (\"Please enter a number between 2 and 6.\") Setup = 0 print",
"Take a random option selection = randint(1,len(moves)) GotInput = 1 while not GotInput:",
"into Home if homeloc >= 0 and homeloc < HOMESIZE: return True assert",
"(Start[color] == 0 and dude < Start[color]+BOARDSIZE and dude+die >= Start[color]+BOARDSIZE): badMove =",
"# Valid moves only loc = dude+die # loc is destination space homeloc",
"(chr(0x00B7), end=\"\") else: print (ccode[h]+h[0].lower()+creset, end=\"\") print() # # Setup(): # # Gets",
"-4: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") elif NumPlayers == 5 or NumPlayers == -5:",
"somebody in the way return False else: # Still on the main board",
"indicate start spaces elif i in Start.values(): # What's this? I need to",
"valid=0 continue if valid: if not ValidMove(dude, dude+die, die, color): assert False response.append([dude,",
"Initialize the bases and colors for c in Colors: Base[c] = [ c,",
">=0 and i < 21: if i == 0: print (\"\\t\", end=\"\") print(output[i],",
"not the center or Home assert Board[destination] != color moveDesc += \"\" +",
"a teammate at the destination if destination < BOARDSIZE: if Board[destination] == color",
"- Start[color]) % BOARDSIZE response.append([dude, (dude+die)%BOARDSIZE, note, distance]) # Done! # print (\"[Leaving]",
"in MagicCircle and Board[finalspot] and not special: note += \" & \" if",
"left circle for t in range(0,die-i+1): # t is number of hops out",
"HANDLING # If I'm in the magic circle, I can continue normal track,",
"for p in range(0,numPlayers): again=1 # Flag for when a player rolls a",
"marble+die-1 in MagicCircle if CenterSpace == color: return False for i in range(1,die+1):",
"- Start[color]) % BOARDSIZE response.append([dude, finalspot, note, distance]) # MOVEMENT INTO HOME #",
"selection < 1 or selection > len(moves): print (\"That's not an option. Try",
"# Color! # ANSI color codes for the marbles ccode={ # [magic];Attrib;FG;BGm \"Blue\":",
"%s\" %(option,strt,note)) elif finish in MagicCircle: print (\"\\t[%d] %d -> %d %s\" %(option,",
"magicStart = MagicCircle.index(marble) for i in range(0,die+1): if destination-i in MagicCircle: magicDestination =",
"Move marble of color color from source to destination. # def Move(color, source,",
"HANDLING if marble in MagicCircle: # magicStart is the index of where we",
"game myStart = BOARDSIZE if marble < myStart and marble+die >= myStart: #",
"Can't pass teammate badMove = 1 # End of for i in range(1,die)",
"pColor == \"Purple\": # Blood shall flow GotInput = 1 for i in",
"= 1 else: print (\"Bad input\") GotInput = 0 except TypeError: print (\"Bad",
"MagicCircle+1 # Entering the Center space if destination == CENTER: assert marble+die-1 in",
"zone testloc -= myStart # How many spaces into Home? if testloc >=",
"each marble, figure out all possible moves firstStart=1 # Only want to add",
"player colors. # Returns: Number of Players # def Setup(): # Initialize the",
"else: #cen = \"-\" #cen = chr(216) cen = chr(0x00A7) # Hurricane print(\"\\t%s\\t",
"height=200, borderwidth=0, bg=\"black\") canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop() # # Main # def Main(): GameOver",
"is destination space homeloc = loc - myStart # homeloc is home space",
"end=\"\") else: output[i] = ccode[space]+space[0].lower()+creset #print (ccode[space]+space[0].lower()+creset, end=\"\") for i in range(0,BOARDSIZE): if",
"(circleExit + (die-i))%BOARDSIZE # Now verify that I didn't pass a teammate between",
"out any previous note # If this marble is in Base, see if",
"False # Checked all intermediate spaces, and destination space homeloc = destination -",
"\"\\033[1;37;40m\", } # Reset the color to default creset=\"\\033[m\" output = [\"-\" for",
"< BOARDSIZE: if Board[destination] == color and marble != destination and die !=",
"one magic circle space and then continue the normal # track, or hope",
"if t==0: # The magic circle is poisoned from # here on out..",
"the first option selection = 1 GotInput = 1 elif pColor == \"Green\":",
"GotInput = 0 except ValueError: if len(moves) == 1: selection = 1 GotInput",
"testloc >= HOMESIZE: # Ran off the end of Home return False elif",
"# An implementation of the classic marble board game that can be very",
"Board[testloc] == color: # Can't pass teammate return False # Checked all intermediate",
"+ \" -> \" # Deal with possible destinations if destination == CENTER:",
"poisoned from # here on out.. circleBlock = 1 continue if circleBlock: continue",
"/ False # # This is pretty much a duplicate of GetMoves() but",
"\" + CenterSpace + \"!\" Bonk(CENTER) CenterSpace = color elif destination >= HOME:",
"(\"Bad input\") GotInput = 0 except TypeError: print (\"Bad input\") GotInput = 0",
"Passed through teammate # 6 in magic circle means I can land on",
"(\"Bad input\") GotInput = 0 src,dst,note,distance = moves[selection-1] if not ValidMove(src,dst,myRoll,pColor): print (\"ERROR:",
"color): assert False distance = BOARDSIZE - (loc - Start[color]) % BOARDSIZE response.append([dude,",
"This is pretty much a duplicate of GetMoves() but it serves as a",
"{} # Dict of each color's marble locations Players = [] # List",
"ValidMove(src,dst,myRoll,pColor): print (\"ERROR: ValidMove(%d, %d, %d, %s)\" %(src,dst,myRoll,pColor)) return False response = Move(pColor,",
"NumPlayers < 2 or NumPlayers > 6: print (\"Please enter a number between",
"print (\"Base:\\t\", end=\"\") for b in Base[p]: if b == \"\": #print (\"-\",",
"MagicCircle+1 if dude+die-1 in MagicCircle and CenterSpace != color: yep=1 for i in",
"an option: \")) GotInput = 1 if selection < 1 or selection >",
"Center. marble+die-1 is equal to MagicCircle+1 # Entering the Center space if destination",
"20: print() elif i >= 21 and i < 42: if i ==",
"Home zone testloc -= myStart # How many spaces into Home? if testloc",
"magic circle means I can land on myself if mc == 6: pass",
"# to indicate start spaces elif i in Start.values(): # What's this? I",
"0: output[i] = str(i // 10) else: #output[i] = (\"-\") output[i] = chr(0x00B7)",
"if i == 31: if CenterSpace: cen = ccode[CenterSpace]+CenterSpace[0].upper()+creset else: #cen = \"-\"",
"# MAGIC CIRCLE HANDLING if marble in MagicCircle: # magicStart is the index",
"valid=0 continue if hp+i > HOMESIZE or hm[hp+i] == color: valid=0 continue if",
"<= -2: print (\"Like tears in rain.\") robotMode = 1 elif NumPlayers <",
"and 6.\") Setup = 0 except ValueError: print (\"Please enter a number between",
"motion starting here: # # CENTER SPACE HANDLING # If my roll can",
"circleNum = MagicCircle.index(dude) # Lots of permutations for magic circle... for i in",
"note =\"\" # Just in case, clear out any previous note # If",
"# Can't pass teammate badMove = 1 # End of for i in",
"# Initialize the bases and colors for c in Colors: Base[c] = [",
"dude+die, \"[Home]\", 0]) # \"NORMAL\" MOVEMENT elif Board[(dude+die)%BOARDSIZE] != color: selfPass = 0",
"# cool circle thing # Use a # to indicate start spaces elif",
"%d\" %(color,die,dude)) note =\"\" # Just in case, clear out any previous note",
"= 1 continue if circleBlock: continue if not badMove: # Add this to",
"print(\"\\t[%d] Home[%d] -> Home[%d] %s\" \\ %(option, strt-HOME+1, finish-HOME+1, note)) else: print(\"\\t[%d] %d",
"here: # # CENTER SPACE HANDLING # If my roll can take me",
"= \"\" moveDesc += \"[Center] -> \" elif source == BASE: # Remove",
"= [ c, c, c, c] Home[c] = [ \"\", \"\", \"\", \"\"]",
"past the MagicCircle, then I # can enter the Center. dude+die-1 is equal",
"(\"[Entering] ValidMove(src=%d, dest=%d, die=%d, color=%s)\" %(marble, destination, die, color)) assert die > 0",
"then it is someone else badMove = 1 if t==0: # The magic",
"moves[i]) break if not selection: selection = 1 elif pColor == \"Cyan\" or",
"color: return False for i in range(1,die+1): if Board[(marble+i)%BOARDSIZE] == color: return False",
"in range(1,die+1): if(hp+i >= HOMESIZE): valid=0 continue if hp+i > HOMESIZE or hm[hp+i]",
"Always take the first option selection = 1 GotInput = 1 elif pColor",
"if Board[Start[color]]: note += \" & Bonk \" + Board[Start[color]] note += \"]\"",
"starting here: # # CENTER SPACE HANDLING # If my roll can take",
"# Reset the color to default creset=\"\\033[m\" output = [\"-\" for x in",
"a bunch of casting black magic to # do that. thiscolor = list(Start.keys())[list(Start.values()).index(i)]",
"+ Board[finalspot] if finalspot in MagicCircle or Board[finalspot]: note += \"]\" if not",
"-6 robotMode = 1 elif NumPlayers >= -6 and NumPlayers <= -2: print",
"TkSetup() Display() # Show the initial game board while not GameOver: # Main",
"\" & Bonk \" + CenterSpace note += \"]\" if not ValidMove(dude, CENTER,",
"# This is pretty much a duplicate of GetMoves() but it serves as",
"!= color moveDesc += \"[Center] \" if CenterSpace: print (\"Bonk! %s hits %s!\"",
"= Players[p] myRoll = Roll() print (\"\\n%s rolled: %d\\n\" %(pColor, myRoll)) moves =",
"color: return False return True # Leaving the Center space if marble ==",
"0 src,dst,note,distance = moves[selection-1] if not ValidMove(src,dst,myRoll,pColor): print (\"ERROR: ValidMove(%d, %d, %d, %s)\"",
"play.\") NumPlayers = -6 robotMode = 1 elif NumPlayers >= -6 and NumPlayers",
"equal to MagicCircle+1 # Entering the Center space if destination == CENTER: assert",
"# MAGIC CIRCLE HANDLING # If I'm in the magic circle, I can",
"marble+die-1 is equal to MagicCircle+1 # Entering the Center space if destination ==",
"if destination == CENTER: assert CenterSpace != color moveDesc += \"[Center] \" if",
"if yep: note = \"[Center\" if CenterSpace: note += \" & Bonk \"",
"note = \"[Bonk \" + Board[loc] + \"]\" if not ValidMove(dude, loc, die,",
"destination via die is valid # Returns True / False # # This",
"to hate Blue in this game myStart = BOARDSIZE if marble < myStart",
"if testloc >= HOMESIZE: # Ran off the end of Home badMove =",
"of space around the main track CENTER=98 # \"Location\" of the center of",
"dude+die, die, color): assert False response.append([dude, dude+die, \"[Home]\", 0]) # \"NORMAL\" MOVEMENT elif",
"\"Location\" of the center of death. BASE=99 # \"Location\" for base spots. All",
"10) else: #output[i] = (\"-\") output[i] = chr(0x00B7) # A nice dot #print",
"h == \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[h]+h[0].lower()+creset, end=\"\")",
"Setup = 0 except ValueError: print (\"Please enter a number between 2 and",
"x: x[0]) return sub_li # # GetMoves (color, die) # # Return a",
"walk # back to the nearest magic circle space, checking # that walk.",
"77 ] # Locations for the magic circle spaces Base = {} #",
"Home assert Board[destination] != color moveDesc += \"\" + str(destination) + \" \"",
"and NumPlayers <= -2: print (\"Like tears in rain.\") robotMode = 1 elif",
"elif loc < myStart: if Board[loc]: note = \"[Bonk \" + Board[loc] +",
"the MagicCircle, then I # can enter the Center. dude+die-1 is equal to",
"+= \"Home[\" + str(destination-HOME+1) + \"]\" else: # Board destination is not the",
"marble == BASE: assert destination == Start[color] if (die == 1 or die",
"True else: # The destination is not in the magic circle, so walk",
"die, color): assert False response.append([dude, HOME+homeloc, \"[Home]\", 0]) # Still on the Board",
"many players? \")) if NumPlayers == 0: print (\"The only way to win",
"Occupied space else: # If you're on the magic circle, you get an",
"\" & Bonk \" + Board[i] note += \"]\" if not ValidMove(dude, i,",
"= Move(pColor, src, dst) Display() print (response) if myRoll == 6: print(\"%s rolled",
"# SortMoves(myList) # # Used by .sorted to return lists in order def",
"\" & \" if Board[(dude+die)%BOARDSIZE]: note += \"Bonk \" + Board[(dude+die)%BOARDSIZE] if (dude+die)%BOARDSIZE",
"dude+die-1 is equal to MagicCircle+1 if dude+die-1 in MagicCircle and CenterSpace != color:",
"casting black magic to # do that. thiscolor = list(Start.keys())[list(Start.values()).index(i)] #output[i] = startColor[thiscolor]+\"#\"+creset",
"\"\\033[1;31;40m\", \"Cyan\": \"\\033[1;36;40m\", \"Purple\": \"\\033[1;35;40m\", \"Green\": \"\\033[1;32;40m\", \"White\": \"\\033[1;37;40m\", } # Reset the",
"i in range(0,BOARDSIZE): if i >=0 and i < 21: if i ==",
"== CENTER: assert marble+die-1 in MagicCircle if CenterSpace == color: return False for",
"(dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"[\" if (dude+die)%BOARDSIZE in MagicCircle: note",
"Setup=1 NumPlayers = int(input(\"How many players? \")) if NumPlayers == 0: print (\"The",
"else: # The destination is not in the magic circle, so walk #",
"I have grown to hate Blue in this game myStart = BOARDSIZE if",
"# Entering the Center space if destination == CENTER: assert marble+die-1 in MagicCircle",
"your home? Colors = [ \"Blue\", \"Red\", \"Cyan\", \"Purple\", \"Green\", \"White\" ] Board",
"MagicCircle) or (Board[finalspot]): note += \"[\" if finalspot in MagicCircle: note += \"Magic",
"of 0 myStart = BOARDSIZE for i in range(1,die+1): testloc = dude+i if",
"in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"]\" if not ValidMove(dude, (dude+die)%BOARDSIZE, die, color):",
"str(source-HOME+1) + \"] -> \" else: assert Board[source] == color Board[source] = \"\"",
"# Remove the marble from the base assert Base[color].count(color) > 0 Base[color].remove(color) #",
"rolled: %d\\n\" %(pColor, myRoll)) moves = GetMoves(pColor, myRoll) if not moves: print (\"No",
"str(source) + \" -> \" # Deal with possible destinations if destination ==",
"= startColor[thiscolor]+\"#\"+creset output[i] = startColor[thiscolor]+chr(0x00BB)+creset output[i] = startColor[thiscolor]+chr(0x033F)+creset #print (startColor[thiscolor]+\"#\"+creset, end=\"\") elif i",
"# Now verify that I didn't pass a teammate between dude # and",
"means Home[color] hp = marble-HOME # hp means Home Position for i in",
"of the center of death. BASE=99 # \"Location\" for base spots. All are",
"NumPlayers = -6 robotMode = 1 elif NumPlayers >= -6 and NumPlayers <=",
"grown to hate Blue in this game myStart = BOARDSIZE if marble <",
"# circle, that isn't bonking because it is me. special=0 if dude ==",
"# and out badMove=0 circleBlock=0 # Check magic circle spots I traversed for",
"magic circle if i in MagicCircle: #output[i] = \"*\" #print (\"*\", end=\"\") output[i]",
"print (\"\\t\", end=\"\") print (output[104-i], end=\"\") # Print it backwards print(\"\\n\") for p",
"21: if i == 0: print (\"\\t\", end=\"\") print(output[i], end=\"\") if i ==",
"destination - HOME # homeloc is (potential) home space # Move into Home",
"print (\"[Entering] ValidMove(src=%d, dest=%d, die=%d, color=%s)\" %(marble, destination, die, color)) assert die >",
"-3: Players.append(\"Blue\") Players.append(\"Cyan\") Players.append(\"Green\") elif NumPlayers == 4 or NumPlayers == -4: Players.append(\"Blue\")",
"in range(0,BOARDSIZE)] CenterSpace = \"\" MagicCircle = [ 7, 21, 35, 49, 63,",
"Deal with possible destinations if destination == CENTER: assert CenterSpace != color moveDesc",
"(\"Bonk! %s hits %s!\" %(color, CenterSpace)) moveDesc += \"Bonk \" + CenterSpace +",
"= MagicCircle.index(marble) for i in range(0,die+1): if destination-i in MagicCircle: magicDestination = MagicCircle.index(destination-i)",
"is not in the magic circle, so walk # back to the nearest",
"is in Base, see if it can get out if dude == BASE:",
"\"NORMAL\" MOVEMENT elif Board[(dude+die)%BOARDSIZE] != color: selfPass = 0 for i in range(1,die):",
"\"[\" if finalspot in MagicCircle: note += \"Magic Circle\" if finalspot in MagicCircle",
"assert False distance = BOARDSIZE - ((dude+die)%BOARDSIZE - Start[color]) % BOARDSIZE response.append([dude, (dude+die)%BOARDSIZE,",
"0 myStart = Start[color] if myStart == 0: # HACK for Blue with",
"(dude+die)%BOARDSIZE >= Start[color]) or \\ (Start[color] == 0 and dude < Start[color]+BOARDSIZE and",
"NumPlayers == 3 or NumPlayers == -3: Players.append(\"Blue\") Players.append(\"Cyan\") Players.append(\"Green\") elif NumPlayers ==",
"out of circle MoveToCheck = (circleExit + t)%BOARDSIZE if Board[MoveToCheck] == color: #",
"between 2 and 6.\") Setup = 0 except TypeError: print (\"Please enter a",
"CENTER: print(\"\\t[%d] Center -> %d %s\" %(option,finish,note)) elif strt == BASE: print (\"\\t[%d]",
"int(input(\"How many players? \")) if NumPlayers == 0: print (\"The only way to",
"if pColor == \"Red\" or pColor == \"Purple\": # Blood shall flow GotInput",
"True / False # # This is pretty much a duplicate of GetMoves()",
"Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") elif NumPlayers == 5 or NumPlayers == -5: Players.append(\"Blue\") Players.append(\"Purple\")",
"assert False distance = BOARDSIZE - (i - Start[color]) % BOARDSIZE response.append([dude, i,",
"space of 0, # because of modulo problems. elif (dude < Start[color] and",
"0: # HACK for Blue with start of 0 myStart = BOARDSIZE for",
"== 6: print(\"%s rolled a 6! Take another turn.\" %pColor) again=1 if IsWinner(pColor):",
"> 6: print (\"Please enter a number between 2 and 6.\") Setup=0 except",
"between 2 and 6.\") Setup = 0 print (\"Preparing a %d player game.\"",
"HOME # homeloc is (potential) home space # Move into Home if homeloc",
"- Check %d\" %(color,die,dude)) note =\"\" # Just in case, clear out any",
"+= \"Bonk \" + Board[finalspot] if finalspot in MagicCircle or Board[finalspot]: note +=",
"Center -> %d %s\" %(option,finish,note)) elif strt == BASE: print (\"\\t[%d] Base ->",
"valid=1 for i in range(1,die+1): if(hp+i >= HOMESIZE): valid=0 continue if hp+i >",
"where I started special=1 note = \"\" if (finalspot in MagicCircle) or (Board[finalspot]):",
"global CenterSpace moveDesc = color + \": \" # Remove marble from source",
"Leaving the Center space if marble == CENTER: if die==1 and Board[destination] !=",
"= [\"-\" for x in range(0,BOARDSIZE)] for i in range(0,BOARDSIZE): space = Board[i]",
"circle spaces Base = {} # Dict of each color's base status Home",
"Home[color][i] != color: win=0 break return bool(win) def TkSetup(): root = tkinter.Tk() root.title(\"Marbles!\")",
"\"Cyan\", \"Purple\", \"Green\", \"White\" ] Board = [\"\" for x in range(0,BOARDSIZE)] CenterSpace",
"CENTER=98 # \"Location\" of the center of death. BASE=99 # \"Location\" for base",
"of each color's base status Home = {} # Dict of each color's",
"> 0 Base[color].remove(color) # The destination is that color's start destination = Start[color]",
"for j in range(magicStart, magicDestination+1): if Board[MagicCircle[j]] == color: if marble == destination",
"[ \"Blue\", \"Red\", \"Cyan\", \"Purple\", \"Green\", \"White\" ] Board = [\"\" for x",
"it is me. special=0 if dude == finalspot: # End where I started",
"note)) else: print(\"\\t[%d] %d -> Home[%d] %s\" %(option, strt, finish-HOME+1, note)) elif strt",
"bop around by adding die values # to the index in that list",
"then I # can enter the Center. marble+die-1 is equal to MagicCircle+1 #",
"Setup=0 except KeyError: print (\"Please enter a number between 2 and 6.\") Setup",
"distance]) # Movement WITHIN Home elif dude >= HOME: hm = Home[color] #",
"strt == CENTER: print(\"\\t[%d] Center -> %d %s\" %(option,finish,note)) elif strt == BASE:",
"elif source >= HOME: Home[color][source-HOME] = \"\" moveDesc += \"Home[\" + str(source-HOME+1) +",
"0: # I have grown to hate Blue in this game myStart =",
"\"[Magic Circle\" if Board[i]: note += \" & Bonk \" + Board[i] note",
"pColor = Players[p] myRoll = Roll() print (\"\\n%s rolled: %d\\n\" %(pColor, myRoll)) moves",
"False distance = BOARDSIZE - 8 response.append([dude, CENTER, note, distance]) # If I'm",
"in MagicCircle) or (Board[finalspot]): note += \"[\" if finalspot in MagicCircle: note +=",
"Tk or something else. # def Display(): # Color! # ANSI color codes",
"assert color in Players # print (\"[Entering] GetMoves(color=%s die=%d)\" %(color,die)) # List that",
"and then continue the normal # track, or hope 2 magic circle space",
"to one past the MagicCircle, then I # can enter the Center. marble+die-1",
"== CENTER: if die==1 and Board[destination] != color: return True else: return False",
"def SortMoves(sub_li): sub_li.sort(key = lambda x: x[3]) #sub_li.sort(reverse=True,key = lambda x: x[0]) return",
"= chr(0x00A4) # cool circle thing # Use a # to indicate start",
"\"Green\": 56, \"White\": 70 } # # Roll(): # # Roll a die.",
"out badMove=0 circleBlock=0 # Check magic circle spots I traversed for mc in",
"die. # Returns an int between 1 and 6 # def Roll(): return",
"verify that I didn't pass a teammate between dude # and out badMove=0",
"the player .. or she chooses 1 # Deckard is a replicant! if",
"(chr(0x00B7), end=\"\") else: print (ccode[b]+b[0].lower()+creset, end=\"\") print (\"\\tHome:\\t\", end=\"\") for h in Home[p]:",
"= int(input(\"How many players? \")) if NumPlayers == 0: print (\"The only way",
"# # Bonk! # # send a guy back to base # def",
"die, color): assert False response.append([dude, Start[color], note, BOARDSIZE]) firstStart=0 continue else: continue #",
"color Board[source] = \"\" moveDesc += \"\" + str(source) + \" -> \"",
"Only want to add Start once for dude in Marbles[color]: # print (\"[]",
"= BOARDSIZE - 8 response.append([dude, CENTER, note, distance]) # If I'm in the",
"= [ \"Blue\", \"Red\", \"Cyan\", \"Purple\", \"Green\", \"White\" ] Board = [\"\" for",
"if finalspot in MagicCircle or Board[finalspot]: note += \"]\" if not ValidMove(dude, finalspot,",
"+ CenterSpace + \"!\" Bonk(CENTER) CenterSpace = color elif destination >= HOME: assert",
"Still on the Board elif loc < myStart: if Board[loc]: note = \"[Bonk",
"in range(1,die): if Board[(marble+i)%BOARDSIZE] == color: return False return True # Catch all",
"moveDesc += \"[Center] -> \" elif source == BASE: # Remove the marble",
"by adding die values # to the index in that list magicStart =",
"x[3]) #sub_li.sort(reverse=True,key = lambda x: x[0]) return sub_li # # GetMoves (color, die)",
"finalspot: # End where I started special=1 note = \"\" if (finalspot in",
"and (1==firstStart): note = \"[Start\" if Board[Start[color]]: note += \" & Bonk \"",
"if not ValidMove(dude, i, die, color): assert False distance = BOARDSIZE - (i",
"magic circle ending where you start if marble == destination and die ==",
"myStart # How many spaces into Home? if testloc >= HOMESIZE: # Ran",
"start space of 0, # because of modulo problems. elif (dude < Start[color]",
"if \"Bonk\" in moves[i][2]: selection = i+1 print (\"Kill!\", moves[i]) break if not",
"Home = {} # Dict of each color's home status Marbles = {}",
"marble not in MagicCircle # MOVEMENT INTO HOME myStart = Start[color] if myStart",
"= Roll() print (\"\\n%s rolled: %d\\n\" %(pColor, myRoll)) moves = GetMoves(pColor, myRoll) if",
"6 and want to do # a full revolution if dude != MoveToCheck:",
"while not GameOver: # Main game loop turnNum += 1 for p in",
"if NumPlayers == 2 or NumPlayers == -2: Players.append(\"Blue\") Players.append(\"Purple\") elif NumPlayers ==",
"color in Colors assert color in Players # print (\"[Entering] GetMoves(color=%s die=%d)\" %(color,die))",
"[ \"\", \"\", \"\", \"\"] # Where are my marbles? All your base",
"for the magic circle spaces Base = {} # Dict of each color's",
"(\"\\t[%d] Base -> Start %s\" %(option,note)) else: if finish == CENTER: print (\"\\t[%d]",
"# magic circle space if dude == CENTER: if die==1: for i in",
"locations Players = [] # List of active players # Marbles[color] : {",
": space# Start = { \"Blue\": 0, \"Red\": 14, \"Cyan\": 28, \"Purple\": 42,",
"# Done! # print (\"[Leaving] GetMoves(color=%s die=%d) =\" %(color,die),response) return SortMoves(response) # #",
"randint(1,len(moves)) GotInput = 1 while not GotInput: option=1 # Counter for the user",
"first option selection = 1 GotInput = 1 elif pColor == \"Green\": #",
"(\"Base:\\t\", end=\"\") for b in Base[p]: if b == \"\": #print (\"-\", end=\"\")",
"i in range(0,die+1): if destination-i in MagicCircle: magicDestination = MagicCircle.index(destination-i) # Check all",
"moveDesc += \"\" + str(source) + \" -> \" # Deal with possible",
"ValidMove(dude, (dude+die)%BOARDSIZE, die, color): assert False distance = BOARDSIZE - ((dude+die)%BOARDSIZE - Start[color])",
"= startColor[thiscolor]+chr(0x00BB)+creset output[i] = startColor[thiscolor]+chr(0x033F)+creset #print (startColor[thiscolor]+\"#\"+creset, end=\"\") elif i % 10 ==",
"note += \"Bonk \" + Board[(dude+die)%BOARDSIZE] if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note",
"magic to # do that. thiscolor = list(Start.keys())[list(Start.values()).index(i)] #output[i] = startColor[thiscolor]+\"#\"+creset output[i] =",
"start spaces elif i in Start.values(): # What's this? I need to get",
"from source if source == CENTER: assert CenterSpace == color CenterSpace = \"\"",
"\" elif source >= HOME: Home[color][source-HOME] = \"\" moveDesc += \"Home[\" + str(source-HOME+1)",
"circle thing # Use a # to indicate start spaces elif i in",
"- (i - Start[color]) % BOARDSIZE response.append([dude, i, note, distance]) continue assert dude",
"here on out.. circleBlock = 1 continue if circleBlock: continue if not badMove:",
"# Blood shall flow GotInput = 1 for i in range(0,len(moves)): if \"Bonk\"",
"(\"Please enter a number between 2 and 6.\") Setup = 0 except TypeError:",
"# Counter for the user input menu for move in moves: strt, finish,",
"== \"\": # Use a * to indicate magic circle if i in",
"Colors: Base[c] = [ c, c, c, c] Home[c] = [ \"\", \"\",",
"adding die values # to the index in that list circleNum = MagicCircle.index(dude)",
"End of for i in range(1,die) if not badMove: # Valid moves only",
"\"[\" if (dude+die)%BOARDSIZE in MagicCircle: note += \"Magic Circle\" if (dude+die)%BOARDSIZE in MagicCircle",
"Start = { \"Blue\": 0, \"Red\": 14, \"Cyan\": 28, \"Purple\": 42, \"Green\": 56,",
"print (\"Bad input\") GotInput = 0 except TypeError: print (\"Bad input\") GotInput =",
"testloc -= myStart # How many spaces into Home? if testloc >= HOMESIZE:",
"# \"Location\" of the center of death. BASE=99 # \"Location\" for base spots.",
"we are in the magic # circle list, so we can bop around",
"# For each marble, figure out all possible moves firstStart=1 # Only want",
"continue assert dude != CENTER # MAGIC CIRCLE HANDLING # If I'm in",
"colors for c in Colors: Base[c] = [ c, c, c, c] Home[c]",
"Base, see if it can get out if dude == BASE: if (die",
"Board[destination-i] == color: return False return True assert marble not in MagicCircle #",
"GotInput = 0 except TypeError: print (\"Bad input\") GotInput = 0 src,dst,note,distance =",
"Board[space] = \"\" Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy) # # Move(color, source, destination): # #",
"out if marble == BASE: assert destination == Start[color] if (die == 1",
"assert destination >= HOME hm = Home[color] # hm means Home[color] hp =",
"2 or NumPlayers == -2: Players.append(\"Blue\") Players.append(\"Purple\") elif NumPlayers == 3 or NumPlayers",
"# I have grown to hate Blue in this game myStart = BOARDSIZE",
"the Center space if marble == CENTER: if die==1 and Board[destination] != color:",
"regular spots after I left circle for t in range(0,die-i+1): # t is",
"%d %s\" %(option, strt, finish, note)) option+=1 try: selection = int(input(pColor + \":",
"MOVEMENT INTO HOME # NB: Add special cases for Blue, with start space",
"in MagicCircle: # circleNum is the index of where we are in the",
"return sub_li # # GetMoves (color, die) # # Return a list of",
"turns!\" %(pColor, turnNum)) GameOver = 1 return # We're out of here! Main()",
"human is needed numPlayers = Setup() if numPlayers <= 0: robotMode = 1",
"and marble+die >= myStart: # Test the spaces between here and my final",
"else: # Still on the main board if Board[testloc] == color: # Can't",
"pColor == \"Blue\": selection = 1 GotInput = 1 if pColor == \"Red\"",
"circle space and then continue the # normal track, or ... if dude",
"Board[space] Board[space] = \"\" Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy) # # Move(color, source, destination): #",
"Board[i] if space == \"\": # Use a * to indicate magic circle",
"\"]\" else: # Board destination is not the center or Home assert Board[destination]",
"hp = dude-HOME # hp means Home Position valid=1 for i in range(1,die+1):",
"out if dude == BASE: if (die == 1 or die == 6)",
"== 6 and marble in MagicCircle: return True # MAGIC CIRCLE HANDLING if",
"BOARDSIZE if marble < myStart and marble+die >= myStart: # Test the spaces",
"-> \" # Deal with possible destinations if destination == CENTER: assert CenterSpace",
"letter if i in MagicCircle: output[i] = ccode[space]+space[0].upper()+creset #print (ccode[space]+space[0].upper()+creset, end=\"\") else: output[i]",
"Home[c] = [ \"\", \"\", \"\", \"\"] # Where are my marbles? All",
"Board[(dude+die)%BOARDSIZE]: note += \"Bonk \" + Board[(dude+die)%BOARDSIZE] if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]:",
"it can get out if dude == BASE: if (die == 1 or",
"normal track, or # hop one magic circle space and then continue the",
"space and then continue the # normal track, or ... if dude in",
"+= \"[\" if finalspot in MagicCircle: note += \"Magic Circle\" if finalspot in",
"i in MagicCircle: if Board[i] != color: note = \"[Magic Circle\" if Board[i]:",
"end=\"\") print() # # Setup(): # # Gets the board ready for a",
"be replaced with Tk or something else. # def Display(): # Color! #",
"print (\"That's not an option. Try again.\") GotInput = 0 except ValueError: if",
"if source == CENTER: assert CenterSpace == color CenterSpace = \"\" moveDesc +=",
"destination and die != 6: return False # If this marble is in",
"if space == \"\": # Use a * to indicate magic circle if",
"only. # But, you know, this is working. # def ValidMove(marble, destination, die,",
"on myself if mc == 6: pass else: badMove = 1 # Check",
"WITHIN Home elif dude >= HOME: hm = Home[color] # hm means Home[color]",
"%d, %s)\" %(src,dst,myRoll,pColor)) return False response = Move(pColor, src, dst) Display() print (response)",
"< 7 assert color in Colors assert color in Players # print (\"[Entering]",
"t)%BOARDSIZE if Board[MoveToCheck] == color: # Handle case where I roll a 6",
"[] # List of active players # Marbles[color] : { location0, location1, location2,",
"player rolls a 6 while again: again=0 pColor = Players[p] myRoll = Roll()",
"Move into Home if homeloc >= 0 and homeloc < HOMESIZE: if not",
"%d %s\" %(option, strt, finish,note)) else: print (\"\\t[%d] %d -> %d %s\" %(option,",
"by adding die values # to the index in that list circleNum =",
"if (die == 1 or die == 6) and (Board[Start[color]]!=color): return True return",
"# Use a # to indicate start spaces elif i in Start.values(): #",
"# Catch all assert False return False # # SortMoves(myList) # # Used",
"know, this is working. # def ValidMove(marble, destination, die, color): # print (\"[Entering]",
"if Board[i] != color: note = \"[Magic Circle\" if Board[i]: note += \"",
"Start[color]) % BOARDSIZE response.append([dude, i, note, distance]) continue assert dude != CENTER #",
"Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy) # # Move(color, source, destination): # # Move marble of color",
"list # Special processing: If the roll is a 6 in magic #",
"GetMoves() but it serves as a # check because I was having problems.",
"testloc is in the Home zone testloc -= myStart # How many spaces",
"your base are belong to us. Marbles[c] = [BASE, BASE, BASE, BASE ]",
"assert marble < HOME+HOMESIZE assert destination >= HOME hm = Home[color] # hm",
"Handle case where I roll a 6 and want to do # a",
"def TkSetup(): root = tkinter.Tk() root.title(\"Marbles!\") canvas = tk.Canvas(root, width=200, height=200, borderwidth=0, bg=\"black\")",
"die, color): assert False distance = BOARDSIZE - ((dude+die)%BOARDSIZE - Start[color]) % BOARDSIZE",
"\"[Bonk \" + Board[loc] + \"]\" if not ValidMove(dude, loc, die, color): assert",
"= [ 7, 21, 35, 49, 63, 77 ] # Locations for the",
"hp+i > HOMESIZE or hm[hp+i] == color: return False return True # \"NORMAL\"",
"of Players # def Setup(): # Initialize the bases and colors for c",
"\"]\" if not ValidMove(dude, finalspot, die, color): assert False distance = BOARDSIZE -",
"[ c, c, c, c] Home[c] = [ \"\", \"\", \"\", \"\"] #",
"NumPlayers # # Bonk! # # send a guy back to base #",
"& \" if Board[finalspot] and not special: note += \"Bonk \" + Board[finalspot]",
"available.\") continue GotInput = 0 selection = 0 # Red always goes for",
"if destination == CENTER: assert marble+die-1 in MagicCircle if CenterSpace == color: return",
"+ Board[loc] + \"]\" if not ValidMove(dude, loc, die, color): assert False distance",
"# Marbles[color] : { location0, location1, location2, location3 } # Start[color] : space#",
"[ 7, 21, 35, 49, 63, 77 ] # Locations for the magic",
"# Passed through teammate # 6 in magic circle means I can land",
"homeloc < HOMESIZE: return True assert False return False # Something insane happened?",
"if mc == 6: pass else: badMove = 1 # Check regular spots",
"CenterSpace + \"!\" Bonk(CENTER) CenterSpace = color elif destination >= HOME: assert Home[color][destination-HOME]",
"is pretty much a duplicate of GetMoves() but it serves as a #",
"\"]\" if not ValidMove(dude, loc, die, color): assert False distance = BOARDSIZE -",
"+ str(destination) + \" \" # Deal with bonking if destination is not",
"Start[color] : space# Start = { \"Blue\": 0, \"Red\": 14, \"Cyan\": 28, \"Purple\":",
"#print (\"*\", end=\"\") output[i] = chr(0x00A4) # cool circle thing # Use a",
"= color elif destination >= HOME: assert Home[color][destination-HOME] != color Home[color][destination-HOME] = color",
"GotInput = 1 for i in range(0,len(moves)): if \"Bonk\" in moves[i][2]: selection =",
"option selection = 1 GotInput = 1 elif pColor == \"Green\": # Take",
"# Can't pass teammate return False # Checked all intermediate spaces, and destination",
"selection > len(moves): print (\"That's not an option. Try again.\") GotInput = 0",
"range(0,BOARDSIZE)] CenterSpace = \"\" MagicCircle = [ 7, 21, 35, 49, 63, 77",
"== 1 or die == 6) and (Board[Start[color]]!=color): return True return False assert",
"if the move from marble to destination via die is valid # Returns",
"== \"Purple\": # Blood shall flow GotInput = 1 for i in range(0,len(moves)):",
"# can enter the Center. marble+die-1 is equal to MagicCircle+1 # Entering the",
"magic circle space and then continue the # normal track, or ... if",
"for the marbles ccode={ # [magic];Attrib;FG;BGm \"Blue\": \"\\033[1;97;44m\", \"Red\": \"\\033[1;97;41m\", \"Cyan\": \"\\033[1;97;46m\", \"Purple\":",
"# def Setup(): # Initialize the bases and colors for c in Colors:",
"input\") GotInput = 0 src,dst,note,distance = moves[selection-1] if not ValidMove(src,dst,myRoll,pColor): print (\"ERROR: ValidMove(%d,",
"marble != destination and die != 6: return False # If this marble",
"i < 21: if i == 0: print (\"\\t\", end=\"\") print(output[i], end=\"\") if",
"6 in the magic circle ending where you start if marble == destination",
"return moveDesc # # ValidMove (marble, destination, die) # # Check if the",
"codes for the marbles ccode={ # [magic];Attrib;FG;BGm \"Blue\": \"\\033[1;97;44m\", \"Red\": \"\\033[1;97;41m\", \"Cyan\": \"\\033[1;97;46m\",",
"2 and 6.\") Setup = 0 print (\"Preparing a %d player game.\" %NumPlayers)",
"of 6 in the magic circle ending where you start if marble ==",
"badMove = 1 elif Home[color][testloc]: # somebody in the way badMove = 1",
"\"\" if (finalspot in MagicCircle) or (Board[finalspot]): note += \"[\" if finalspot in",
"+= \" & Bonk \" + CenterSpace note += \"]\" if not ValidMove(dude,",
"a # check because I was having problems. :) I should probably remove",
"note # If this marble is in Base, see if it can get",
"didn't pass a teammate between dude # and out badMove=0 circleBlock=0 # Check",
"or selection > len(moves): print (\"That's not an option. Try again.\") GotInput =",
"Home[color][source-HOME] = \"\" moveDesc += \"Home[\" + str(source-HOME+1) + \"] -> \" else:",
"in range(0, die+1): circleExit = MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot = (circleExit + (die-i))%BOARDSIZE # Now",
"numPlayers *= -1 # TkSetup() Display() # Show the initial game board while",
"from the base assert Base[color].count(color) > 0 Base[color].remove(color) # The destination is that",
"note = \"[Center\" if CenterSpace: note += \" & Bonk \" + CenterSpace",
"print (\"\\tHome:\\t\", end=\"\") for h in Home[p]: if h == \"\": #print (\"-\",",
"between where I entered # and where I exited for j in range(magicStart,",
"case, clear out any previous note # If this marble is in Base,",
"color: valid=0 continue if valid: if not ValidMove(dude, dude+die, die, color): assert False",
"= 0 src,dst,note,distance = moves[selection-1] if not ValidMove(src,dst,myRoll,pColor): print (\"ERROR: ValidMove(%d, %d, %d,",
"not in MagicCircle and marble < BOARDSIZE and destination < BOARDSIZE: for i",
"location2, location3 } # Start[color] : space# Start = { \"Blue\": 0, \"Red\":",
"for i in range(1,die): if Board[(marble+i)%BOARDSIZE] == color: return False return True #",
"Add special cases for Blue, with start space of 0, # because of",
"return True else: return False assert marble != CENTER assert destination != CENTER",
"die+1): circleExit = MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot = (circleExit + (die-i))%BOARDSIZE # Now verify that",
"dude == finalspot: # End where I started special=1 note = \"\" if",
"# If this marble is in Base, see if it can get out",
"= (\"-\") output[i] = chr(0x00B7) # A nice dot #print (\"-\", end=\"\") #",
"print (\"\\t[%d] %d -> Center %s\" %(option,strt,note)) elif finish in MagicCircle: print (\"\\t[%d]",
"dot #print (\"-\", end=\"\") # Occupied space else: # If you're on the",
"= [] # List of active players # Marbles[color] : { location0, location1,",
"return lists in order def SortMoves(sub_li): sub_li.sort(key = lambda x: x[3]) #sub_li.sort(reverse=True,key =",
"except TypeError: print (\"Please enter a number between 2 and 6.\") Setup =",
"%s\" %(option, strt, finish,note)) else: print (\"\\t[%d] %d -> %d %s\" %(option, strt,",
"distance]) continue assert dude != CENTER # MAGIC CIRCLE HANDLING # If I'm",
"if circleBlock: continue if not badMove: # Add this to the list #",
"# Ran off the end of Home badMove = 1 elif Home[color][testloc]: #",
"# to the index in that list magicStart = MagicCircle.index(marble) for i in",
"Setup(): # # Gets the board ready for a new game, and assigns",
"for x in range(0,BOARDSIZE)] for i in range(0,BOARDSIZE): space = Board[i] if space",
"width=200, height=200, borderwidth=0, bg=\"black\") canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop() # # Main # def Main():",
"(\"%s wins in %d turns!\" %(pColor, turnNum)) GameOver = 1 return # We're",
"in MagicCircle: output[i] = ccode[space]+space[0].upper()+creset #print (ccode[space]+space[0].upper()+creset, end=\"\") else: output[i] = ccode[space]+space[0].lower()+creset #print",
"spots after I left circle for t in range(0,die-i+1): # t is number",
"take me to one past the MagicCircle, then I # can enter the",
"False # # This is pretty much a duplicate of GetMoves() but it",
"is working. # def ValidMove(marble, destination, die, color): # print (\"[Entering] ValidMove(src=%d, dest=%d,",
"= moves[selection-1] if not ValidMove(src,dst,myRoll,pColor): print (\"ERROR: ValidMove(%d, %d, %d, %s)\" %(src,dst,myRoll,pColor)) return",
"MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"[\" if (dude+die)%BOARDSIZE in MagicCircle: note += \"Magic",
"< 42: if i == 31: if CenterSpace: cen = ccode[CenterSpace]+CenterSpace[0].upper()+creset else: #cen",
"HOMESIZE: # Ran off the end of Home return False elif Home[color][testloc]: #",
"if there's a teammate at the destination if destination < BOARDSIZE: if Board[destination]",
"return False elif Home[color][testloc]: # somebody in the way return False else: #",
"# # IsWinner(color) # # Determine if color has won. Returns True/False #",
"the marble from the base assert Base[color].count(color) > 0 Base[color].remove(color) # The destination",
"in MagicCircle and marble < BOARDSIZE and destination < BOARDSIZE: for i in",
"\"Blue\": \"\\033[1;97;44m\", \"Red\": \"\\033[1;97;41m\", \"Cyan\": \"\\033[1;97;46m\", \"Purple\": \"\\033[1;97;45m\", \"Green\": \"\\033[1;35;42m\", \"White\": \"\\033[1;31;47m\", }",
"while not Setup: try: Setup=1 NumPlayers = int(input(\"How many players? \")) if NumPlayers",
"response.append([dude, HOME+homeloc, \"[Home]\", 0]) # Still on the Board elif loc < myStart:",
"!= 6: return False # If this marble is in Base, see if",
"dude # and out badMove=0 circleBlock=0 # Check magic circle spots I traversed",
"note)) option+=1 try: selection = int(input(pColor + \": Please select an option: \"))",
"%(color,die)) # List that we'll be returning with ALL valid moves response =",
"\" & \" if Board[finalspot] and not special: note += \"Bonk \" +",
"destination is not in the magic circle, so walk # back to the",
"= \"\" if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"[\" if (dude+die)%BOARDSIZE",
"1 else: # Still on the main board if Board[testloc%BOARDSIZE] == color: #",
"Board[i]: note += \" & Bonk \" + Board[i] note += \"]\" if",
"= 0 # Red always goes for the kill # White tried to",
"and marble < BOARDSIZE and destination < BOARDSIZE: for i in range(1,die): if",
"5 or NumPlayers == -5: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") else: Players.append(\"Blue\") Players.append(\"Purple\")",
"# Roll(): # # Roll a die. # Returns an int between 1",
"assert False distance = BOARDSIZE - (loc - Start[color]) % BOARDSIZE response.append([dude, loc,",
"# NB: Add special cases for Blue, with start space of 0, #",
"end of Home badMove = 1 elif Home[color][testloc]: # somebody in the way",
"(loc - Start[color]) % BOARDSIZE response.append([dude, loc, note, distance]) # Movement WITHIN Home",
"== -5: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") else: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\")",
"BASE: assert destination == Start[color] if (die == 1 or die == 6)",
"if Board[loc]: note = \"[Bonk \" + Board[loc] + \"]\" if not ValidMove(dude,",
"!= color: return True else: return False assert marble != CENTER assert destination",
"color: yep=0 if yep: note = \"[Center\" if CenterSpace: note += \" &",
"0 # Has the game been setup? while not Setup: try: Setup=1 NumPlayers",
"to return lists in order def SortMoves(sub_li): sub_li.sort(key = lambda x: x[3]) #sub_li.sort(reverse=True,key",
"# letter if i in MagicCircle: output[i] = ccode[space]+space[0].upper()+creset #print (ccode[space]+space[0].upper()+creset, end=\"\") else:",
"distance = BOARDSIZE - (finalspot - Start[color]) % BOARDSIZE response.append([dude, finalspot, note, distance])",
"die == 6) and (Board[Start[color]]!=color): return True return False assert marble != BASE",
"-> \" elif source == BASE: # Remove the marble from the base",
"location0, location1, location2, location3 } # Start[color] : space# Start = { \"Blue\":",
"# Returns an int between 1 and 6 # def Roll(): return randint(1,6)",
"else: return False assert marble != CENTER assert destination != CENTER # Special",
"is (potential) home space # Move into Home if homeloc >= 0 and",
"finalspot, die, color): assert False distance = BOARDSIZE - (finalspot - Start[color]) %",
"\": Please select an option: \")) GotInput = 1 if selection < 1",
"Blue in this game myStart = BOARDSIZE if marble < myStart and marble+die",
"# Number of space around the main track CENTER=98 # \"Location\" of the",
"\"\\033[1;34;40m\", \"Red\": \"\\033[1;31;40m\", \"Cyan\": \"\\033[1;36;40m\", \"Purple\": \"\\033[1;35;40m\", \"Green\": \"\\033[1;32;40m\", \"White\": \"\\033[1;37;40m\", } #",
"continue # # Handle \"regular\" motion starting here: # # CENTER SPACE HANDLING",
"way to win is not to play.\") NumPlayers = -6 robotMode = 1",
"are 99. HOME=100 # \"Location\" for home spots - 100, 101, 102, 103",
"# Start[color] : space# Start = { \"Blue\": 0, \"Red\": 14, \"Cyan\": 28,",
"cool circle thing # Use a # to indicate start spaces elif i",
"in this game myStart = BOARDSIZE if marble < myStart and marble+die >=",
"out all possible moves firstStart=1 # Only want to add Start once for",
"note += \"]\" if not ValidMove(dude, CENTER, die, color): assert False distance =",
"or hm[hp+i] == color: valid=0 continue if valid: if not ValidMove(dude, dude+die, die,",
"if marble == destination and die == 6 and marble in MagicCircle: return",
"valid: if not ValidMove(dude, dude+die, die, color): assert False response.append([dude, dude+die, \"[Home]\", 0])",
"Players.append(\"Red\") else: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") Players.append(\"Green\") return NumPlayers # # Bonk!",
"off the end of Home badMove = 1 elif Home[color][testloc]: # somebody in",
"source, destination): # # Move marble of color color from source to destination.",
"x in range(0,BOARDSIZE)] for i in range(0,BOARDSIZE): space = Board[i] if space ==",
"color's start destination = Start[color] moveDesc += \"[Base] -> \" elif source >=",
"and Board[(dude+die)%BOARDSIZE]: note += \" & \" if Board[(dude+die)%BOARDSIZE]: note += \"Bonk \"",
"Is the game over turnNum = 0 robotMode = 0 # A human",
"\" # Deal with bonking if destination is not empty if Board[destination]: moveDesc",
"an option. Try again.\") GotInput = 0 except ValueError: if len(moves) == 1:",
"str(destination-HOME+1) + \"]\" else: # Board destination is not the center or Home",
"Special processing: If the roll is a 6 in magic # circle, that",
"# Locations for the magic circle spaces Base = {} # Dict of",
"\"Location\" for home spots - 100, 101, 102, 103 HOMESIZE=4 # How big",
"HOMESIZE or hm[hp+i] == color: return False return True # \"NORMAL\" MOVEMENT if",
"== color: return False return True # Catch all assert False return False",
"print (\"ERROR: ValidMove(%d, %d, %d, %s)\" %(src,dst,myRoll,pColor)) return False response = Move(pColor, src,",
"if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] == color: # Passed through teammate # 6 in magic circle",
"of the classic marble board game that can be very # frustrating! #",
"\"\\033[1;97;44m\", \"Red\": \"\\033[1;97;41m\", \"Cyan\": \"\\033[1;97;46m\", \"Purple\": \"\\033[1;97;45m\", \"Green\": \"\\033[1;35;42m\", \"White\": \"\\033[1;31;47m\", } #",
"a full revolution if dude != MoveToCheck: # If it is not me,",
"== color: return False for i in range(1,die+1): if Board[(marble+i)%BOARDSIZE] == color: return",
"finish in MagicCircle: print (\"\\t[%d] %d -> %d %s\" %(option, strt, finish,note)) else:",
"assert False response.append([dude, Start[color], note, BOARDSIZE]) firstStart=0 continue else: continue # # Handle",
"< BOARDSIZE and destination < BOARDSIZE: for i in range(1,die): if Board[(marble+i)%BOARDSIZE] ==",
"0 # A human is needed numPlayers = Setup() if numPlayers <= 0:",
"\"!\" Bonk(CENTER) CenterSpace = color elif destination >= HOME: assert Home[color][destination-HOME] != color",
"// 10) else: #output[i] = (\"-\") output[i] = chr(0x00B7) # A nice dot",
"die, color): # print (\"[Entering] ValidMove(src=%d, dest=%d, die=%d, color=%s)\" %(marble, destination, die, color))",
"MAGIC CIRCLE HANDLING if marble in MagicCircle: # magicStart is the index of",
"# Check magic circle spots I traversed for mc in range(1,i+1): if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]]",
"response.append([dude, Start[color], note, BOARDSIZE]) firstStart=0 continue else: continue # # Handle \"regular\" motion",
"magic circle... for i in range(0, die+1): circleExit = MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot = (circleExit",
"1 continue if circleBlock: continue if not badMove: # Add this to the",
"not ValidMove(dude, loc, die, color): assert False distance = BOARDSIZE - (loc -",
"if destination < BOARDSIZE: if Board[destination] == color and marble != destination and",
"+ \"] -> \" else: assert Board[source] == color Board[source] = \"\" moveDesc",
"if i >=0 and i < 21: if i == 0: print (\"\\t\",",
"finalspot in MagicCircle: note += \"Magic Circle\" if finalspot in MagicCircle and Board[finalspot]",
"the list # Special processing: If the roll is a 6 in magic",
"= dude-HOME # hp means Home Position valid=1 for i in range(1,die+1): if(hp+i",
"destination, die, color): # print (\"[Entering] ValidMove(src=%d, dest=%d, die=%d, color=%s)\" %(marble, destination, die,",
"- Start[color]) % BOARDSIZE response.append([dude, i, note, distance]) continue assert dude != CENTER",
"\"Purple\": \"\\033[1;97;45m\", \"Green\": \"\\033[1;35;42m\", \"White\": \"\\033[1;31;47m\", } # ANSI color codes for start",
"note, distance]) # If I'm in the center and I got a one,",
"for magic circle... for i in range(0, die+1): circleExit = MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot =",
"or pColor == \"Purple\" or pColor == \"White\": # Always take the first",
"\"]\" if not ValidMove(dude, (dude+die)%BOARDSIZE, die, color): assert False distance = BOARDSIZE -",
"= 0 robotMode = 0 # A human is needed numPlayers = Setup()",
"Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") Players.append(\"Green\") return NumPlayers # # Bonk! # # send a",
"Start[color] if (die == 1 or die == 6) and (Board[Start[color]]!=color): return True",
"Lots of permutations for magic circle... for i in range(0, die+1): circleExit =",
"return False # If this marble is in Base, see if it can",
"destination and die == 6 and marble in MagicCircle: return True # MAGIC",
"that I didn't pass a teammate between dude # and out badMove=0 circleBlock=0",
"is equal to MagicCircle+1 if dude+die-1 in MagicCircle and CenterSpace != color: yep=1",
"for i in range(1,die) if not badMove: # Valid moves only loc =",
"picks randomly from choices # Blue is the player .. or she chooses",
"!= destination and die != 6: return False # If this marble is",
"main board if Board[testloc%BOARDSIZE] == color: # Can't pass teammate badMove = 1",
"(\"No moves available.\") continue GotInput = 0 selection = 0 # Red always",
"problems. elif (dude < Start[color] and (dude+die)%BOARDSIZE >= Start[color]) or \\ (Start[color] ==",
"-> %d %s\" %(option, strt, finish, note)) option+=1 try: selection = int(input(pColor +",
"tried to be optimal, but sucked so now takes 1 # Cyan takes",
"option selection = randint(1,len(moves)) GotInput = 1 while not GotInput: option=1 # Counter",
"\"\" MagicCircle = [ 7, 21, 35, 49, 63, 77 ] # Locations",
"Move(color, source, destination): # # Move marble of color color from source to",
"0 # Is the game over turnNum = 0 robotMode = 0 #",
"to the index in that list circleNum = MagicCircle.index(dude) # Lots of permutations",
"Bonk! # # send a guy back to base # def Bonk(space): if",
"= i+1 print (\"Kill!\", moves[i]) break if not selection: selection = 1 elif",
"startColor[thiscolor]+\"#\"+creset output[i] = startColor[thiscolor]+chr(0x00BB)+creset output[i] = startColor[thiscolor]+chr(0x033F)+creset #print (startColor[thiscolor]+\"#\"+creset, end=\"\") elif i %",
"range(magicStart, magicDestination+1): if Board[MagicCircle[j]] == color: if marble == destination and die ==",
"final location for # teammates for i in range(1,die+1): testloc = marble+i if",
"Now verify that I didn't pass a teammate between dude # and out",
"in that list circleNum = MagicCircle.index(dude) # Lots of permutations for magic circle...",
"= BOARDSIZE - ((dude+die)%BOARDSIZE - Start[color]) % BOARDSIZE response.append([dude, (dude+die)%BOARDSIZE, note, distance]) #",
"0 and die < 7 assert color in Colors assert color in Players",
"else: continue # # Handle \"regular\" motion starting here: # # CENTER SPACE",
"MoveToCheck: # If it is not me, then it is someone else badMove",
"# Bonk! # # send a guy back to base # def Bonk(space):",
"intermediate spaces, and destination space homeloc = destination - HOME # homeloc is",
"in MagicCircle: note += \"Magic Circle\" if (dude+die)%BOARDSIZE in MagicCircle and Board[(dude+die)%BOARDSIZE]: note",
"marble == CENTER: if die==1 and Board[destination] != color: return True else: return",
"in MagicCircle and CenterSpace != color: yep=1 for i in range(1,die+1): if Board[dude+i]",
"I can land on myself if mc == 6: pass else: badMove =",
"we'll be returning with ALL valid moves response = [] # For each",
"1 GotInput = 1 elif pColor == \"Green\": # Take a random option",
"moves response = [] # For each marble, figure out all possible moves",
"marble board game that can be very # frustrating! # from random import",
"6 while again: again=0 pColor = Players[p] myRoll = Roll() print (\"\\n%s rolled:",
"(1==firstStart): note = \"[Start\" if Board[Start[color]]: note += \" & Bonk \" +",
"state of the board. # XXX: This could be replaced with Tk or",
"# # Return a list of the valid player options with a die",
"moveDesc += \"[Center] \" if CenterSpace: print (\"Bonk! %s hits %s!\" %(color, CenterSpace))",
"elif (dude < Start[color] and (dude+die)%BOARDSIZE >= Start[color]) or \\ (Start[color] == 0",
"= \"[Bonk \" + Board[loc] + \"]\" if not ValidMove(dude, loc, die, color):",
"Prints out the state of the board. # XXX: This could be replaced",
"63, 77 ] # Locations for the magic circle spaces Base = {}",
"if space == CENTER: deadGuy = CenterSpace else: deadGuy = Board[space] Board[space] =",
"can land on myself if mc == 6: pass else: badMove = 1",
"can continue normal track, or # hop one magic circle space and then",
"spaces Base = {} # Dict of each color's base status Home =",
"== 0: print (\"\\t\", end=\"\") print(output[i], end=\"\") if i == 20: print() elif",
"== -3: Players.append(\"Blue\") Players.append(\"Cyan\") Players.append(\"Green\") elif NumPlayers == 4 or NumPlayers == -4:",
"\"[Center] \" if CenterSpace: print (\"Bonk! %s hits %s!\" %(color, CenterSpace)) moveDesc +=",
"I started special=1 note = \"\" if (finalspot in MagicCircle) or (Board[finalspot]): note",
"on the main board if Board[testloc%BOARDSIZE] == color: # Can't pass teammate badMove",
"output[i] = startColor[thiscolor]+chr(0x033F)+creset #print (startColor[thiscolor]+\"#\"+creset, end=\"\") elif i % 10 == 0: output[i]",
"Flag for when a player rolls a 6 while again: again=0 pColor =",
"0]) # Still on the Board elif loc < myStart: if Board[loc]: note",
"MagicCircle: #output[i] = \"*\" #print (\"*\", end=\"\") output[i] = chr(0x00A4) # cool circle",
"= [BASE, BASE, BASE, BASE ] robotMode = 0 Setup = 0 #",
"finalspot = (circleExit + (die-i))%BOARDSIZE # Now verify that I didn't pass a",
"teammate between dude # and out badMove=0 circleBlock=0 # Check magic circle spots",
"Setup: try: Setup=1 NumPlayers = int(input(\"How many players? \")) if NumPlayers == 0:",
"if Board[destination] == color and marble != destination and die != 6: return",
"i in range(1,die): if Board[(marble+i)%BOARDSIZE] == color: return False return True # Catch",
"main track CENTER=98 # \"Location\" of the center of death. BASE=99 # \"Location\"",
"= 84 # Number of space around the main track CENTER=98 # \"Location\"",
"# Returns: Number of Players # def Setup(): # Initialize the bases and",
"Board[(dude+die)%BOARDSIZE] != color: selfPass = 0 for i in range(1,die): if Board[(dude+i)%BOARDSIZE] ==",
"= move if finish >= HOME: if strt >= HOME: print(\"\\t[%d] Home[%d] ->",
"source >= HOME: Home[color][source-HOME] = \"\" moveDesc += \"Home[\" + str(source-HOME+1) + \"]",
"GotInput = 1 if selection < 1 or selection > len(moves): print (\"That's",
"Movement WITHIN Home elif dude >= HOME: hm = Home[color] # hm means",
"HOMESIZE=4 # How big is your home? Colors = [ \"Blue\", \"Red\", \"Cyan\",",
"== CENTER: if die==1: for i in MagicCircle: if Board[i] != color: note",
"MagicCircle # MOVEMENT INTO HOME myStart = Start[color] if myStart == 0: #",
"= BOARDSIZE - (i - Start[color]) % BOARDSIZE response.append([dude, i, note, distance]) continue",
"and not special: note += \" & \" if Board[finalspot] and not special:",
"destination == CENTER: assert marble+die-1 in MagicCircle if CenterSpace == color: return False",
"Home badMove = 1 elif Home[color][testloc]: # somebody in the way badMove =",
"print (response) if myRoll == 6: print(\"%s rolled a 6! Take another turn.\"",
"= \"\" MagicCircle = [ 7, 21, 35, 49, 63, 77 ] #",
"= \"\" moveDesc += \"Home[\" + str(source-HOME+1) + \"] -> \" else: assert",
"range(1,die): if Board[(marble+i)%BOARDSIZE] == color: return False return True # Catch all assert",
"0: print (\"\\t\", end=\"\") print(output[i], end=\"\") if i == 20: print() elif i",
"print (\"Bad input\") GotInput = 0 src,dst,note,distance = moves[selection-1] if not ValidMove(src,dst,myRoll,pColor): print",
"(finalspot - Start[color]) % BOARDSIZE response.append([dude, finalspot, note, distance]) # MOVEMENT INTO HOME",
"0]) # \"NORMAL\" MOVEMENT elif Board[(dude+die)%BOARDSIZE] != color: selfPass = 0 for i",
"\"]\" if not ValidMove(dude, Start[color], die, color): assert False response.append([dude, Start[color], note, BOARDSIZE])",
"XXX: This could be replaced with Tk or something else. # def Display():",
"!= CENTER # Special case of 6 in the magic circle ending where",
"frustrating! # from random import randint #import tkinter # GLOBALS BOARDSIZE = 84",
"Board destination is not the center or Home assert Board[destination] != color moveDesc",
"1 while not GotInput: option=1 # Counter for the user input menu for",
"0, \"Red\": 14, \"Cyan\": 28, \"Purple\": 42, \"Green\": 56, \"White\": 70 } #",
"takes option 1 # Purple kills # Green picks randomly from choices #",
"elif NumPlayers >= -6 and NumPlayers <= -2: print (\"Like tears in rain.\")",
"most of thie duplicate logic from GetMoves and have it here only. #",
"but it serves as a # check because I was having problems. :)",
"finish-HOME+1, note)) else: print(\"\\t[%d] %d -> Home[%d] %s\" %(option, strt, finish-HOME+1, note)) elif",
"Just in case, clear out any previous note # If this marble is",
"or something else. # def Display(): # Color! # ANSI color codes for",
"except ValueError: print (\"Please enter a number between 2 and 6.\") Setup =",
"0 except ValueError: print (\"Please enter a number between 2 and 6.\") Setup",
"and not special: note += \"Bonk \" + Board[finalspot] if finalspot in MagicCircle",
"of circle MoveToCheck = (circleExit + t)%BOARDSIZE if Board[MoveToCheck] == color: # Handle",
"Start[color] if myStart == 0: # HACK for Blue with start of 0",
"die=%d)\" %(color,die)) # List that we'll be returning with ALL valid moves response",
"traversed for mc in range(1,i+1): if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] == color: # Passed through teammate",
"BASE, BASE ] robotMode = 0 Setup = 0 # Has the game",
"if marble in MagicCircle: # magicStart is the index of where we are",
"Checked all intermediate spaces, and destination space homeloc = destination - HOME #",
"Try again.\") GotInput = 0 except ValueError: if len(moves) == 1: selection =",
"Board[MoveToCheck] == color: # Handle case where I roll a 6 and want",
"given the # value. So here's a bunch of casting black magic to",
"not ValidMove(dude, CENTER, die, color): assert False distance = BOARDSIZE - 8 response.append([dude,",
"\"Red\": \"\\033[1;31;40m\", \"Cyan\": \"\\033[1;36;40m\", \"Purple\": \"\\033[1;35;40m\", \"Green\": \"\\033[1;32;40m\", \"White\": \"\\033[1;37;40m\", } # Reset",
"color moveDesc += \"Home[\" + str(destination-HOME+1) + \"]\" else: # Board destination is",
"(\"Please enter a number between 2 and 6.\") Setup = 0 except ValueError:",
"die, color): assert False distance = BOARDSIZE - 8 response.append([dude, CENTER, note, distance])",
"dude in Marbles[color]: # print (\"[] GetMoves(color=%s die=%d) - Check %d\" %(color,die,dude)) note",
"in range(1,die) if not badMove: # Valid moves only loc = dude+die #",
"+ str(source-HOME+1) + \"] -> \" else: assert Board[source] == color Board[source] =",
"Board[(marble+i)%BOARDSIZE] == color: return False return True # Catch all assert False return",
"one past the MagicCircle, then I # can enter the Center. dude+die-1 is",
"] Board = [\"\" for x in range(0,BOARDSIZE)] CenterSpace = \"\" MagicCircle =",
"Special case of 6 in the magic circle ending where you start if",
"spaces between here and my final location for # teammates for i in",
"in rain.\") robotMode = 1 elif NumPlayers < 2 or NumPlayers > 6:",
"over turnNum = 0 robotMode = 0 # A human is needed numPlayers",
"# Deal with possible destinations if destination == CENTER: assert CenterSpace != color",
"elif strt == CENTER: print(\"\\t[%d] Center -> %d %s\" %(option,finish,note)) elif strt ==",
"= [\"\" for x in range(0,BOARDSIZE)] CenterSpace = \"\" MagicCircle = [ 7,",
"means Home Position for i in range(1,die+1): if(hp+i >= HOMESIZE): return False if",
"+ \": Please select an option: \")) GotInput = 1 if selection <",
"i in range(0,BOARDSIZE): space = Board[i] if space == \"\": # Use a",
"die > 0 and die < 7 assert color # Quick check to",
"and 6 # def Roll(): return randint(1,6) # # Display(): # # Prints",
"and 6.\") Setup=0 except KeyError: print (\"Please enter a number between 2 and",
"again=1 # Flag for when a player rolls a 6 while again: again=0",
"case of 6 in the magic circle ending where you start if marble",
"if finish >= HOME: if strt >= HOME: print(\"\\t[%d] Home[%d] -> Home[%d] %s\"",
"Used by .sorted to return lists in order def SortMoves(sub_li): sub_li.sort(key = lambda",
"1 if t==0: # The magic circle is poisoned from # here on",
"back to the nearest magic circle space, checking # that walk. if Board[destination-i]",
"die=%d, color=%s)\" %(marble, destination, die, color)) assert die > 0 and die <",
"and homeloc < HOMESIZE: return True assert False return False # Something insane",
"BOARDSIZE - ((dude+die)%BOARDSIZE - Start[color]) % BOARDSIZE response.append([dude, (dude+die)%BOARDSIZE, note, distance]) # Done!",
"get the color given the # value. So here's a bunch of casting",
"can bop around by adding die values # to the index in that",
"\\ %(option, strt-HOME+1, finish-HOME+1, note)) else: print(\"\\t[%d] %d -> Home[%d] %s\" %(option, strt,",
"finish,note)) else: print (\"\\t[%d] %d -> %d %s\" %(option, strt, finish, note)) option+=1",
"Board[source] = \"\" moveDesc += \"\" + str(source) + \" -> \" #",
"I need to get the color given the # value. So here's a",
"Blood shall flow GotInput = 1 for i in range(0,len(moves)): if \"Bonk\" in",
"False return False # Something insane happened? # Movement WITHIN Home if marble",
"the spaces between here and my final location for # teammates for i",
"elif Home[color][testloc]: # somebody in the way return False else: # Still on",
"I can roll out to any # magic circle space if dude ==",
"i in range(1,die): if Board[(dude+i)%BOARDSIZE] == color: selfPass = 1 continue if not",
"x[0]) return sub_li # # GetMoves (color, die) # # Return a list",
"\"Green\": # Take a random option selection = randint(1,len(moves)) GotInput = 1 while",
"if IsWinner(pColor): print (\"%s wins in %d turns!\" %(pColor, turnNum)) GameOver = 1",
"note += \"[\" if finalspot in MagicCircle: note += \"Magic Circle\" if finalspot",
"end=\"\") for b in Base[p]: if b == \"\": #print (\"-\", end=\"\") print",
"magic spaces between where I entered # and where I exited for j",
"the magic circle, I can continue normal track, or # hop one magic",
"magic circle, I can continue normal track, or # hop one magic circle",
"this marble is in Base, see if it can get out if marble",
"processing: If the roll is a 6 in magic # circle, that isn't",
"a new game, and assigns player colors. # Returns: Number of Players #",
"a teammate between dude # and out badMove=0 circleBlock=0 # Check magic circle",
"MagicCircle, then I # can enter the Center. marble+die-1 is equal to MagicCircle+1",
"\"\\033[1;97;45m\", \"Green\": \"\\033[1;35;42m\", \"White\": \"\\033[1;31;47m\", } # ANSI color codes for start startColor={",
"6) and (Board[Start[color]]!=color) and (1==firstStart): note = \"[Start\" if Board[Start[color]]: note += \"",
"spaces, and destination space homeloc = destination - HOME # homeloc is (potential)",
"# Main game loop turnNum += 1 for p in range(0,numPlayers): again=1 #",
"100, 101, 102, 103 HOMESIZE=4 # How big is your home? Colors =",
"if not ValidMove(dude, dude+die, die, color): assert False response.append([dude, dude+die, \"[Home]\", 0]) #",
"!= MoveToCheck: # If it is not me, then it is someone else",
"6: return False return True else: # The destination is not in the",
"note = \"[Start\" if Board[Start[color]]: note += \" & Bonk \" + Board[Start[color]]",
"range(0,BOARDSIZE): space = Board[i] if space == \"\": # Use a * to",
"for i in range(0,die+1): if destination-i in MagicCircle: magicDestination = MagicCircle.index(destination-i) # Check",
"6: pass else: badMove = 1 # Check regular spots after I left",
"ready for a new game, and assigns player colors. # Returns: Number of",
"+ str(destination-HOME+1) + \"]\" else: # Board destination is not the center or",
"6! Take another turn.\" %pColor) again=1 if IsWinner(pColor): print (\"%s wins in %d",
"die is valid # Returns True / False # # This is pretty",
"== color: return False return True # Leaving the Center space if marble",
"values # to the index in that list circleNum = MagicCircle.index(dude) # Lots",
"in MagicCircle and Board[(dude+die)%BOARDSIZE]: note += \" & \" if Board[(dude+die)%BOARDSIZE]: note +=",
"it here only. # But, you know, this is working. # def ValidMove(marble,",
"\"\\033[1;32;40m\", \"White\": \"\\033[1;37;40m\", } # Reset the color to default creset=\"\\033[m\" output =",
"x: x[3]) #sub_li.sort(reverse=True,key = lambda x: x[0]) return sub_li # # GetMoves (color,",
"modulo problems. elif (dude < Start[color] and (dude+die)%BOARDSIZE >= Start[color]) or \\ (Start[color]",
"\"[Home]\", 0]) # \"NORMAL\" MOVEMENT elif Board[(dude+die)%BOARDSIZE] != color: selfPass = 0 for",
"== 3 or NumPlayers == -3: Players.append(\"Blue\") Players.append(\"Cyan\") Players.append(\"Green\") elif NumPlayers == 4",
"yep=1 for i in range(1,die+1): if Board[dude+i] == color: yep=0 if yep: note",
"False response.append([dude, HOME+homeloc, \"[Home]\", 0]) # Still on the Board elif loc <",
"start if marble == destination and die == 6 and marble in MagicCircle:",
"main board if Board[testloc] == color: # Can't pass teammate return False #",
"= MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot = (circleExit + (die-i))%BOARDSIZE # Now verify that I didn't",
"death. BASE=99 # \"Location\" for base spots. All are 99. HOME=100 # \"Location\"",
"not ValidMove(dude, Start[color], die, color): assert False response.append([dude, Start[color], note, BOARDSIZE]) firstStart=0 continue",
"How many spaces into Home? if testloc >= HOMESIZE: # Ran off the",
"selfPass = 1 continue if not selfPass: note = \"\" if (dude+die)%BOARDSIZE in",
"ccode={ # [magic];Attrib;FG;BGm \"Blue\": \"\\033[1;97;44m\", \"Red\": \"\\033[1;97;41m\", \"Cyan\": \"\\033[1;97;46m\", \"Purple\": \"\\033[1;97;45m\", \"Green\": \"\\033[1;35;42m\",",
"after I left circle for t in range(0,die-i+1): # t is number of",
"enter a number between 2 and 6.\") Setup = 0 except ValueError: print",
"\" if CenterSpace: print (\"Bonk! %s hits %s!\" %(color, CenterSpace)) moveDesc += \"Bonk",
"pColor == \"Green\": # Take a random option selection = randint(1,len(moves)) GotInput =",
"%(src,dst,myRoll,pColor)) return False response = Move(pColor, src, dst) Display() print (response) if myRoll",
"I'm in the magic circle, I can continue normal track, or # hop",
"%d turns!\" %(pColor, turnNum)) GameOver = 1 return # We're out of here!",
"def ValidMove(marble, destination, die, color): # print (\"[Entering] ValidMove(src=%d, dest=%d, die=%d, color=%s)\" %(marble,",
"space if marble == CENTER: if die==1 and Board[destination] != color: return True",
"of 0, # because of modulo problems. elif (dude < Start[color] and (dude+die)%BOARDSIZE",
"color in Players # print (\"[Entering] GetMoves(color=%s die=%d)\" %(color,die)) # List that we'll",
"p in Players: print (\"%s\\t\" %p, end=\"\") print (\"Base:\\t\", end=\"\") for b in",
"NumPlayers == -5: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") else: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\")",
"Board[(marble+i)%BOARDSIZE] == color: return False return True # Leaving the Center space if",
"(\"Kill!\", moves[i]) break if not selection: selection = 1 elif pColor == \"Cyan\"",
"destination, die) # # Check if the move from marble to destination via",
"pretty much a duplicate of GetMoves() but it serves as a # check",
"range(1,die+1): testloc = marble+i if testloc >= myStart: # testloc is in the",
"assert die > 0 and die < 7 assert color in Colors assert",
"Home if homeloc >= 0 and homeloc < HOMESIZE: if not ValidMove(dude, HOME+homeloc,",
"if valid: if not ValidMove(dude, dude+die, die, color): assert False response.append([dude, dude+die, \"[Home]\",",
"not GameOver: # Main game loop turnNum += 1 for p in range(0,numPlayers):",
"Board[source] == color Board[source] = \"\" moveDesc += \"\" + str(source) + \"",
"in Home[p]: if h == \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else:",
"MagicCircle: return True # MAGIC CIRCLE HANDLING if marble in MagicCircle: # magicStart",
"== 6: return False return True else: # The destination is not in",
"(\"\\n%s rolled: %d\\n\" %(pColor, myRoll)) moves = GetMoves(pColor, myRoll) if not moves: print",
"False response = Move(pColor, src, dst) Display() print (response) if myRoll == 6:",
"Purple kills # Green picks randomly from choices # Blue is the player",
"spaces between where I entered # and where I exited for j in",
"return False for i in range(1,die+1): if Board[(marble+i)%BOARDSIZE] == color: return False return",
"around the main track CENTER=98 # \"Location\" of the center of death. BASE=99",
"Returns: Number of Players # def Setup(): # Initialize the bases and colors",
"between 2 and 6.\") Setup=0 except KeyError: print (\"Please enter a number between",
"0 print (\"Preparing a %d player game.\" %NumPlayers) if NumPlayers == 2 or",
"assert marble != BASE # CENTER SPACE HANDLING # If my roll can",
"Bonk(destination) Board[destination] = color Marbles[color].remove(source) Marbles[color].append(destination) return moveDesc # # ValidMove (marble, destination,",
"to add Start once for dude in Marbles[color]: # print (\"[] GetMoves(color=%s die=%d)",
"\" if Board[(dude+die)%BOARDSIZE]: note += \"Bonk \" + Board[(dude+die)%BOARDSIZE] if (dude+die)%BOARDSIZE in MagicCircle",
"21, 35, 49, 63, 77 ] # Locations for the magic circle spaces",
"# hp means Home Position for i in range(1,die+1): if(hp+i >= HOMESIZE): return",
"Board[finalspot] if finalspot in MagicCircle or Board[finalspot]: note += \"]\" if not ValidMove(dude,",
"destination, die, color)) assert die > 0 and die < 7 assert color",
"(Board[finalspot]): note += \"[\" if finalspot in MagicCircle: note += \"Magic Circle\" if",
"\"\" + str(destination) + \" \" # Deal with bonking if destination is",
"t in range(0,die-i+1): # t is number of hops out of circle MoveToCheck",
"one, I can roll out to any # magic circle space if dude",
"CenterSpace != color: yep=1 for i in range(1,die+1): if Board[dude+i] == color: yep=0",
"% BOARDSIZE response.append([dude, finalspot, note, distance]) # MOVEMENT INTO HOME # NB: Add",
"# Display(): # # Prints out the state of the board. # XXX:",
"HOME+homeloc, die, color): assert False response.append([dude, HOME+homeloc, \"[Home]\", 0]) # Still on the",
"of where we are in the magic # circle list, so we can",
"if marble == destination and die == 6: return False return True else:",
"Something insane happened? # Movement WITHIN Home if marble >= HOME: assert marble",
"\"Blue\": 0, \"Red\": 14, \"Cyan\": 28, \"Purple\": 42, \"Green\": 56, \"White\": 70 }",
"\"\" + str(source) + \" -> \" # Deal with possible destinations if",
"moveDesc += \"[Base] -> \" elif source >= HOME: Home[color][source-HOME] = \"\" moveDesc",
">= 21 and i < 42: if i == 31: if CenterSpace: cen",
"(ccode[b]+b[0].lower()+creset, end=\"\") print (\"\\tHome:\\t\", end=\"\") for h in Home[p]: if h == \"\":",
"== Start[color] if (die == 1 or die == 6) and (Board[Start[color]]!=color): return",
"can get out if dude == BASE: if (die == 1 or die",
"track, or # hop one magic circle space and then continue the normal",
"else: print (\"Bad input\") GotInput = 0 except TypeError: print (\"Bad input\") GotInput",
"# track, or hope 2 magic circle space and then continue the #",
"range(1,i+1): if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] == color: # Passed through teammate # 6 in magic",
"\"Green\": \"\\033[1;35;42m\", \"White\": \"\\033[1;31;47m\", } # ANSI color codes for start startColor={ \"Blue\":",
"need to get the color given the # value. So here's a bunch",
"color): assert False response.append([dude, Start[color], note, BOARDSIZE]) firstStart=0 continue else: continue # #",
"the way badMove = 1 else: # Still on the main board if",
"my roll can take me to one past the MagicCircle, then I #",
"# send a guy back to base # def Bonk(space): if space ==",
"HOMESIZE): return False if hp+i > HOMESIZE or hm[hp+i] == color: return False",
"CenterSpace moveDesc = color + \": \" # Remove marble from source if",
"is not me, then it is someone else badMove = 1 if t==0:",
"\"Blue\": \"\\033[1;34;40m\", \"Red\": \"\\033[1;31;40m\", \"Cyan\": \"\\033[1;36;40m\", \"Purple\": \"\\033[1;35;40m\", \"Green\": \"\\033[1;32;40m\", \"White\": \"\\033[1;37;40m\", }",
"\" elif source == BASE: # Remove the marble from the base assert",
"i == 0: print (\"\\t\", end=\"\") print(output[i], end=\"\") if i == 20: print()",
"== color: return False return True assert marble not in MagicCircle # MOVEMENT",
"print (chr(0x00B7), end=\"\") else: print (ccode[h]+h[0].lower()+creset, end=\"\") print() # # Setup(): # #",
">= 0 and homeloc < HOMESIZE: return True assert False return False #",
"list of the valid player options with a die roll # def GetMoves(color,die):",
"codes for start startColor={ \"Blue\": \"\\033[1;34;40m\", \"Red\": \"\\033[1;31;40m\", \"Cyan\": \"\\033[1;36;40m\", \"Purple\": \"\\033[1;35;40m\", \"Green\":",
"print (\"%s\\t\" %p, end=\"\") print (\"Base:\\t\", end=\"\") for b in Base[p]: if b",
"+= \"[Base] -> \" elif source >= HOME: Home[color][source-HOME] = \"\" moveDesc +=",
"the Center. dude+die-1 is equal to MagicCircle+1 if dude+die-1 in MagicCircle and CenterSpace",
"#output[i] = \"*\" #print (\"*\", end=\"\") output[i] = chr(0x00A4) # cool circle thing",
"CIRCLE HANDLING if marble in MagicCircle: # magicStart is the index of where",
"(\"-\", end=\"\") # Occupied space else: # If you're on the magic circle,",
"# Main # def Main(): GameOver = 0 # Is the game over",
"is not the center or Home assert Board[destination] != color moveDesc += \"\"",
"myStart: # testloc is in the Home zone testloc -= myStart # How",
"-> %d %s\" %(option,finish,note)) elif strt == BASE: print (\"\\t[%d] Base -> Start",
"note = \"\" if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"[\" if",
"you're on the magic circle, you get an upper case # letter if",
"\"\\033[1;31;47m\", } # ANSI color codes for start startColor={ \"Blue\": \"\\033[1;34;40m\", \"Red\": \"\\033[1;31;40m\",",
"== color and marble != destination and die != 6: return False #",
"-> \" elif source >= HOME: Home[color][source-HOME] = \"\" moveDesc += \"Home[\" +",
"you know, this is working. # def ValidMove(marble, destination, die, color): # print",
"can take me to one past the MagicCircle, then I # can enter",
"MagicCircle.index(marble) for i in range(0,die+1): if destination-i in MagicCircle: magicDestination = MagicCircle.index(destination-i) #",
"# Used by .sorted to return lists in order def SortMoves(sub_li): sub_li.sort(key =",
"circle ending where you start if marble == destination and die == 6",
"src,dst,note,distance = moves[selection-1] if not ValidMove(src,dst,myRoll,pColor): print (\"ERROR: ValidMove(%d, %d, %d, %s)\" %(src,dst,myRoll,pColor))",
"= 1 while not GotInput: option=1 # Counter for the user input menu",
"= (circleExit + (die-i))%BOARDSIZE # Now verify that I didn't pass a teammate",
"0 myStart = BOARDSIZE for i in range(1,die+1): testloc = dude+i if not",
"the game been setup? while not Setup: try: Setup=1 NumPlayers = int(input(\"How many",
"board game that can be very # frustrating! # from random import randint",
"if it can get out if dude == BASE: if (die == 1",
"to MagicCircle+1 if dude+die-1 in MagicCircle and CenterSpace != color: yep=1 for i",
"if testloc >= myStart: # testloc is in the Home zone testloc -=",
"in Colors: Base[c] = [ c, c, c, c] Home[c] = [ \"\",",
"pass teammate return False # Checked all intermediate spaces, and destination space homeloc",
"%d -> Center %s\" %(option,strt,note)) elif finish in MagicCircle: print (\"\\t[%d] %d ->",
"# # Roll a die. # Returns an int between 1 and 6",
"#cen = chr(216) cen = chr(0x00A7) # Hurricane print(\"\\t%s\\t %s\\t %s\" %(output[104-i],cen,output[i])) else:",
"much a duplicate of GetMoves() but it serves as a # check because",
"HOME: assert marble < HOME+HOMESIZE assert destination >= HOME hm = Home[color] #",
"Home[color][testloc]: # somebody in the way badMove = 1 else: # Still on",
"belong to us. Marbles[c] = [BASE, BASE, BASE, BASE ] robotMode = 0",
"0 Base[color].remove(color) # The destination is that color's start destination = Start[color] moveDesc",
"< 21: if i == 0: print (\"\\t\", end=\"\") print(output[i], end=\"\") if i",
"hm = Home[color] # hm means Home[color] hp = marble-HOME # hp means",
"Blue with start of 0 myStart = BOARDSIZE for i in range(1,die+1): testloc",
"not ValidMove(dude, HOME+homeloc, die, color): assert False response.append([dude, HOME+homeloc, \"[Home]\", 0]) # Still",
"moves available.\") continue GotInput = 0 selection = 0 # Red always goes",
"# do that. thiscolor = list(Start.keys())[list(Start.values()).index(i)] #output[i] = startColor[thiscolor]+\"#\"+creset output[i] = startColor[thiscolor]+chr(0x00BB)+creset output[i]",
"== destination and die == 6: return False return True else: # The",
"note = \"\" if (finalspot in MagicCircle) or (Board[finalspot]): note += \"[\" if",
"thiscolor = list(Start.keys())[list(Start.values()).index(i)] #output[i] = startColor[thiscolor]+\"#\"+creset output[i] = startColor[thiscolor]+chr(0x00BB)+creset output[i] = startColor[thiscolor]+chr(0x033F)+creset #print",
"response.append([dude, (dude+die)%BOARDSIZE, note, distance]) # Done! # print (\"[Leaving] GetMoves(color=%s die=%d) =\" %(color,die),response)",
"7 assert color in Colors assert color in Players # print (\"[Entering] GetMoves(color=%s",
"Board[(dude+die)%BOARDSIZE] if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"]\" if not ValidMove(dude,",
"+= \"]\" if not ValidMove(dude, CENTER, die, color): assert False distance = BOARDSIZE",
"a die. # Returns an int between 1 and 6 # def Roll():",
"the main board if Board[testloc%BOARDSIZE] == color: # Can't pass teammate badMove =",
">= HOME: if strt >= HOME: print(\"\\t[%d] Home[%d] -> Home[%d] %s\" \\ %(option,",
"color and marble != destination and die != 6: return False # If",
"IsWinner(color) # # Determine if color has won. Returns True/False # def IsWinner(color):",
"[\"-\" for x in range(0,BOARDSIZE)] for i in range(0,BOARDSIZE): space = Board[i] if",
"= \"[Magic Circle\" if Board[i]: note += \" & Bonk \" + Board[i]",
"if not selection: selection = 1 elif pColor == \"Cyan\" or pColor ==",
"except KeyError: print (\"Please enter a number between 2 and 6.\") Setup =",
"see if it can get out if marble == BASE: assert destination ==",
"valid # Returns True / False # # This is pretty much a",
"color to default creset=\"\\033[m\" output = [\"-\" for x in range(0,BOARDSIZE)] for i",
"someone else badMove = 1 if t==0: # The magic circle is poisoned",
"(finalspot in MagicCircle) or (Board[finalspot]): note += \"[\" if finalspot in MagicCircle: note",
"not ValidMove(dude, (dude+die)%BOARDSIZE, die, color): assert False distance = BOARDSIZE - ((dude+die)%BOARDSIZE -",
"if destination-i in MagicCircle: magicDestination = MagicCircle.index(destination-i) # Check all the magic spaces",
"print (\"%s wins in %d turns!\" %(pColor, turnNum)) GameOver = 1 return #",
"(startColor[thiscolor]+\"#\"+creset, end=\"\") elif i % 10 == 0: output[i] = str(i // 10)",
"space homeloc = loc - myStart # homeloc is home space # Move",
"== CENTER: print (\"\\t[%d] %d -> Center %s\" %(option,strt,note)) elif finish in MagicCircle:",
"1 GotInput = 1 else: print (\"Bad input\") GotInput = 0 except TypeError:",
"for when a player rolls a 6 while again: again=0 pColor = Players[p]",
"else. # def Display(): # Color! # ANSI color codes for the marbles",
"== CENTER: assert CenterSpace != color moveDesc += \"[Center] \" if CenterSpace: print",
"- ((dude+die)%BOARDSIZE - Start[color]) % BOARDSIZE response.append([dude, (dude+die)%BOARDSIZE, note, distance]) # Done! #",
"0 and die < 7 assert color # Quick check to see if",
"\" \" # Deal with bonking if destination is not empty if Board[destination]:",
"BOARDSIZE = 84 # Number of space around the main track CENTER=98 #",
"for p in Players: print (\"%s\\t\" %p, end=\"\") print (\"Base:\\t\", end=\"\") for b",
"in MagicCircle: #output[i] = \"*\" #print (\"*\", end=\"\") output[i] = chr(0x00A4) # cool",
"the destination if destination < BOARDSIZE: if Board[destination] == color and marble !=",
"in the magic circle, so walk # back to the nearest magic circle",
"Board[(dude+die)%BOARDSIZE]: note += \" & \" if Board[(dude+die)%BOARDSIZE]: note += \"Bonk \" +",
 HOMESIZE or">
">= HOMESIZE or hm[hp+i] == color): valid=0 continue if valid: if not ValidMove(dude,",
"in the center and I got a one, I can roll out to",
"possible destinations if destination == CENTER: assert CenterSpace != color moveDesc += \"[Center]",
"1 for p in range(0,numPlayers): again=1 # Flag for when a player rolls",
"start destination = Start[color] moveDesc += \"[Base] -> \" elif source >= HOME:",
"+= \"\" + str(destination) + \" \" # Deal with bonking if destination",
"Roll() print (\"\\n%s rolled: %d\\n\" %(pColor, myRoll)) moves = GetMoves(pColor, myRoll) if not",
"\"White\": # Always take the first option selection = 1 GotInput = 1",
"in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"[\" if (dude+die)%BOARDSIZE in MagicCircle: note +=",
"# How many spaces into Home? if testloc >= HOMESIZE: # Ran off",
"to get the color given the # value. So here's a bunch of",
"Start[color]) % BOARDSIZE response.append([dude, (dude+die)%BOARDSIZE, note, distance]) # Done! # print (\"[Leaving] GetMoves(color=%s",
"and die == 6: return False return True else: # The destination is",
"magic circle space if dude == CENTER: if die==1: for i in MagicCircle:",
"it can get out if marble == BASE: assert destination == Start[color] if",
"+= \"\" + str(source) + \" -> \" # Deal with possible destinations",
"for # teammates for i in range(1,die+1): testloc = marble+i if testloc >=",
"# ANSI color codes for start startColor={ \"Blue\": \"\\033[1;34;40m\", \"Red\": \"\\033[1;31;40m\", \"Cyan\": \"\\033[1;36;40m\",",
"== 0 and dude < Start[color]+BOARDSIZE and dude+die >= Start[color]+BOARDSIZE): badMove = 0",
"magic # circle list, so we can bop around by adding die values",
"with start of 0 myStart = BOARDSIZE for i in range(1,die+1): testloc =",
"win=0 break return bool(win) def TkSetup(): root = tkinter.Tk() root.title(\"Marbles!\") canvas = tk.Canvas(root,",
"= tkinter.Tk() root.title(\"Marbles!\") canvas = tk.Canvas(root, width=200, height=200, borderwidth=0, bg=\"black\") canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop()",
"selection = i+1 print (\"Kill!\", moves[i]) break if not selection: selection = 1",
"MagicCircle: # magicStart is the index of where we are in the magic",
"checking # that walk. if Board[destination-i] == color: return False return True assert",
"GotInput: option=1 # Counter for the user input menu for move in moves:",
"if numPlayers <= 0: robotMode = 1 numPlayers *= -1 # TkSetup() Display()",
"MOVEMENT if marble not in MagicCircle and marble < BOARDSIZE and destination <",
"was having problems. :) I should probably remove # most of thie duplicate",
"Start[color] and (dude+die)%BOARDSIZE >= Start[color]) or \\ (Start[color] == 0 and dude <",
"{} # Dict of each color's base status Home = {} # Dict",
"dst) Display() print (response) if myRoll == 6: print(\"%s rolled a 6! Take",
"def Display(): # Color! # ANSI color codes for the marbles ccode={ #",
"ValidMove(dude, loc, die, color): assert False distance = BOARDSIZE - (loc - Start[color])",
"end=\"\") print (\"\\tHome:\\t\", end=\"\") for h in Home[p]: if h == \"\": #print",
"%(color,die,dude)) note =\"\" # Just in case, clear out any previous note #",
"else: assert Board[source] == color Board[source] = \"\" moveDesc += \"\" + str(source)",
"MoveToCheck = (circleExit + t)%BOARDSIZE if Board[MoveToCheck] == color: # Handle case where",
"= 1 if t==0: # The magic circle is poisoned from # here",
"ccode[space]+space[0].upper()+creset #print (ccode[space]+space[0].upper()+creset, end=\"\") else: output[i] = ccode[space]+space[0].lower()+creset #print (ccode[space]+space[0].lower()+creset, end=\"\") for i",
"%(option, strt, finish,note)) else: print (\"\\t[%d] %d -> %d %s\" %(option, strt, finish,",
"Home? if testloc >= HOMESIZE: # Ran off the end of Home return",
"problems. :) I should probably remove # most of thie duplicate logic from",
"myself if mc == 6: pass else: badMove = 1 # Check regular",
"14, \"Cyan\": 28, \"Purple\": 42, \"Green\": 56, \"White\": 70 } # # Roll():",
"is a 6 in magic # circle, that isn't bonking because it is",
"I'm in the center and I got a one, I can roll out",
"= color + \": \" # Remove marble from source if source ==",
"NumPlayers == -3: Players.append(\"Blue\") Players.append(\"Cyan\") Players.append(\"Green\") elif NumPlayers == 4 or NumPlayers ==",
"magic circle space and then continue the normal # track, or hope 2",
"die) # # Check if the move from marble to destination via die",
"== color: # Handle case where I roll a 6 and want to",
"\" + Board[(dude+die)%BOARDSIZE] if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"]\" if",
"HOMESIZE: return True assert False return False # Something insane happened? # Movement",
"assigns player colors. # Returns: Number of Players # def Setup(): # Initialize",
"0: robotMode = 1 numPlayers *= -1 # TkSetup() Display() # Show the",
"continue normal track, or # hop one magic circle space and then continue",
"False # # SortMoves(myList) # # Used by .sorted to return lists in",
"# t is number of hops out of circle MoveToCheck = (circleExit +",
"choices # Blue is the player .. or she chooses 1 # Deckard",
"moveDesc += \"Home[\" + str(destination-HOME+1) + \"]\" else: # Board destination is not",
"continue if not selfPass: note = \"\" if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]:",
"to default creset=\"\\033[m\" output = [\"-\" for x in range(0,BOARDSIZE)] for i in",
"for x in range(0,BOARDSIZE)] CenterSpace = \"\" MagicCircle = [ 7, 21, 35,",
"if Board[destination]: moveDesc += \"Bonk \" + Board[destination] + \"!\" print (\"Bonk! %s",
"\"\" Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy) # # Move(color, source, destination): # # Move marble",
"range(0,BOARDSIZE): if i >=0 and i < 21: if i == 0: print",
"= Start[color] moveDesc += \"[Base] -> \" elif source >= HOME: Home[color][source-HOME] =",
"-> \" else: assert Board[source] == color Board[source] = \"\" moveDesc += \"\"",
"> HOMESIZE or hm[hp+i] == color: return False return True # \"NORMAL\" MOVEMENT",
"BOARDSIZE - (i - Start[color]) % BOARDSIZE response.append([dude, i, note, distance]) continue assert",
"elif source == BASE: # Remove the marble from the base assert Base[color].count(color)",
"for b in Base[p]: if b == \"\": #print (\"-\", end=\"\") print (chr(0x00B7),",
"return True # \"NORMAL\" MOVEMENT if marble not in MagicCircle and marble <",
"# Movement WITHIN Home elif dude >= HOME: hm = Home[color] # hm",
"to base # def Bonk(space): if space == CENTER: deadGuy = CenterSpace else:",
"Home[color] hp = dude-HOME # hp means Home Position valid=1 for i in",
"(die == 1 or die == 6) and (Board[Start[color]]!=color) and (1==firstStart): note =",
"option+=1 try: selection = int(input(pColor + \": Please select an option: \")) GotInput",
"6.\") Setup = 0 except ValueError: print (\"Please enter a number between 2",
"bool(win) def TkSetup(): root = tkinter.Tk() root.title(\"Marbles!\") canvas = tk.Canvas(root, width=200, height=200, borderwidth=0,",
"i == 20: print() elif i >= 21 and i < 42: if",
"between dude # and out badMove=0 circleBlock=0 # Check magic circle spots I",
"If you're on the magic circle, you get an upper case # letter",
"moveDesc += \"Home[\" + str(source-HOME+1) + \"] -> \" else: assert Board[source] ==",
"circle is poisoned from # here on out.. circleBlock = 1 continue if",
"Center. dude+die-1 is equal to MagicCircle+1 if dude+die-1 in MagicCircle and CenterSpace !=",
"status Marbles = {} # Dict of each color's marble locations Players =",
"randomly from choices # Blue is the player .. or she chooses 1",
"and 6.\") Setup = 0 except TypeError: print (\"Please enter a number between",
"elif strt == BASE: print (\"\\t[%d] Base -> Start %s\" %(option,note)) else: if",
"elif NumPlayers < 2 or NumPlayers > 6: print (\"Please enter a number",
"circleBlock=0 # Check magic circle spots I traversed for mc in range(1,i+1): if",
"if die==1: for i in MagicCircle: if Board[i] != color: note = \"[Magic",
"= 0 Setup = 0 # Has the game been setup? while not",
"homeloc >= 0 and homeloc < HOMESIZE: if not ValidMove(dude, HOME+homeloc, die, color):",
"or pColor == \"Purple\": # Blood shall flow GotInput = 1 for i",
"(\"-\") output[i] = chr(0x00B7) # A nice dot #print (\"-\", end=\"\") # Occupied",
"(Board[Start[color]]!=color) and (1==firstStart): note = \"[Start\" if Board[Start[color]]: note += \" & Bonk",
"lists in order def SortMoves(sub_li): sub_li.sort(key = lambda x: x[3]) #sub_li.sort(reverse=True,key = lambda",
"True else: return False assert marble != CENTER assert destination != CENTER #",
"note += \" & \" if Board[finalspot] and not special: note += \"Bonk",
"+ \"]\" if not ValidMove(dude, loc, die, color): assert False distance = BOARDSIZE",
"= color Marbles[color].remove(source) Marbles[color].append(destination) return moveDesc # # ValidMove (marble, destination, die) #",
"Still on the main board if Board[testloc%BOARDSIZE] == color: # Can't pass teammate",
"False elif Home[color][testloc]: # somebody in the way return False else: # Still",
"= Home[color] # hm means Home[color] hp = dude-HOME # hp means Home",
"MOVEMENT INTO HOME myStart = Start[color] if myStart == 0: # I have",
"note += \" & Bonk \" + Board[i] note += \"]\" if not",
"distance = BOARDSIZE - ((dude+die)%BOARDSIZE - Start[color]) % BOARDSIZE response.append([dude, (dude+die)%BOARDSIZE, note, distance])",
"or Board[(dude+die)%BOARDSIZE]: note += \"]\" if not ValidMove(dude, (dude+die)%BOARDSIZE, die, color): assert False",
"duplicate of GetMoves() but it serves as a # check because I was",
"+= \" & \" if Board[(dude+die)%BOARDSIZE]: note += \"Bonk \" + Board[(dude+die)%BOARDSIZE] if",
"(\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[b]+b[0].lower()+creset, end=\"\") print (\"\\tHome:\\t\", end=\"\") for",
"Check if the move from marble to destination via die is valid #",
"MagicCircle: note += \"Magic Circle\" if (dude+die)%BOARDSIZE in MagicCircle and Board[(dude+die)%BOARDSIZE]: note +=",
"equal to MagicCircle+1 if dude+die-1 in MagicCircle and CenterSpace != color: yep=1 for",
"Players.append(\"Cyan\") Players.append(\"Red\") Players.append(\"Green\") return NumPlayers # # Bonk! # # send a guy",
"color moveDesc += \"[Center] \" if CenterSpace: print (\"Bonk! %s hits %s!\" %(color,",
"and then continue the # normal track, or ... if dude in MagicCircle:",
"base are belong to us. Marbles[c] = [BASE, BASE, BASE, BASE ] robotMode",
"\"Purple\": # Blood shall flow GotInput = 1 for i in range(0,len(moves)): if",
"# List of active players # Marbles[color] : { location0, location1, location2, location3",
"firstStart=1 # Only want to add Start once for dude in Marbles[color]: #",
"= \"-\" #cen = chr(216) cen = chr(0x00A7) # Hurricane print(\"\\t%s\\t %s\\t %s\"",
"elif NumPlayers == 5 or NumPlayers == -5: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\")",
"not in MagicCircle # MOVEMENT INTO HOME myStart = Start[color] if myStart ==",
"die roll # def GetMoves(color,die): assert die > 0 and die < 7",
"ValidMove(dude, i, die, color): assert False distance = BOARDSIZE - (i - Start[color])",
"# The destination is not in the magic circle, so walk # back",
"response.append([dude, loc, note, distance]) # Movement WITHIN Home elif dude >= HOME: hm",
"for move in moves: strt, finish, note, distance = move if finish >=",
"myRoll)) moves = GetMoves(pColor, myRoll) if not moves: print (\"No moves available.\") continue",
"back to base # def Bonk(space): if space == CENTER: deadGuy = CenterSpace",
"response = Move(pColor, src, dst) Display() print (response) if myRoll == 6: print(\"%s",
"False return True # \"NORMAL\" MOVEMENT if marble not in MagicCircle and marble",
"Has the game been setup? while not Setup: try: Setup=1 NumPlayers = int(input(\"How",
"print(\"\\t[%d] Center -> %d %s\" %(option,finish,note)) elif strt == BASE: print (\"\\t[%d] Base",
"+= \"]\" if not ValidMove(dude, (dude+die)%BOARDSIZE, die, color): assert False distance = BOARDSIZE",
"for home spots - 100, 101, 102, 103 HOMESIZE=4 # How big is",
"%(color,Board[destination])) Bonk(destination) Board[destination] = color Marbles[color].remove(source) Marbles[color].append(destination) return moveDesc # # ValidMove (marble,",
"== color: selfPass = 1 continue if not selfPass: note = \"\" if",
"ANSI color codes for the marbles ccode={ # [magic];Attrib;FG;BGm \"Blue\": \"\\033[1;97;44m\", \"Red\": \"\\033[1;97;41m\",",
"color has won. Returns True/False # def IsWinner(color): win=1 for i in range(0,",
"c in Colors: Base[c] = [ c, c, c, c] Home[c] = [",
"\"\", \"\", \"\", \"\"] # Where are my marbles? All your base are",
"i in MagicCircle: output[i] = ccode[space]+space[0].upper()+creset #print (ccode[space]+space[0].upper()+creset, end=\"\") else: output[i] = ccode[space]+space[0].lower()+creset",
"HOME: hm = Home[color] # hm means Home[color] hp = dude-HOME # hp",
"number between 2 and 6.\") Setup = 0 except ValueError: print (\"Please enter",
"False # If this marble is in Base, see if it can get",
"1 and 6 # def Roll(): return randint(1,6) # # Display(): # #",
"and (Board[Start[color]]!=color) and (1==firstStart): note = \"[Start\" if Board[Start[color]]: note += \" &",
"if dude == CENTER: if die==1: for i in MagicCircle: if Board[i] !=",
"== BASE: print (\"\\t[%d] Base -> Start %s\" %(option,note)) else: if finish ==",
"MagicCircle: if Board[i] != color: note = \"[Magic Circle\" if Board[i]: note +=",
"Board[destination] = color Marbles[color].remove(source) Marbles[color].append(destination) return moveDesc # # ValidMove (marble, destination, die)",
"= dude+i if not badMove and testloc >= myStart: # testloc is in",
"mc == 6: pass else: badMove = 1 # Check regular spots after",
"(\"\\t\", end=\"\") print (output[104-i], end=\"\") # Print it backwards print(\"\\n\") for p in",
"\" + Board[destination] + \"!\" print (\"Bonk! %s hits %s!\" %(color,Board[destination])) Bonk(destination) Board[destination]",
"== -4: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") elif NumPlayers == 5 or NumPlayers ==",
"marble from source if source == CENTER: assert CenterSpace == color CenterSpace =",
"BOARDSIZE response.append([dude, i, note, distance]) continue assert dude != CENTER # MAGIC CIRCLE",
"if Board[MoveToCheck] == color: # Handle case where I roll a 6 and",
">= 42 and i < 63: if i == 42: print (\"\\t\", end=\"\")",
"note += \" & Bonk \" + Board[Start[color]] note += \"]\" if not",
"menu for move in moves: strt, finish, note, distance = move if finish",
"MagicCircle: output[i] = ccode[space]+space[0].upper()+creset #print (ccode[space]+space[0].upper()+creset, end=\"\") else: output[i] = ccode[space]+space[0].lower()+creset #print (ccode[space]+space[0].lower()+creset,",
"note, distance]) # MOVEMENT INTO HOME # NB: Add special cases for Blue,",
"that walk. if Board[destination-i] == color: return False return True assert marble not",
"the center of death. BASE=99 # \"Location\" for base spots. All are 99.",
"board if Board[testloc] == color: # Can't pass teammate return False # Checked",
"BOARDSIZE]) firstStart=0 continue else: continue # # Handle \"regular\" motion starting here: #",
"or Home assert Board[destination] != color moveDesc += \"\" + str(destination) + \"",
"3 or NumPlayers == -3: Players.append(\"Blue\") Players.append(\"Cyan\") Players.append(\"Green\") elif NumPlayers == 4 or",
"bg=\"black\") canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop() # # Main # def Main(): GameOver = 0",
"i == 31: if CenterSpace: cen = ccode[CenterSpace]+CenterSpace[0].upper()+creset else: #cen = \"-\" #cen",
"CENTER, note, distance]) # If I'm in the center and I got a",
"of casting black magic to # do that. thiscolor = list(Start.keys())[list(Start.values()).index(i)] #output[i] =",
"teammate return False # Checked all intermediate spaces, and destination space homeloc =",
"= BOARDSIZE for i in range(1,die+1): testloc = dude+i if not badMove and",
"def Bonk(space): if space == CENTER: deadGuy = CenterSpace else: deadGuy = Board[space]",
"range(1,die+1): testloc = dude+i if not badMove and testloc >= myStart: # testloc",
"selection: selection = 1 elif pColor == \"Cyan\" or pColor == \"Purple\" or",
"\" # Deal with possible destinations if destination == CENTER: assert CenterSpace !=",
"you get an upper case # letter if i in MagicCircle: output[i] =",
"send a guy back to base # def Bonk(space): if space == CENTER:",
"All are 99. HOME=100 # \"Location\" for home spots - 100, 101, 102,",
"# Special processing: If the roll is a 6 in magic # circle,",
"color: win=0 break return bool(win) def TkSetup(): root = tkinter.Tk() root.title(\"Marbles!\") canvas =",
"[magic];Attrib;FG;BGm \"Blue\": \"\\033[1;97;44m\", \"Red\": \"\\033[1;97;41m\", \"Cyan\": \"\\033[1;97;46m\", \"Purple\": \"\\033[1;97;45m\", \"Green\": \"\\033[1;35;42m\", \"White\": \"\\033[1;31;47m\",",
"\"[Base] -> \" elif source >= HOME: Home[color][source-HOME] = \"\" moveDesc += \"Home[\"",
"IsWinner(pColor): print (\"%s wins in %d turns!\" %(pColor, turnNum)) GameOver = 1 return",
"\"Green\", \"White\" ] Board = [\"\" for x in range(0,BOARDSIZE)] CenterSpace = \"\"",
"str(destination) + \" \" # Deal with bonking if destination is not empty",
"0 and homeloc < HOMESIZE: return True assert False return False # Something",
"== BASE: assert destination == Start[color] if (die == 1 or die ==",
"Board[destination] + \"!\" print (\"Bonk! %s hits %s!\" %(color,Board[destination])) Bonk(destination) Board[destination] = color",
"marble+die >= myStart: # Test the spaces between here and my final location",
"\"\" if (dude+die)%BOARDSIZE in MagicCircle or Board[(dude+die)%BOARDSIZE]: note += \"[\" if (dude+die)%BOARDSIZE in",
"space# Start = { \"Blue\": 0, \"Red\": 14, \"Cyan\": 28, \"Purple\": 42, \"Green\":",
"is not to play.\") NumPlayers = -6 robotMode = 1 elif NumPlayers >=",
"options with a die roll # def GetMoves(color,die): assert die > 0 and",
"is needed numPlayers = Setup() if numPlayers <= 0: robotMode = 1 numPlayers",
"= \"\" Marbles[deadGuy].append(BASE) Marbles[deadGuy].remove(space) Base[deadGuy].append(deadGuy) # # Move(color, source, destination): # # Move",
"-> Start %s\" %(option,note)) else: if finish == CENTER: print (\"\\t[%d] %d ->",
"(dude+die)%BOARDSIZE in MagicCircle: note += \"Magic Circle\" if (dude+die)%BOARDSIZE in MagicCircle and Board[(dude+die)%BOARDSIZE]:",
"chr(0x00A7) # Hurricane print(\"\\t%s\\t %s\\t %s\" %(output[104-i],cen,output[i])) else: print(\"\\t\"+output[104-i],\"\\t\\t \",output[i]) elif i >=",
"\" else: assert Board[source] == color Board[source] = \"\" moveDesc += \"\" +",
"\" + CenterSpace note += \"]\" if not ValidMove(dude, CENTER, die, color): assert",
"63: if i == 42: print (\"\\t\", end=\"\") print (output[104-i], end=\"\") # Print",
"Board[Start[color]]: note += \" & Bonk \" + Board[Start[color]] note += \"]\" if",
"badMove=0 circleBlock=0 # Check magic circle spots I traversed for mc in range(1,i+1):",
"# Remove marble from source if source == CENTER: assert CenterSpace == color",
"Check %d\" %(color,die,dude)) note =\"\" # Just in case, clear out any previous",
"i >= 42 and i < 63: if i == 42: print (\"\\t\",",
"teammate badMove = 1 # End of for i in range(1,die) if not",
"serves as a # check because I was having problems. :) I should",
"*= -1 # TkSetup() Display() # Show the initial game board while not",
"6 # def Roll(): return randint(1,6) # # Display(): # # Prints out",
"a player rolls a 6 while again: again=0 pColor = Players[p] myRoll =",
"\"[Home]\", 0]) # Still on the Board elif loc < myStart: if Board[loc]:",
"continue GotInput = 0 selection = 0 # Red always goes for the",
"Start %s\" %(option,note)) else: if finish == CENTER: print (\"\\t[%d] %d -> Center",
"# print (\"[Leaving] GetMoves(color=%s die=%d) =\" %(color,die),response) return SortMoves(response) # # IsWinner(color) #",
"== 0: # HACK for Blue with start of 0 myStart = BOARDSIZE",
"6.\") Setup = 0 except TypeError: print (\"Please enter a number between 2",
"try: Setup=1 NumPlayers = int(input(\"How many players? \")) if NumPlayers == 0: print",
"the board. # XXX: This could be replaced with Tk or something else.",
"\"]\" if not ValidMove(dude, i, die, color): assert False distance = BOARDSIZE -",
"SortMoves(myList) # # Used by .sorted to return lists in order def SortMoves(sub_li):",
"True # \"NORMAL\" MOVEMENT if marble not in MagicCircle and marble < BOARDSIZE",
"in MagicCircle: magicDestination = MagicCircle.index(destination-i) # Check all the magic spaces between where",
"dude >= HOME: hm = Home[color] # hm means Home[color] hp = dude-HOME",
"1 # Deckard is a replicant! if robotMode and pColor == \"Blue\": selection",
"source == CENTER: assert CenterSpace == color CenterSpace = \"\" moveDesc += \"[Center]",
"circle... for i in range(0, die+1): circleExit = MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot = (circleExit +",
"True assert False return False # Something insane happened? # Movement WITHIN Home",
":) I should probably remove # most of thie duplicate logic from GetMoves",
"range(0,die+1): if destination-i in MagicCircle: magicDestination = MagicCircle.index(destination-i) # Check all the magic",
"and die < 7 assert color in Colors assert color in Players #",
"as a # check because I was having problems. :) I should probably",
"Roll a die. # Returns an int between 1 and 6 # def",
"elif Home[color][testloc]: # somebody in the way badMove = 1 else: # Still",
"note += \" & \" if Board[(dude+die)%BOARDSIZE]: note += \"Bonk \" + Board[(dude+die)%BOARDSIZE]",
"== 1: selection = 1 GotInput = 1 else: print (\"Bad input\") GotInput",
"print (\"Like tears in rain.\") robotMode = 1 elif NumPlayers < 2 or",
"and (dude+die)%BOARDSIZE >= Start[color]) or \\ (Start[color] == 0 and dude < Start[color]+BOARDSIZE",
"= marble+i if testloc >= myStart: # testloc is in the Home zone",
"Display(): # # Prints out the state of the board. # XXX: This",
"> 0 and die < 7 assert color # Quick check to see",
"assert False distance = BOARDSIZE - (finalspot - Start[color]) % BOARDSIZE response.append([dude, finalspot,",
"hm[hp+i] == color: return False return True # \"NORMAL\" MOVEMENT if marble not",
"end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[h]+h[0].lower()+creset, end=\"\") print() # # Setup(): #",
"GotInput = 0 src,dst,note,distance = moves[selection-1] if not ValidMove(src,dst,myRoll,pColor): print (\"ERROR: ValidMove(%d, %d,",
"\"Bonk\" in moves[i][2]: selection = i+1 print (\"Kill!\", moves[i]) break if not selection:",
"tk.Canvas(root, width=200, height=200, borderwidth=0, bg=\"black\") canvas.grid() canvas.create_oval(100,100,200,200,fill=\"blue\",outline=\"#DDD\",width=4) root.mainloop() # # Main # def",
"\"Bonk \" + CenterSpace + \"!\" Bonk(CENTER) CenterSpace = color elif destination >=",
"the valid player options with a die roll # def GetMoves(color,die): assert die",
"space if destination == CENTER: assert marble+die-1 in MagicCircle if CenterSpace == color:",
"once for dude in Marbles[color]: # print (\"[] GetMoves(color=%s die=%d) - Check %d\"",
"= 0 selection = 0 # Red always goes for the kill #",
"else: # If you're on the magic circle, you get an upper case",
"== 6: pass else: badMove = 1 # Check regular spots after I",
".. or she chooses 1 # Deckard is a replicant! if robotMode and",
"board while not GameOver: # Main game loop turnNum += 1 for p",
"range(1,die+1): if(hp+i >= HOMESIZE): valid=0 continue if hp+i > HOMESIZE or hm[hp+i] ==",
"permutations for magic circle... for i in range(0, die+1): circleExit = MagicCircle[(circleNum+i)%len(MagicCircle)] finalspot",
"== 31: if CenterSpace: cen = ccode[CenterSpace]+CenterSpace[0].upper()+creset else: #cen = \"-\" #cen =",
"selection = 1 GotInput = 1 if pColor == \"Red\" or pColor ==",
"Board[destination] != color: return True else: return False assert marble != CENTER assert",
"not empty if Board[destination]: moveDesc += \"Bonk \" + Board[destination] + \"!\" print",
"destination-i in MagicCircle: magicDestination = MagicCircle.index(destination-i) # Check all the magic spaces between",
"a random option selection = randint(1,len(moves)) GotInput = 1 while not GotInput: option=1",
"print(\"%s rolled a 6! Take another turn.\" %pColor) again=1 if IsWinner(pColor): print (\"%s",
"note += \"[\" if (dude+die)%BOARDSIZE in MagicCircle: note += \"Magic Circle\" if (dude+die)%BOARDSIZE",
"BOARDSIZE - (loc - Start[color]) % BOARDSIZE response.append([dude, loc, note, distance]) # Movement",
"to # do that. thiscolor = list(Start.keys())[list(Start.values()).index(i)] #output[i] = startColor[thiscolor]+\"#\"+creset output[i] = startColor[thiscolor]+chr(0x00BB)+creset",
"with Tk or something else. # def Display(): # Color! # ANSI color",
"\"Cyan\": \"\\033[1;36;40m\", \"Purple\": \"\\033[1;35;40m\", \"Green\": \"\\033[1;32;40m\", \"White\": \"\\033[1;37;40m\", } # Reset the color",
"not special: note += \" & \" if Board[finalspot] and not special: note",
"< HOMESIZE: return True assert False return False # Something insane happened? #",
"destination is not empty if Board[destination]: moveDesc += \"Bonk \" + Board[destination] +",
"color)) assert die > 0 and die < 7 assert color # Quick",
"game that can be very # frustrating! # from random import randint #import",
"c] Home[c] = [ \"\", \"\", \"\", \"\"] # Where are my marbles?",
"entered # and where I exited for j in range(magicStart, magicDestination+1): if Board[MagicCircle[j]]",
"\" + Board[finalspot] if finalspot in MagicCircle or Board[finalspot]: note += \"]\" if",
"but sucked so now takes 1 # Cyan takes option 1 # Purple",
"= 1 elif NumPlayers >= -6 and NumPlayers <= -2: print (\"Like tears",
"TypeError: print (\"Please enter a number between 2 and 6.\") Setup = 0",
"then I # can enter the Center. dude+die-1 is equal to MagicCircle+1 if",
"with a die roll # def GetMoves(color,die): assert die > 0 and die",
"marble is in Base, see if it can get out if marble ==",
"Position for i in range(1,die+1): if(hp+i >= HOMESIZE): return False if hp+i >",
"1 GotInput = 1 if pColor == \"Red\" or pColor == \"Purple\": #",
"= { \"Blue\": 0, \"Red\": 14, \"Cyan\": 28, \"Purple\": 42, \"Green\": 56, \"White\":",
"hits %s!\" %(color, CenterSpace)) moveDesc += \"Bonk \" + CenterSpace + \"!\" Bonk(CENTER)",
"MagicCircle: note += \"Magic Circle\" if finalspot in MagicCircle and Board[finalspot] and not",
"# Determine if color has won. Returns True/False # def IsWinner(color): win=1 for",
"# The magic circle is poisoned from # here on out.. circleBlock =",
"in MagicCircle or Board[finalspot]: note += \"]\" if not ValidMove(dude, finalspot, die, color):",
"# Just in case, clear out any previous note # If this marble",
"# \"Location\" for home spots - 100, 101, 102, 103 HOMESIZE=4 # How",
"2 magic circle space and then continue the # normal track, or ...",
"# Is the game over turnNum = 0 robotMode = 0 # A",
"Main(): GameOver = 0 # Is the game over turnNum = 0 robotMode",
"for i in range(1,die): if Board[(dude+i)%BOARDSIZE] == color: selfPass = 1 continue if",
"# homeloc is home space # Move into Home if homeloc >= 0",
"finalspot in MagicCircle or Board[finalspot]: note += \"]\" if not ValidMove(dude, finalspot, die,",
"# If I'm in the center and I got a one, I can",
"6 in magic circle means I can land on myself if mc ==",
"# # Move marble of color color from source to destination. # def",
"in the magic circle ending where you start if marble == destination and",
"can get out if marble == BASE: assert destination == Start[color] if (die",
"index of where we are in the magic # circle list, so we",
"MagicCircle.index(dude) # Lots of permutations for magic circle... for i in range(0, die+1):",
"badMove = 0 myStart = Start[color] if myStart == 0: # HACK for",
"color: note = \"[Magic Circle\" if Board[i]: note += \" & Bonk \"",
"[BASE, BASE, BASE, BASE ] robotMode = 0 Setup = 0 # Has",
"out.. circleBlock = 1 continue if circleBlock: continue if not badMove: # Add",
"be optimal, but sucked so now takes 1 # Cyan takes option 1",
"else badMove = 1 if t==0: # The magic circle is poisoned from",
"marble in MagicCircle: # magicStart is the index of where we are in",
"color): assert False distance = BOARDSIZE - 8 response.append([dude, CENTER, note, distance]) #",
"on out.. circleBlock = 1 continue if circleBlock: continue if not badMove: #",
"black magic to # do that. thiscolor = list(Start.keys())[list(Start.values()).index(i)] #output[i] = startColor[thiscolor]+\"#\"+creset output[i]",
"enter a number between 2 and 6.\") Setup = 0 except TypeError: print",
"want to add Start once for dude in Marbles[color]: # print (\"[] GetMoves(color=%s",
"[\"\" for x in range(0,BOARDSIZE)] CenterSpace = \"\" MagicCircle = [ 7, 21,",
"assert color # Quick check to see if there's a teammate at the",
"& Bonk \" + Board[Start[color]] note += \"]\" if not ValidMove(dude, Start[color], die,",
"i in MagicCircle: #output[i] = \"*\" #print (\"*\", end=\"\") output[i] = chr(0x00A4) #",
"base assert Base[color].count(color) > 0 Base[color].remove(color) # The destination is that color's start",
"if len(moves) == 1: selection = 1 GotInput = 1 else: print (\"Bad",
">= HOME: print(\"\\t[%d] Home[%d] -> Home[%d] %s\" \\ %(option, strt-HOME+1, finish-HOME+1, note)) else:",
"is me. special=0 if dude == finalspot: # End where I started special=1",
"print() elif i >= 21 and i < 42: if i == 31:",
"if not ValidMove(dude, loc, die, color): assert False distance = BOARDSIZE - (loc",
"Players.append(\"Purple\") elif NumPlayers == 3 or NumPlayers == -3: Players.append(\"Blue\") Players.append(\"Cyan\") Players.append(\"Green\") elif",
"circle, you get an upper case # letter if i in MagicCircle: output[i]",
"MagicCircle and CenterSpace != color: yep=1 for i in range(1,die+1): if Board[dude+i] ==",
"and destination < BOARDSIZE: for i in range(1,die): if Board[(marble+i)%BOARDSIZE] == color: return",
"Start[color], note, BOARDSIZE]) firstStart=0 continue else: continue # # Handle \"regular\" motion starting",
"magicStart is the index of where we are in the magic # circle",
"probably remove # most of thie duplicate logic from GetMoves and have it",
"\"\": # Use a * to indicate magic circle if i in MagicCircle:",
"+= \" & \" if Board[finalspot] and not special: note += \"Bonk \"",
"marbles? All your base are belong to us. Marbles[c] = [BASE, BASE, BASE,",
"selection = 1 GotInput = 1 elif pColor == \"Green\": # Take a",
"strt == BASE: print (\"\\t[%d] Base -> Start %s\" %(option,note)) else: if finish",
"if dude == BASE: if (die == 1 or die == 6) and",
"(\"Please enter a number between 2 and 6.\") Setup=0 except KeyError: print (\"Please",
"An implementation of the classic marble board game that can be very #",
"only loc = dude+die # loc is destination space homeloc = loc -",
"\"\"] # Where are my marbles? All your base are belong to us.",
"\",output[i]) elif i >= 42 and i < 63: if i == 42:",
"the # value. So here's a bunch of casting black magic to #",
"except TypeError: print (\"Bad input\") GotInput = 0 src,dst,note,distance = moves[selection-1] if not",
"= dude+die # loc is destination space homeloc = loc - myStart #",
">= HOMESIZE: # Ran off the end of Home return False elif Home[color][testloc]:",
"<gh_stars>0 # Marbles! # # An implementation of the classic marble board game",
"assert False return False # Something insane happened? # Movement WITHIN Home if",
"in range(0,BOARDSIZE): if i >=0 and i < 21: if i == 0:",
"2 and 6.\") Setup=0 except KeyError: print (\"Please enter a number between 2",
"# def Bonk(space): if space == CENTER: deadGuy = CenterSpace else: deadGuy =",
"working. # def ValidMove(marble, destination, die, color): # print (\"[Entering] ValidMove(src=%d, dest=%d, die=%d,",
"# Occupied space else: # If you're on the magic circle, you get",
"marble < HOME+HOMESIZE assert destination >= HOME hm = Home[color] # hm means",
"to one past the MagicCircle, then I # can enter the Center. dude+die-1",
"of death. BASE=99 # \"Location\" for base spots. All are 99. HOME=100 #",
"INTO HOME myStart = Start[color] if myStart == 0: # I have grown",
"me. special=0 if dude == finalspot: # End where I started special=1 note",
"\"Cyan\": \"\\033[1;97;46m\", \"Purple\": \"\\033[1;97;45m\", \"Green\": \"\\033[1;35;42m\", \"White\": \"\\033[1;31;47m\", } # ANSI color codes",
"Deckard is a replicant! if robotMode and pColor == \"Blue\": selection = 1",
"=\"\" # Just in case, clear out any previous note # If this",
"myStart == 0: # HACK for Blue with start of 0 myStart =",
"101, 102, 103 HOMESIZE=4 # How big is your home? Colors = [",
"Handle \"regular\" motion starting here: # # CENTER SPACE HANDLING # If my",
"robotMode = 0 Setup = 0 # Has the game been setup? while",
"# circle list, so we can bop around by adding die values #",
"= Home[color] # hm means Home[color] hp = marble-HOME # hp means Home",
"die values # to the index in that list circleNum = MagicCircle.index(dude) #",
"ValidMove(dude, HOME+homeloc, die, color): assert False response.append([dude, HOME+homeloc, \"[Home]\", 0]) # Still on",
"normal track, or ... if dude in MagicCircle: # circleNum is the index",
"if strt >= HOME: print(\"\\t[%d] Home[%d] -> Home[%d] %s\" \\ %(option, strt-HOME+1, finish-HOME+1,",
"== 0: # I have grown to hate Blue in this game myStart",
"Dict of each color's home status Marbles = {} # Dict of each",
"ValidMove(src=%d, dest=%d, die=%d, color=%s)\" %(marble, destination, die, color)) assert die > 0 and",
"of for i in range(1,die) if not badMove: # Valid moves only loc",
"cen = ccode[CenterSpace]+CenterSpace[0].upper()+creset else: #cen = \"-\" #cen = chr(216) cen = chr(0x00A7)",
"= [ \"\", \"\", \"\", \"\"] # Where are my marbles? All your",
"dude == BASE: if (die == 1 or die == 6) and (Board[Start[color]]!=color)",
"= 1 elif NumPlayers < 2 or NumPlayers > 6: print (\"Please enter",
"badMove: # Valid moves only loc = dude+die # loc is destination space",
"note += \"Magic Circle\" if (dude+die)%BOARDSIZE in MagicCircle and Board[(dude+die)%BOARDSIZE]: note += \"",
"(dude+die)%BOARDSIZE, die, color): assert False distance = BOARDSIZE - ((dude+die)%BOARDSIZE - Start[color]) %",
"strt, finish, note, distance = move if finish >= HOME: if strt >=",
"for t in range(0,die-i+1): # t is number of hops out of circle",
"robotMode = 0 # A human is needed numPlayers = Setup() if numPlayers",
"for c in Colors: Base[c] = [ c, c, c, c] Home[c] =",
"# Add this to the list # Special processing: If the roll is",
"HOMESIZE: # Ran off the end of Home badMove = 1 elif Home[color][testloc]:",
"myRoll == 6: print(\"%s rolled a 6! Take another turn.\" %pColor) again=1 if",
"logic from GetMoves and have it here only. # But, you know, this",
"assert dude != CENTER # MAGIC CIRCLE HANDLING # If I'm in the",
"returning with ALL valid moves response = [] # For each marble, figure",
"= 1 GotInput = 1 else: print (\"Bad input\") GotInput = 0 except",
"robotMode = 1 elif NumPlayers >= -6 and NumPlayers <= -2: print (\"Like",
">= myStart: # testloc is in the Home zone testloc -= myStart #",
"CenterSpace)) moveDesc += \"Bonk \" + CenterSpace + \"!\" Bonk(CENTER) CenterSpace = color",
"\"Location\" for base spots. All are 99. HOME=100 # \"Location\" for home spots",
"6 in magic # circle, that isn't bonking because it is me. special=0",
"if dude in MagicCircle: # circleNum is the index of where we are",
"assert False response.append([dude, HOME+homeloc, \"[Home]\", 0]) # Still on the Board elif loc",
"magic circle spots I traversed for mc in range(1,i+1): if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] == color:",
"out to any # magic circle space if dude == CENTER: if die==1:",
"assert False response.append([dude, dude+die, \"[Home]\", 0]) # \"NORMAL\" MOVEMENT elif Board[(dude+die)%BOARDSIZE] != color:",
"myStart = BOARDSIZE for i in range(1,die+1): testloc = dude+i if not badMove",
"color codes for the marbles ccode={ # [magic];Attrib;FG;BGm \"Blue\": \"\\033[1;97;44m\", \"Red\": \"\\033[1;97;41m\", \"Cyan\":",
"roll can take me to one past the MagicCircle, then I # can",
"game loop turnNum += 1 for p in range(0,numPlayers): again=1 # Flag for",
"i in range(1,die) if not badMove: # Valid moves only loc = dude+die",
"color: selfPass = 0 for i in range(1,die): if Board[(dude+i)%BOARDSIZE] == color: selfPass",
"color moveDesc += \"\" + str(destination) + \" \" # Deal with bonking",
"normal # track, or hope 2 magic circle space and then continue the",
"I traversed for mc in range(1,i+1): if Board[MagicCircle[(circleNum+mc)%len(MagicCircle)]] == color: # Passed through",
"clear out any previous note # If this marble is in Base, see",
"the base assert Base[color].count(color) > 0 Base[color].remove(color) # The destination is that color's",
"if b == \"\": #print (\"-\", end=\"\") print (chr(0x00B7), end=\"\") else: print (ccode[b]+b[0].lower()+creset,",
"setup? while not Setup: try: Setup=1 NumPlayers = int(input(\"How many players? \")) if",
"color + \": \" # Remove marble from source if source == CENTER:",
"Players.append(\"Cyan\") Players.append(\"Red\") else: Players.append(\"Blue\") Players.append(\"Purple\") Players.append(\"White\") Players.append(\"Cyan\") Players.append(\"Red\") Players.append(\"Green\") return NumPlayers # #",
"note, distance]) # Done! # print (\"[Leaving] GetMoves(color=%s die=%d) =\" %(color,die),response) return SortMoves(response)",
"... if dude in MagicCircle: # circleNum is the index of where we",
"{ location0, location1, location2, location3 } # Start[color] : space# Start = {",
"j in range(magicStart, magicDestination+1): if Board[MagicCircle[j]] == color: if marble == destination and"
] |
[
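The move generator above leans on one piece of modular arithmetic in several places: board positions wrap at BOARDSIZE, and every move is sorted by the distance still to travel, computed as BOARDSIZE - (loc - Start[color]) % BOARDSIZE. A minimal, self-contained sketch of just that formula (the constants are copied from the game; distance_home is a hypothetical helper name, not part of the game file):

# Sketch of the wrap-around distance metric used to sort moves.
BOARDSIZE = 84
Start = {"Blue": 0, "Red": 14}

def distance_home(loc, color):
    # Spaces still to travel before coming back around to this
    # color's start space.
    return BOARDSIZE - (loc - Start[color]) % BOARDSIZE

assert distance_home(13, "Red") == 1    # one step short of Red's start
assert distance_home(14, "Red") == 84   # on the start: a full lap to go
assert distance_home(83, "Blue") == 1

Because closer-to-home moves get smaller weights, SortMoves puts the most advanced marbles first in the option list, which is also why the robot players' "take option 1" strategy is not completely naive.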
"self.btn4 = Button(window, text = \"Division\", command = divide) self.btn4.grid(row = 4, column",
"text = \"Multiplication\", command = multiply) self.btn3.grid(row = 4, column = 3) self.btn4",
"return def multiply(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1*num2",
"num2 = int(self.txt2.get()) ans = num1 + num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\")",
"= num1 - num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def multiply(): self.txt3.configure(state=\"normal\")",
"Entry(window, bd = 3) self.txt2.grid(row = 3, column = 3) def add(): self.txt3.configure(state=\"normal\")",
"self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1*num2 self.txt3.delete(0, END) self.txt3.insert(END,",
"= \"Multiplication\", command = multiply) self.btn3.grid(row = 4, column = 3) self.btn4 =",
"self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def divide(): self.txt3.configure(state = \"normal\") num1 =",
"\"Multiplication\", command = multiply) self.btn3.grid(row = 4, column = 3) self.btn4 = Button(window,",
"= 3) self.txt2 = Entry(window, bd = 3) self.txt2.grid(row = 3, column =",
"self.btn3 = Button(window, text = \"Multiplication\", command = multiply) self.btn3.grid(row = 4, column",
"\") self.lbl3.grid(row = 3, column = 2) self.txt = Entry(window, bd = 3)",
"= 0, columnspan = 5, pady = (10, 20)) self.lbl2 = Label(window, text",
"__init__(self, window): self.lbl1 = Label(window, text = \"Simple Calculator\") self.lbl1.grid(row = 0, columnspan",
"= int(self.txt2.get()) ans = num1*num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def divide():",
"= \"normal\") self.txt3.delete(0, END) self.txt3.configure(state = \"disabled\") self.txt.delete(0, END) self.txt2.delete(0, END) return self.btn",
"return def sub(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1",
"= 3) self.txt2.grid(row = 3, column = 3) def add(): self.txt3.configure(state=\"normal\") num1 =",
"= \"readonly\") self.txt3.grid(row = 5, column = 3) self.btn_clr = Button(window, text=\"Clear\", command=",
"class MyWindow: def __init__(self, window): self.lbl1 = Label(window, text = \"Simple Calculator\") self.lbl1.grid(row",
"Entry(window, bd = 3) self.txt.grid(row = 2, column = 3) self.txt2 = Entry(window,",
"= 1, pady = 20) self.btn2 = Button(window, text = \"Subtraction\", command =",
"weight=1) class MyWindow: def __init__(self, window): self.lbl1 = Label(window, text = \"Simple Calculator\")",
"self.txt3.configure(state=\"disabled\") return def divide(): self.txt3.configure(state = \"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get())",
"num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def multiply(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get())",
"3, column = 2) self.txt = Entry(window, bd = 3) self.txt.grid(row = 2,",
"2) self.txt = Entry(window, bd = 3) self.txt.grid(row = 2, column = 3)",
"3) self.txt2 = Entry(window, bd = 3) self.txt2.grid(row = 3, column = 3)",
"int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 - num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans))",
"= multiply) self.btn3.grid(row = 4, column = 3) self.btn4 = Button(window, text =",
"self.btn.grid(row = 4, column = 1, pady = 20) self.btn2 = Button(window, text",
"Tk() window.title(\"Simple Calculator\") window.geometry(\"400x300+20+10\") window.grid_columnconfigure(0, weight=1) class MyWindow: def __init__(self, window): self.lbl1 =",
"column = 3) def add(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans",
"= int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1/num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state",
"column = 3) self.btn_clr = Button(window, text=\"Clear\", command= btnclr) self.btn_clr.grid(row=13, column = 3,",
"text = \"Addition\", command = add) self.btn.grid(row = 4, column = 1, pady",
"text = \"Division\", command = divide) self.btn4.grid(row = 4, column = 4, padx",
"1, pady = 20) self.btn2 = Button(window, text = \"Subtraction\", command = sub)",
"self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def multiply(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2",
"self.lbl3 = Label(window, text = \"Enter 2nd Number: \") self.lbl3.grid(row = 3, column",
"add(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 + num2",
"self.txt3.delete(0, END) self.txt3.configure(state = \"disabled\") self.txt.delete(0, END) self.txt2.delete(0, END) return self.btn = Button(window,",
"ans = num1 - num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def multiply():",
"= 2, column = 3) self.txt2 = Entry(window, bd = 3) self.txt2.grid(row =",
"self.btn2.grid(row = 4, column = 2, padx = (30, 15)) self.btn3 = Button(window,",
"self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 - num2 self.txt3.delete(0,",
"bd = 3) self.txt2.grid(row = 3, column = 3) def add(): self.txt3.configure(state=\"normal\") num1",
"= Label(window, text = \"Enter 1st Number: \") self.lbl2.grid(row = 2, column =",
"text = \"Simple Calculator\") self.lbl1.grid(row = 0, columnspan = 5, pady = (10,",
"+ num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def sub(): self.txt3.configure(state=\"normal\") num1 =",
"self.txt2 = Entry(window, bd = 3) self.txt2.grid(row = 3, column = 3) def",
"\"disabled\") self.txt.delete(0, END) self.txt2.delete(0, END) return self.btn = Button(window, text = \"Addition\", command",
"divide) self.btn4.grid(row = 4, column = 4, padx = 15) self.lbl4 = Label(window,",
"= Label(window, text = \"Result: \") self.lbl4.grid(row = 5, column = 2) self.txt3",
"multiply) self.btn3.grid(row = 4, column = 3) self.btn4 = Button(window, text = \"Division\",",
"= 15) self.lbl4 = Label(window, text = \"Result: \") self.lbl4.grid(row = 5, column",
"int(self.txt2.get()) ans = num1*num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def divide(): self.txt3.configure(state",
"int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1*num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return",
"pady = 20) self.btn2 = Button(window, text = \"Subtraction\", command = sub) self.btn2.grid(row",
"4, padx = 15) self.lbl4 = Label(window, text = \"Result: \") self.lbl4.grid(row =",
"num1 + num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def sub(): self.txt3.configure(state=\"normal\") num1",
"4, column = 4, padx = 15) self.lbl4 = Label(window, text = \"Result:",
"self.lbl4.grid(row = 5, column = 2) self.txt3 = Entry(window, state = \"readonly\") self.txt3.grid(row",
"= 5, column = 3) self.btn_clr = Button(window, text=\"Clear\", command= btnclr) self.btn_clr.grid(row=13, column",
"sub) self.btn2.grid(row = 4, column = 2, padx = (30, 15)) self.btn3 =",
"window.title(\"Simple Calculator\") window.geometry(\"400x300+20+10\") window.grid_columnconfigure(0, weight=1) class MyWindow: def __init__(self, window): self.lbl1 = Label(window,",
"END) self.txt2.delete(0, END) return self.btn = Button(window, text = \"Addition\", command = add)",
"window.grid_columnconfigure(0, weight=1) class MyWindow: def __init__(self, window): self.lbl1 = Label(window, text = \"Simple",
"2, padx = (30, 15)) self.btn3 = Button(window, text = \"Multiplication\", command =",
"\"Enter 2nd Number: \") self.lbl3.grid(row = 3, column = 2) self.txt = Entry(window,",
"5, pady = (10, 20)) self.lbl2 = Label(window, text = \"Enter 1st Number:",
"num1 - num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def multiply(): self.txt3.configure(state=\"normal\") num1",
"self.txt2.grid(row = 3, column = 3) def add(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2",
"= \"disabled\") self.txt.delete(0, END) self.txt2.delete(0, END) return self.btn = Button(window, text = \"Addition\",",
"20) self.btn2 = Button(window, text = \"Subtraction\", command = sub) self.btn2.grid(row = 4,",
"Calculator\") window.geometry(\"400x300+20+10\") window.grid_columnconfigure(0, weight=1) class MyWindow: def __init__(self, window): self.lbl1 = Label(window, text",
"self.lbl2.grid(row = 2, column = 2) self.lbl3 = Label(window, text = \"Enter 2nd",
"self.txt3.configure(state=\"disabled\") return def multiply(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans =",
"self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def multiply(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get())",
"self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state = \"disabled\") return def btnclr(): self.txt3.configure(state = \"normal\")",
"= \"Simple Calculator\") self.lbl1.grid(row = 0, columnspan = 5, pady = (10, 20))",
"num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 + num2 self.txt3.delete(0, END)",
"self.txt3.configure(state = \"normal\") self.txt3.delete(0, END) self.txt3.configure(state = \"disabled\") self.txt.delete(0, END) self.txt2.delete(0, END) return",
"\") self.lbl4.grid(row = 5, column = 2) self.txt3 = Entry(window, state = \"readonly\")",
"= divide) self.btn4.grid(row = 4, column = 4, padx = 15) self.lbl4 =",
"= 2, column = 2) self.lbl3 = Label(window, text = \"Enter 2nd Number:",
"Button(window, text=\"Clear\", command= btnclr) self.btn_clr.grid(row=13, column = 3, pady = 5) mywin=MyWindow(window) window.mainloop()",
"= num1 + num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def sub(): self.txt3.configure(state=\"normal\")",
"\"readonly\") self.txt3.grid(row = 5, column = 3) self.btn_clr = Button(window, text=\"Clear\", command= btnclr)",
"= int(self.txt2.get()) ans = num1/num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state = \"disabled\") return",
"num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1/num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans))",
"return def divide(): self.txt3.configure(state = \"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans",
"num2 = int(self.txt2.get()) ans = num1/num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state = \"disabled\")",
"Button(window, text = \"Addition\", command = add) self.btn.grid(row = 4, column = 1,",
"column = 2) self.txt = Entry(window, bd = 3) self.txt.grid(row = 2, column",
"self.lbl1.grid(row = 0, columnspan = 5, pady = (10, 20)) self.lbl2 = Label(window,",
"= num1*num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def divide(): self.txt3.configure(state = \"normal\")",
"0, columnspan = 5, pady = (10, 20)) self.lbl2 = Label(window, text =",
"divide(): self.txt3.configure(state = \"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1/num2",
"\"normal\") self.txt3.delete(0, END) self.txt3.configure(state = \"disabled\") self.txt.delete(0, END) self.txt2.delete(0, END) return self.btn =",
"Button(window, text = \"Subtraction\", command = sub) self.btn2.grid(row = 4, column = 2,",
"self.txt3 = Entry(window, state = \"readonly\") self.txt3.grid(row = 5, column = 3) self.btn_clr",
"= Button(window, text=\"Clear\", command= btnclr) self.btn_clr.grid(row=13, column = 3, pady = 5) mywin=MyWindow(window)",
"btnclr(): self.txt3.configure(state = \"normal\") self.txt3.delete(0, END) self.txt3.configure(state = \"disabled\") self.txt.delete(0, END) self.txt2.delete(0, END)",
"1st Number: \") self.lbl2.grid(row = 2, column = 2) self.lbl3 = Label(window, text",
"= \"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1/num2 self.txt3.delete(0, END)",
"def __init__(self, window): self.lbl1 = Label(window, text = \"Simple Calculator\") self.lbl1.grid(row = 0,",
"= \"Subtraction\", command = sub) self.btn2.grid(row = 4, column = 2, padx =",
"self.lbl3.grid(row = 3, column = 2) self.txt = Entry(window, bd = 3) self.txt.grid(row",
"column = 2, padx = (30, 15)) self.btn3 = Button(window, text = \"Multiplication\",",
"command = multiply) self.btn3.grid(row = 4, column = 3) self.btn4 = Button(window, text",
"int(self.txt2.get()) ans = num1 - num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def",
"= \"Enter 2nd Number: \") self.lbl3.grid(row = 3, column = 2) self.txt =",
"return def btnclr(): self.txt3.configure(state = \"normal\") self.txt3.delete(0, END) self.txt3.configure(state = \"disabled\") self.txt.delete(0, END)",
"text = \"Subtraction\", command = sub) self.btn2.grid(row = 4, column = 2, padx",
"str(ans)) self.txt3.configure(state=\"disabled\") return def multiply(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans",
"END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def multiply(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 =",
"return self.btn = Button(window, text = \"Addition\", command = add) self.btn.grid(row = 4,",
"= 3) self.btn4 = Button(window, text = \"Division\", command = divide) self.btn4.grid(row =",
"5, column = 3) self.btn_clr = Button(window, text=\"Clear\", command= btnclr) self.btn_clr.grid(row=13, column =",
"= add) self.btn.grid(row = 4, column = 1, pady = 20) self.btn2 =",
"columnspan = 5, pady = (10, 20)) self.lbl2 = Label(window, text = \"Enter",
"= 5, pady = (10, 20)) self.lbl2 = Label(window, text = \"Enter 1st",
"= 3) def add(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans =",
"= \"Division\", command = divide) self.btn4.grid(row = 4, column = 4, padx =",
"= 3) self.btn_clr = Button(window, text=\"Clear\", command= btnclr) self.btn_clr.grid(row=13, column = 3, pady",
"Number: \") self.lbl2.grid(row = 2, column = 2) self.lbl3 = Label(window, text =",
"self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def sub(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2",
"END) self.txt3.configure(state = \"disabled\") self.txt.delete(0, END) self.txt2.delete(0, END) return self.btn = Button(window, text",
"\"Simple Calculator\") self.lbl1.grid(row = 0, columnspan = 5, pady = (10, 20)) self.lbl2",
"num1/num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state = \"disabled\") return def btnclr(): self.txt3.configure(state =",
"self.lbl2 = Label(window, text = \"Enter 1st Number: \") self.lbl2.grid(row = 2, column",
"= Entry(window, bd = 3) self.txt.grid(row = 2, column = 3) self.txt2 =",
"= int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 - num2 self.txt3.delete(0, END) self.txt3.insert(END,",
"column = 3) self.btn4 = Button(window, text = \"Division\", command = divide) self.btn4.grid(row",
"4, column = 3) self.btn4 = Button(window, text = \"Division\", command = divide)",
"num1*num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def divide(): self.txt3.configure(state = \"normal\") num1",
"END) return self.btn = Button(window, text = \"Addition\", command = add) self.btn.grid(row =",
"def btnclr(): self.txt3.configure(state = \"normal\") self.txt3.delete(0, END) self.txt3.configure(state = \"disabled\") self.txt.delete(0, END) self.txt2.delete(0,",
"window = Tk() window.title(\"Simple Calculator\") window.geometry(\"400x300+20+10\") window.grid_columnconfigure(0, weight=1) class MyWindow: def __init__(self, window):",
"= Label(window, text = \"Simple Calculator\") self.lbl1.grid(row = 0, columnspan = 5, pady",
"window): self.lbl1 = Label(window, text = \"Simple Calculator\") self.lbl1.grid(row = 0, columnspan =",
"ans = num1*num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def divide(): self.txt3.configure(state =",
"padx = (30, 15)) self.btn3 = Button(window, text = \"Multiplication\", command = multiply)",
"= 2) self.txt3 = Entry(window, state = \"readonly\") self.txt3.grid(row = 5, column =",
"window.geometry(\"400x300+20+10\") window.grid_columnconfigure(0, weight=1) class MyWindow: def __init__(self, window): self.lbl1 = Label(window, text =",
"= 4, column = 4, padx = 15) self.lbl4 = Label(window, text =",
"self.txt3.configure(state = \"disabled\") self.txt.delete(0, END) self.txt2.delete(0, END) return self.btn = Button(window, text =",
"add) self.btn.grid(row = 4, column = 1, pady = 20) self.btn2 = Button(window,",
"bd = 3) self.txt.grid(row = 2, column = 3) self.txt2 = Entry(window, bd",
"2) self.txt3 = Entry(window, state = \"readonly\") self.txt3.grid(row = 5, column = 3)",
"self.lbl1 = Label(window, text = \"Simple Calculator\") self.lbl1.grid(row = 0, columnspan = 5,",
"sub(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 - num2",
"= 2) self.txt = Entry(window, bd = 3) self.txt.grid(row = 2, column =",
"def multiply(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1*num2 self.txt3.delete(0,",
"def sub(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 -",
"= Label(window, text = \"Enter 2nd Number: \") self.lbl3.grid(row = 3, column =",
"= Button(window, text = \"Subtraction\", command = sub) self.btn2.grid(row = 4, column =",
"column = 3) self.txt2 = Entry(window, bd = 3) self.txt2.grid(row = 3, column",
"= 3) self.txt.grid(row = 2, column = 3) self.txt2 = Entry(window, bd =",
"15) self.lbl4 = Label(window, text = \"Result: \") self.lbl4.grid(row = 5, column =",
"Label(window, text = \"Result: \") self.lbl4.grid(row = 5, column = 2) self.txt3 =",
"2, column = 2) self.lbl3 = Label(window, text = \"Enter 2nd Number: \")",
"command = sub) self.btn2.grid(row = 4, column = 2, padx = (30, 15))",
"self.btn3.grid(row = 4, column = 3) self.btn4 = Button(window, text = \"Division\", command",
"import * window = Tk() window.title(\"Simple Calculator\") window.geometry(\"400x300+20+10\") window.grid_columnconfigure(0, weight=1) class MyWindow: def",
"= 2) self.lbl3 = Label(window, text = \"Enter 2nd Number: \") self.lbl3.grid(row =",
"= Entry(window, state = \"readonly\") self.txt3.grid(row = 5, column = 3) self.btn_clr =",
"4, column = 2, padx = (30, 15)) self.btn3 = Button(window, text =",
"= sub) self.btn2.grid(row = 4, column = 2, padx = (30, 15)) self.btn3",
"= int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 + num2 self.txt3.delete(0, END) self.txt3.insert(END,",
"Button(window, text = \"Multiplication\", command = multiply) self.btn3.grid(row = 4, column = 3)",
"command = divide) self.btn4.grid(row = 4, column = 4, padx = 15) self.lbl4",
"column = 4, padx = 15) self.lbl4 = Label(window, text = \"Result: \")",
"= (30, 15)) self.btn3 = Button(window, text = \"Multiplication\", command = multiply) self.btn3.grid(row",
"= Tk() window.title(\"Simple Calculator\") window.geometry(\"400x300+20+10\") window.grid_columnconfigure(0, weight=1) class MyWindow: def __init__(self, window): self.lbl1",
"self.txt3.grid(row = 5, column = 3) self.btn_clr = Button(window, text=\"Clear\", command= btnclr) self.btn_clr.grid(row=13,",
"= Button(window, text = \"Division\", command = divide) self.btn4.grid(row = 4, column =",
"self.txt3.configure(state = \"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1/num2 self.txt3.delete(0,",
"= int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1*num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\")",
"= 5, column = 2) self.txt3 = Entry(window, state = \"readonly\") self.txt3.grid(row =",
"= 4, column = 2, padx = (30, 15)) self.btn3 = Button(window, text",
"Entry(window, state = \"readonly\") self.txt3.grid(row = 5, column = 3) self.btn_clr = Button(window,",
"\"Subtraction\", command = sub) self.btn2.grid(row = 4, column = 2, padx = (30,",
"self.txt2.delete(0, END) return self.btn = Button(window, text = \"Addition\", command = add) self.btn.grid(row",
"int(self.txt2.get()) ans = num1/num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state = \"disabled\") return def",
"state = \"readonly\") self.txt3.grid(row = 5, column = 3) self.btn_clr = Button(window, text=\"Clear\",",
"from tkinter import * window = Tk() window.title(\"Simple Calculator\") window.geometry(\"400x300+20+10\") window.grid_columnconfigure(0, weight=1) class",
"multiply(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1*num2 self.txt3.delete(0, END)",
"= int(self.txt2.get()) ans = num1 + num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return",
"command = add) self.btn.grid(row = 4, column = 1, pady = 20) self.btn2",
"Label(window, text = \"Simple Calculator\") self.lbl1.grid(row = 0, columnspan = 5, pady =",
"int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1/num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state =",
"ans = num1 + num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def sub():",
"column = 2) self.txt3 = Entry(window, state = \"readonly\") self.txt3.grid(row = 5, column",
"3, column = 3) def add(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get())",
"= 4, column = 1, pady = 20) self.btn2 = Button(window, text =",
"Label(window, text = \"Enter 1st Number: \") self.lbl2.grid(row = 2, column = 2)",
"= \"Result: \") self.lbl4.grid(row = 5, column = 2) self.txt3 = Entry(window, state",
"\") self.lbl2.grid(row = 2, column = 2) self.lbl3 = Label(window, text = \"Enter",
"= Button(window, text = \"Multiplication\", command = multiply) self.btn3.grid(row = 4, column =",
"num2 = int(self.txt2.get()) ans = num1 - num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\")",
"self.txt3.insert(END, str(ans)) self.txt3.configure(state = \"disabled\") return def btnclr(): self.txt3.configure(state = \"normal\") self.txt3.delete(0, END)",
"self.btn4.grid(row = 4, column = 4, padx = 15) self.lbl4 = Label(window, text",
"END) self.txt3.insert(END, str(ans)) self.txt3.configure(state = \"disabled\") return def btnclr(): self.txt3.configure(state = \"normal\") self.txt3.delete(0,",
"self.btn = Button(window, text = \"Addition\", command = add) self.btn.grid(row = 4, column",
"= (10, 20)) self.lbl2 = Label(window, text = \"Enter 1st Number: \") self.lbl2.grid(row",
"num2 = int(self.txt2.get()) ans = num1*num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def",
"\"Result: \") self.lbl4.grid(row = 5, column = 2) self.txt3 = Entry(window, state =",
"= \"Addition\", command = add) self.btn.grid(row = 4, column = 1, pady =",
"= num1/num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state = \"disabled\") return def btnclr(): self.txt3.configure(state",
"\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1/num2 self.txt3.delete(0, END) self.txt3.insert(END,",
"tkinter import * window = Tk() window.title(\"Simple Calculator\") window.geometry(\"400x300+20+10\") window.grid_columnconfigure(0, weight=1) class MyWindow:",
"END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def sub(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 =",
"END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def divide(): self.txt3.configure(state = \"normal\") num1 = int(self.txt.get())",
"text = \"Enter 2nd Number: \") self.lbl3.grid(row = 3, column = 2) self.txt",
"(30, 15)) self.btn3 = Button(window, text = \"Multiplication\", command = multiply) self.btn3.grid(row =",
"20)) self.lbl2 = Label(window, text = \"Enter 1st Number: \") self.lbl2.grid(row = 2,",
"padx = 15) self.lbl4 = Label(window, text = \"Result: \") self.lbl4.grid(row = 5,",
"self.txt.delete(0, END) self.txt2.delete(0, END) return self.btn = Button(window, text = \"Addition\", command =",
"(10, 20)) self.lbl2 = Label(window, text = \"Enter 1st Number: \") self.lbl2.grid(row =",
"= 2, padx = (30, 15)) self.btn3 = Button(window, text = \"Multiplication\", command",
"num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def sub(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get())",
"3) self.btn_clr = Button(window, text=\"Clear\", command= btnclr) self.btn_clr.grid(row=13, column = 3, pady =",
"self.btn_clr = Button(window, text=\"Clear\", command= btnclr) self.btn_clr.grid(row=13, column = 3, pady = 5)",
"text = \"Enter 1st Number: \") self.lbl2.grid(row = 2, column = 2) self.lbl3",
"int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 + num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans))",
"int(self.txt2.get()) ans = num1 + num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def",
"num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 - num2 self.txt3.delete(0, END)",
"= \"disabled\") return def btnclr(): self.txt3.configure(state = \"normal\") self.txt3.delete(0, END) self.txt3.configure(state = \"disabled\")",
"= 20) self.btn2 = Button(window, text = \"Subtraction\", command = sub) self.btn2.grid(row =",
"column = 2) self.lbl3 = Label(window, text = \"Enter 2nd Number: \") self.lbl3.grid(row",
"def add(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 +",
"\"Addition\", command = add) self.btn.grid(row = 4, column = 1, pady = 20)",
"num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1*num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans))",
"str(ans)) self.txt3.configure(state=\"disabled\") return def divide(): self.txt3.configure(state = \"normal\") num1 = int(self.txt.get()) num2 =",
"= Button(window, text = \"Addition\", command = add) self.btn.grid(row = 4, column =",
"Button(window, text = \"Division\", command = divide) self.btn4.grid(row = 4, column = 4,",
"2, column = 3) self.txt2 = Entry(window, bd = 3) self.txt2.grid(row = 3,",
"15)) self.btn3 = Button(window, text = \"Multiplication\", command = multiply) self.btn3.grid(row = 4,",
"self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def sub(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get())",
"self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1 + num2 self.txt3.delete(0,",
"Number: \") self.lbl3.grid(row = 3, column = 2) self.txt = Entry(window, bd =",
"self.txt.grid(row = 2, column = 3) self.txt2 = Entry(window, bd = 3) self.txt2.grid(row",
"Label(window, text = \"Enter 2nd Number: \") self.lbl3.grid(row = 3, column = 2)",
"= 4, column = 3) self.btn4 = Button(window, text = \"Division\", command =",
"column = 1, pady = 20) self.btn2 = Button(window, text = \"Subtraction\", command",
"4, column = 1, pady = 20) self.btn2 = Button(window, text = \"Subtraction\",",
"2nd Number: \") self.lbl3.grid(row = 3, column = 2) self.txt = Entry(window, bd",
"\"Enter 1st Number: \") self.lbl2.grid(row = 2, column = 2) self.lbl3 = Label(window,",
"self.lbl4 = Label(window, text = \"Result: \") self.lbl4.grid(row = 5, column = 2)",
"text = \"Result: \") self.lbl4.grid(row = 5, column = 2) self.txt3 = Entry(window,",
"ans = num1/num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state = \"disabled\") return def btnclr():",
"5, column = 2) self.txt3 = Entry(window, state = \"readonly\") self.txt3.grid(row = 5,",
"* window = Tk() window.title(\"Simple Calculator\") window.geometry(\"400x300+20+10\") window.grid_columnconfigure(0, weight=1) class MyWindow: def __init__(self,",
"= \"Enter 1st Number: \") self.lbl2.grid(row = 2, column = 2) self.lbl3 =",
"str(ans)) self.txt3.configure(state=\"disabled\") return def sub(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans",
"3) def add(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans = num1",
"self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def divide(): self.txt3.configure(state = \"normal\") num1 = int(self.txt.get()) num2",
"3) self.btn4 = Button(window, text = \"Division\", command = divide) self.btn4.grid(row = 4,",
"2) self.lbl3 = Label(window, text = \"Enter 2nd Number: \") self.lbl3.grid(row = 3,",
"self.txt3.configure(state=\"disabled\") return def sub(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans =",
"= 4, padx = 15) self.lbl4 = Label(window, text = \"Result: \") self.lbl4.grid(row",
"= int(self.txt2.get()) ans = num1 - num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return",
"- num2 self.txt3.delete(0, END) self.txt3.insert(END, str(ans)) self.txt3.configure(state=\"disabled\") return def multiply(): self.txt3.configure(state=\"normal\") num1 =",
"str(ans)) self.txt3.configure(state = \"disabled\") return def btnclr(): self.txt3.configure(state = \"normal\") self.txt3.delete(0, END) self.txt3.configure(state",
"self.txt3.configure(state = \"disabled\") return def btnclr(): self.txt3.configure(state = \"normal\") self.txt3.delete(0, END) self.txt3.configure(state =",
"\"Division\", command = divide) self.btn4.grid(row = 4, column = 4, padx = 15)",
"3) self.txt.grid(row = 2, column = 3) self.txt2 = Entry(window, bd = 3)",
"Calculator\") self.lbl1.grid(row = 0, columnspan = 5, pady = (10, 20)) self.lbl2 =",
"3) self.txt2.grid(row = 3, column = 3) def add(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get())",
"= 3, column = 2) self.txt = Entry(window, bd = 3) self.txt.grid(row =",
"self.btn2 = Button(window, text = \"Subtraction\", command = sub) self.btn2.grid(row = 4, column",
"\"disabled\") return def btnclr(): self.txt3.configure(state = \"normal\") self.txt3.delete(0, END) self.txt3.configure(state = \"disabled\") self.txt.delete(0,",
"= 3, column = 3) def add(): self.txt3.configure(state=\"normal\") num1 = int(self.txt.get()) num2 =",
"def divide(): self.txt3.configure(state = \"normal\") num1 = int(self.txt.get()) num2 = int(self.txt2.get()) ans =",
"self.txt = Entry(window, bd = 3) self.txt.grid(row = 2, column = 3) self.txt2",
"= Entry(window, bd = 3) self.txt2.grid(row = 3, column = 3) def add():",
"pady = (10, 20)) self.lbl2 = Label(window, text = \"Enter 1st Number: \")",
"MyWindow: def __init__(self, window): self.lbl1 = Label(window, text = \"Simple Calculator\") self.lbl1.grid(row ="
] |
[
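The four arithmetic handlers all follow the same pattern: unlock the read-only result field, parse both Entry widgets with int(), compute, rewrite the field, and lock it again. As written, int() raises ValueError on non-numeric text and division raises ZeroDivisionError when the second entry is 0, so bad input produces a traceback in the Tk callback. A hedged sketch of one way to harden that step (safe_compute is a hypothetical helper, not in the original file):

def safe_compute(a_text, b_text, op):
    # Parse the two Entry strings and apply op; report "Error" instead
    # of raising on non-numeric input or division by zero.
    try:
        a, b = int(a_text), int(b_text)
        return str(op(a, b))
    except (ValueError, ZeroDivisionError):
        return "Error"

assert safe_compute("6", "3", lambda a, b: a // b) == "2"
assert safe_compute("6", "0", lambda a, b: a // b) == "Error"
assert safe_compute("x", "3", lambda a, b: a + b) == "Error"

Each handler could then call safe_compute with its operator and insert the returned string into txt3, keeping the unlock/relock choreography in one place.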
"a license from here: https://help.github.com/articles/licensing-a-repository description = 'THIS PACKAGE IS TO IMPLEMENT TOPSIS',",
"Development :: Build Tools', 'License :: OSI Approved :: MIT License', # Again,",
"as \"name\" version = '0.5', # Start with a small number and increase",
"'<EMAIL>', # Type in your E-Mail install_requires=[ # I get to this in",
"here: https://help.github.com/articles/licensing-a-repository description = 'THIS PACKAGE IS TO IMPLEMENT TOPSIS', # Give a",
"3 - Alpha', # Chose either \"3 - Alpha\", \"4 - Beta\" or",
"'<NAME>', # Type in your name author_email = '<EMAIL>', # Type in your",
"the current state of your package 'Intended Audience :: Developers', # Define that",
"current state of your package 'Intended Audience :: Developers', # Define that your",
"the same as \"name\" version = '0.5', # Start with a small number",
"I get to this in a second 'pandas', 'numpy', ], classifiers=[ 'Development Status",
"you named your package folder (MyLib) packages = ['TOPSIS_ANUJ_101803638'], # Chose the same",
"TO IMPLEMENT TOPSIS', # Give a short description about your library long_description=readme(), long_description_content_type=\"text/markdown\",",
"= '<NAME>', # Type in your name author_email = '<EMAIL>', # Type in",
"open('README.md') as f: README = f.read() return README setup( name = 'TOPSIS_ANUJ_101803638', #",
"# Type in your name author_email = '<EMAIL>', # Type in your E-Mail",
"Software Development :: Build Tools', 'License :: OSI Approved :: MIT License', #",
"you make license='MIT', # Chose a license from here: https://help.github.com/articles/licensing-a-repository description = 'THIS",
"https://help.github.com/articles/licensing-a-repository description = 'THIS PACKAGE IS TO IMPLEMENT TOPSIS', # Give a short",
"Python :: 3', #Specify which pyhton versions that you want to support 'Programming",
"Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language",
"increase it with every change you make license='MIT', # Chose a license from",
":: Python :: 3', #Specify which pyhton versions that you want to support",
"License', # Again, pick a license 'Programming Language :: Python :: 3', #Specify",
"Start with a small number and increase it with every change you make",
"# Chose a license from here: https://help.github.com/articles/licensing-a-repository description = 'THIS PACKAGE IS TO",
"'numpy', ], classifiers=[ 'Development Status :: 3 - Alpha', # Chose either \"3",
"'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ],",
"= '0.5', # Start with a small number and increase it with every",
"Chose either \"3 - Alpha\", \"4 - Beta\" or \"5 - Production/Stable\" as",
"Language :: Python :: 3', #Specify which pyhton versions that you want to",
":: 3 - Alpha', # Chose either \"3 - Alpha\", \"4 - Beta\"",
"author_email = '<EMAIL>', # Type in your E-Mail install_requires=[ # I get to",
"# Give a short description about your library long_description=readme(), long_description_content_type=\"text/markdown\", author = '<NAME>',",
":: MIT License', # Again, pick a license 'Programming Language :: Python ::",
"in your E-Mail install_requires=[ # I get to this in a second 'pandas',",
"and increase it with every change you make license='MIT', # Chose a license",
"as the current state of your package 'Intended Audience :: Developers', # Define",
"author = '<NAME>', # Type in your name author_email = '<EMAIL>', # Type",
"a short description about your library long_description=readme(), long_description_content_type=\"text/markdown\", author = '<NAME>', # Type",
"Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', ], )",
"pick a license 'Programming Language :: Python :: 3', #Specify which pyhton versions",
"with a small number and increase it with every change you make license='MIT',",
"README = f.read() return README setup( name = 'TOPSIS_ANUJ_101803638', # How you named",
":: 3', #Specify which pyhton versions that you want to support 'Programming Language",
"your package 'Intended Audience :: Developers', # Define that your audience are developers",
"your E-Mail install_requires=[ # I get to this in a second 'pandas', 'numpy',",
"classifiers=[ 'Development Status :: 3 - Alpha', # Chose either \"3 - Alpha\",",
"packages = ['TOPSIS_ANUJ_101803638'], # Chose the same as \"name\" version = '0.5', #",
"package 'Intended Audience :: Developers', # Define that your audience are developers 'Topic",
"# Chose the same as \"name\" version = '0.5', # Start with a",
"from setuptools import setup def readme(): with open('README.md') as f: README = f.read()",
"# Define that your audience are developers 'Topic :: Software Development :: Build",
"['TOPSIS_ANUJ_101803638'], # Chose the same as \"name\" version = '0.5', # Start with",
"or \"5 - Production/Stable\" as the current state of your package 'Intended Audience",
"license from here: https://help.github.com/articles/licensing-a-repository description = 'THIS PACKAGE IS TO IMPLEMENT TOPSIS', #",
"= f.read() return README setup( name = 'TOPSIS_ANUJ_101803638', # How you named your",
"'License :: OSI Approved :: MIT License', # Again, pick a license 'Programming",
"your name author_email = '<EMAIL>', # Type in your E-Mail install_requires=[ # I",
"as f: README = f.read() return README setup( name = 'TOPSIS_ANUJ_101803638', # How",
"second 'pandas', 'numpy', ], classifiers=[ 'Development Status :: 3 - Alpha', # Chose",
"versions that you want to support 'Programming Language :: Python :: 3.4', 'Programming",
"Developers', # Define that your audience are developers 'Topic :: Software Development ::",
":: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language ::",
"version = '0.5', # Start with a small number and increase it with",
"Alpha\", \"4 - Beta\" or \"5 - Production/Stable\" as the current state of",
"Give a short description about your library long_description=readme(), long_description_content_type=\"text/markdown\", author = '<NAME>', #",
"your package folder (MyLib) packages = ['TOPSIS_ANUJ_101803638'], # Chose the same as \"name\"",
":: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python ::",
"this in a second 'pandas', 'numpy', ], classifiers=[ 'Development Status :: 3 -",
"it with every change you make license='MIT', # Chose a license from here:",
"named your package folder (MyLib) packages = ['TOPSIS_ANUJ_101803638'], # Chose the same as",
"number and increase it with every change you make license='MIT', # Chose a",
"every change you make license='MIT', # Chose a license from here: https://help.github.com/articles/licensing-a-repository description",
"change you make license='MIT', # Chose a license from here: https://help.github.com/articles/licensing-a-repository description =",
"in a second 'pandas', 'numpy', ], classifiers=[ 'Development Status :: 3 - Alpha',",
"to support 'Programming Language :: Python :: 3.4', 'Programming Language :: Python ::",
"setup def readme(): with open('README.md') as f: README = f.read() return README setup(",
"\"4 - Beta\" or \"5 - Production/Stable\" as the current state of your",
"with every change you make license='MIT', # Chose a license from here: https://help.github.com/articles/licensing-a-repository",
"3', #Specify which pyhton versions that you want to support 'Programming Language ::",
"folder (MyLib) packages = ['TOPSIS_ANUJ_101803638'], # Chose the same as \"name\" version =",
"support 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5',",
"\"5 - Production/Stable\" as the current state of your package 'Intended Audience ::",
"'TOPSIS_ANUJ_101803638', # How you named your package folder (MyLib) packages = ['TOPSIS_ANUJ_101803638'], #",
"description about your library long_description=readme(), long_description_content_type=\"text/markdown\", author = '<NAME>', # Type in your",
"How you named your package folder (MyLib) packages = ['TOPSIS_ANUJ_101803638'], # Chose the",
"'Topic :: Software Development :: Build Tools', 'License :: OSI Approved :: MIT",
":: Build Tools', 'License :: OSI Approved :: MIT License', # Again, pick",
"return README setup( name = 'TOPSIS_ANUJ_101803638', # How you named your package folder",
"are developers 'Topic :: Software Development :: Build Tools', 'License :: OSI Approved",
"install_requires=[ # I get to this in a second 'pandas', 'numpy', ], classifiers=[",
"description = 'THIS PACKAGE IS TO IMPLEMENT TOPSIS', # Give a short description",
"# Again, pick a license 'Programming Language :: Python :: 3', #Specify which",
"you want to support 'Programming Language :: Python :: 3.4', 'Programming Language ::",
"Build Tools', 'License :: OSI Approved :: MIT License', # Again, pick a",
"long_description=readme(), long_description_content_type=\"text/markdown\", author = '<NAME>', # Type in your name author_email = '<EMAIL>',",
"want to support 'Programming Language :: Python :: 3.4', 'Programming Language :: Python",
"# I get to this in a second 'pandas', 'numpy', ], classifiers=[ 'Development",
"= '<EMAIL>', # Type in your E-Mail install_requires=[ # I get to this",
"\"3 - Alpha\", \"4 - Beta\" or \"5 - Production/Stable\" as the current",
"], classifiers=[ 'Development Status :: 3 - Alpha', # Chose either \"3 -",
"with open('README.md') as f: README = f.read() return README setup( name = 'TOPSIS_ANUJ_101803638',",
"(MyLib) packages = ['TOPSIS_ANUJ_101803638'], # Chose the same as \"name\" version = '0.5',",
"long_description_content_type=\"text/markdown\", author = '<NAME>', # Type in your name author_email = '<EMAIL>', #",
"setup( name = 'TOPSIS_ANUJ_101803638', # How you named your package folder (MyLib) packages",
"= 'THIS PACKAGE IS TO IMPLEMENT TOPSIS', # Give a short description about",
"a license 'Programming Language :: Python :: 3', #Specify which pyhton versions that",
"Define that your audience are developers 'Topic :: Software Development :: Build Tools',",
"#Specify which pyhton versions that you want to support 'Programming Language :: Python",
"developers 'Topic :: Software Development :: Build Tools', 'License :: OSI Approved ::",
"# How you named your package folder (MyLib) packages = ['TOPSIS_ANUJ_101803638'], # Chose",
"# Chose either \"3 - Alpha\", \"4 - Beta\" or \"5 - Production/Stable\"",
"from here: https://help.github.com/articles/licensing-a-repository description = 'THIS PACKAGE IS TO IMPLEMENT TOPSIS', # Give",
"'Development Status :: 3 - Alpha', # Chose either \"3 - Alpha\", \"4",
"Tools', 'License :: OSI Approved :: MIT License', # Again, pick a license",
"README setup( name = 'TOPSIS_ANUJ_101803638', # How you named your package folder (MyLib)",
"library long_description=readme(), long_description_content_type=\"text/markdown\", author = '<NAME>', # Type in your name author_email =",
"Production/Stable\" as the current state of your package 'Intended Audience :: Developers', #",
"that you want to support 'Programming Language :: Python :: 3.4', 'Programming Language",
"pyhton versions that you want to support 'Programming Language :: Python :: 3.4',",
"= 'TOPSIS_ANUJ_101803638', # How you named your package folder (MyLib) packages = ['TOPSIS_ANUJ_101803638'],",
"audience are developers 'Topic :: Software Development :: Build Tools', 'License :: OSI",
"license='MIT', # Chose a license from here: https://help.github.com/articles/licensing-a-repository description = 'THIS PACKAGE IS",
"your library long_description=readme(), long_description_content_type=\"text/markdown\", author = '<NAME>', # Type in your name author_email",
"PACKAGE IS TO IMPLEMENT TOPSIS', # Give a short description about your library",
"to this in a second 'pandas', 'numpy', ], classifiers=[ 'Development Status :: 3",
"about your library long_description=readme(), long_description_content_type=\"text/markdown\", author = '<NAME>', # Type in your name",
"readme(): with open('README.md') as f: README = f.read() return README setup( name =",
"= ['TOPSIS_ANUJ_101803638'], # Chose the same as \"name\" version = '0.5', # Start",
"Audience :: Developers', # Define that your audience are developers 'Topic :: Software",
"OSI Approved :: MIT License', # Again, pick a license 'Programming Language ::",
"setuptools import setup def readme(): with open('README.md') as f: README = f.read() return",
"that your audience are developers 'Topic :: Software Development :: Build Tools', 'License",
"make license='MIT', # Chose a license from here: https://help.github.com/articles/licensing-a-repository description = 'THIS PACKAGE",
"name = 'TOPSIS_ANUJ_101803638', # How you named your package folder (MyLib) packages =",
":: Developers', # Define that your audience are developers 'Topic :: Software Development",
"Type in your name author_email = '<EMAIL>', # Type in your E-Mail install_requires=[",
"of your package 'Intended Audience :: Developers', # Define that your audience are",
"'0.5', # Start with a small number and increase it with every change",
"Chose a license from here: https://help.github.com/articles/licensing-a-repository description = 'THIS PACKAGE IS TO IMPLEMENT",
"which pyhton versions that you want to support 'Programming Language :: Python ::",
"3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6',",
"f: README = f.read() return README setup( name = 'TOPSIS_ANUJ_101803638', # How you",
"'pandas', 'numpy', ], classifiers=[ 'Development Status :: 3 - Alpha', # Chose either",
"Alpha', # Chose either \"3 - Alpha\", \"4 - Beta\" or \"5 -",
"a second 'pandas', 'numpy', ], classifiers=[ 'Development Status :: 3 - Alpha', #",
"Status :: 3 - Alpha', # Chose either \"3 - Alpha\", \"4 -",
"Again, pick a license 'Programming Language :: Python :: 3', #Specify which pyhton",
"MIT License', # Again, pick a license 'Programming Language :: Python :: 3',",
"a small number and increase it with every change you make license='MIT', #",
"Type in your E-Mail install_requires=[ # I get to this in a second",
"- Beta\" or \"5 - Production/Stable\" as the current state of your package",
"- Production/Stable\" as the current state of your package 'Intended Audience :: Developers',",
"Approved :: MIT License', # Again, pick a license 'Programming Language :: Python",
"'Programming Language :: Python :: 3', #Specify which pyhton versions that you want",
"def readme(): with open('README.md') as f: README = f.read() return README setup( name",
"f.read() return README setup( name = 'TOPSIS_ANUJ_101803638', # How you named your package",
"# Type in your E-Mail install_requires=[ # I get to this in a",
"- Alpha\", \"4 - Beta\" or \"5 - Production/Stable\" as the current state",
":: OSI Approved :: MIT License', # Again, pick a license 'Programming Language",
"'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming",
"Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python",
"E-Mail install_requires=[ # I get to this in a second 'pandas', 'numpy', ],",
"Chose the same as \"name\" version = '0.5', # Start with a small",
"\"name\" version = '0.5', # Start with a small number and increase it",
"import setup def readme(): with open('README.md') as f: README = f.read() return README",
"TOPSIS', # Give a short description about your library long_description=readme(), long_description_content_type=\"text/markdown\", author =",
"your audience are developers 'Topic :: Software Development :: Build Tools', 'License ::",
"short description about your library long_description=readme(), long_description_content_type=\"text/markdown\", author = '<NAME>', # Type in",
"either \"3 - Alpha\", \"4 - Beta\" or \"5 - Production/Stable\" as the",
"Beta\" or \"5 - Production/Stable\" as the current state of your package 'Intended",
"IS TO IMPLEMENT TOPSIS', # Give a short description about your library long_description=readme(),",
":: Software Development :: Build Tools', 'License :: OSI Approved :: MIT License',",
"'THIS PACKAGE IS TO IMPLEMENT TOPSIS', # Give a short description about your",
"same as \"name\" version = '0.5', # Start with a small number and",
"'Intended Audience :: Developers', # Define that your audience are developers 'Topic ::",
"state of your package 'Intended Audience :: Developers', # Define that your audience",
"small number and increase it with every change you make license='MIT', # Chose",
"- Alpha', # Chose either \"3 - Alpha\", \"4 - Beta\" or \"5",
"# Start with a small number and increase it with every change you",
"IMPLEMENT TOPSIS', # Give a short description about your library long_description=readme(), long_description_content_type=\"text/markdown\", author",
"get to this in a second 'pandas', 'numpy', ], classifiers=[ 'Development Status ::",
"in your name author_email = '<EMAIL>', # Type in your E-Mail install_requires=[ #",
"name author_email = '<EMAIL>', # Type in your E-Mail install_requires=[ # I get",
"package folder (MyLib) packages = ['TOPSIS_ANUJ_101803638'], # Chose the same as \"name\" version",
"license 'Programming Language :: Python :: 3', #Specify which pyhton versions that you"
"8 }, { 'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex': 22,",
"22, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8",
"'0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex': 8, 'v': 37, 'value': 0 } SWAP_RECEIPT = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',",
"'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data':",
"'0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber':",
"'0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex': 23, 'removed': False, 'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'],",
"8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex': 24,",
"'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 },",
"}, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex': 20, 'removed':",
"{ 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas': 203049, 'gasPrice': 69000000000, 'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',",
"[ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185',",
"{ 'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex': 18, 'removed': False,",
"'nonce': 85, 'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex': 8, 'v': 37, 'value':",
"[ { 'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex': 18, 'removed':",
"'removed': False, 'topics': [ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address':",
"'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address':",
"'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex': 18, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ], 'transactionHash':",
"'0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',",
"19, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8",
"8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex': 23,",
"'transactionIndex': 8 }, { 'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex':",
"'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex': 23, 'removed': False, 'topics':",
"'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex': 22, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',",
"'0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex': 8, 'v': 37, 'value': 0 } SWAP_RECEIPT = {",
"'0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'contractAddress': None, 'cumulativeGasUsed': 872286, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed': 174314, 'logs': [",
"'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex': 20, 'removed': False, 'topics': [ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex':",
"'logIndex': 21, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex':",
"8 } ], 'logsBloom': '0x00200000400000000000000080010000000000000000000000010000000000001000000000000000000000000000000012000000080000001000000000000000000000000200000000080008000000200000000000001000000000000000000011000000000080000000000000000000000000000000000000000010000000000080000000000000004000000000000000000000000000080000004000000000000000000000000040200000100000000000000000000100000200000000000000000002000000000000080000400000000002000000001000000000000020000000200000000000000000000000000000000000000000000000000000000000', 'status': 1, 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8",
"'logIndex': 20, 'removed': False, 'topics': [ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 },",
"'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber':",
"'0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed': 174314, 'logs': [ { 'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data':",
"False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, {",
"'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed': 174314, 'logs': [ { 'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697,",
"[ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 } ], 'logsBloom': '0x00200000400000000000000080010000000000000000000000010000000000001000000000000000000000000000000012000000080000001000000000000000000000000200000000080008000000200000000000001000000000000000000011000000000080000000000000000000000000000000000000000010000000000080000000000000004000000000000000000000000000080000004000000000000000000000000040200000100000000000000000000100000200000000000000000002000000000000080000400000000002000000001000000000000020000000200000000000000000000000000000000000000000000000000000000000',",
"'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex': 24, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash':",
"'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex': 18, 'removed': False, 'topics':",
"False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, {",
"}, { 'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 19, 'removed':",
"{ 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex': 23, 'removed': False,",
"'0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex': 23, 'removed': False, 'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',",
"'gasPrice': 69000000000, 'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce': 85, 'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to':",
"{ 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex': 20, 'removed': False,",
"37, 'value': 0 } SWAP_RECEIPT = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'contractAddress': None,",
"'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex':",
"'logIndex': 23, 'removed': False, 'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address':",
"'0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex': 24, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',",
"'0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',",
"203049, 'gasPrice': 69000000000, 'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce': 85, 'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18',",
"'0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40',",
"'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex':",
"'0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash':",
"24, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8",
"'0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex': 20, 'removed': False, 'topics': [ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ],",
"= { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'contractAddress': None, 'cumulativeGasUsed': 872286, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed':",
"'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 },",
"'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 21, 'removed': False, 'topics':",
"'0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber':",
"'0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 } ], 'logsBloom': '0x00200000400000000000000080010000000000000000000000010000000000001000000000000000000000000000000012000000080000001000000000000000000000000200000000080008000000200000000000001000000000000000000011000000000080000000000000000000000000000000000000000010000000000080000000000000004000000000000000000000000000080000004000000000000000000000000040200000100000000000000000000100000200000000000000000002000000000000080000400000000002000000001000000000000020000000200000000000000000000000000000000000000000000000000000000000', 'status': 1,",
"'0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 } ], 'logsBloom': '0x00200000400000000000000080010000000000000000000000010000000000001000000000000000000000000000000012000000080000001000000000000000000000000200000000080008000000200000000000001000000000000000000011000000000080000000000000000000000000000000000000000010000000000080000000000000004000000000000000000000000000080000004000000000000000000000000040200000100000000000000000000100000200000000000000000002000000000000080000400000000002000000001000000000000020000000200000000000000000000000000000000000000000000000000000000000', 'status': 1, 'to':",
"872286, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed': 174314, 'logs': [ { 'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber':",
"'0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b',",
"], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697,",
"'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce': 85, 'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex':",
"'0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 } ], 'logsBloom': '0x00200000400000000000000080010000000000000000000000010000000000001000000000000000000000000000000012000000080000001000000000000000000000000200000000080008000000200000000000001000000000000000000011000000000080000000000000000000000000000000000000000010000000000080000000000000004000000000000000000000000000080000004000000000000000000000000040200000100000000000000000000100000200000000000000000002000000000000080000400000000002000000001000000000000020000000200000000000000000000000000000000000000000000000000000000000', 'status': 1, 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',",
"'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 },",
"23, 'removed': False, 'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11',",
"'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce': 85, 'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex': 8, 'v':",
"'logIndex': 18, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex':",
"'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex': 24, 'removed': False, 'topics':",
"'0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex': 18, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',",
"'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data':",
"'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 21, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11'",
"}, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex': 23, 'removed':",
"'logIndex': 22, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex':",
"11325697, 'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex': 24, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ],",
"False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, {",
"'0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex': 24, 'removed': False, 'topics': [",
"'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 19, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11'",
"21, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8",
"'0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13',",
"'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex': 20, 'removed': False, 'topics': [ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'",
"'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex': 18, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',",
"'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex': 22, 'removed': False, 'topics':",
"'removed': False, 'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash':",
"'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'contractAddress': None, 'cumulativeGasUsed': 872286, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed': 174314, 'logs':",
"SWAP_RECEIPT = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'contractAddress': None, 'cumulativeGasUsed': 872286, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b',",
"'0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',",
"'v': 37, 'value': 0 } SWAP_RECEIPT = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'contractAddress':",
"11325697, 'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex': 23, 'removed': False, 'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8",
"'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex': 20, 'removed': False, 'topics': [ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ], 'transactionHash':",
"], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 } ], 'logsBloom': '0x00200000400000000000000080010000000000000000000000010000000000001000000000000000000000000000000012000000080000001000000000000000000000000200000000080008000000200000000000001000000000000000000011000000000080000000000000000000000000000000000000000010000000000080000000000000004000000000000000000000000000080000004000000000000000000000000040200000100000000000000000000100000200000000000000000002000000000000080000400000000002000000001000000000000020000000200000000000000000000000000000000000000000000000000000000000', 'status': 1, 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D',",
"'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex': 23, 'removed': False, 'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex':",
"'transactionIndex': 8, 'v': 37, 'value': 0 } SWAP_RECEIPT = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber':",
"'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex': 22, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash':",
"11325697, 'contractAddress': None, 'cumulativeGasUsed': 872286, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed': 174314, 'logs': [ { 'address':",
"'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }",
"'0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas': 203049, 'gasPrice': 69000000000, 'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f',",
"'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex': 8, 'v': 37, 'value': 0 } SWAP_RECEIPT = { 'blockHash':",
"[ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2',",
"'s': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex': 8, 'v': 37, 'value': 0 } SWAP_RECEIPT =",
"<reponame>mikeshultz/py4byte<filename>test/const.py # flake8: noqa SWAP_TRANSACTION = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b',",
"'0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash':",
"'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data':",
"18, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8",
"'0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 21, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',",
"}, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex': 24, 'removed':",
"], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697,",
"'0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 19, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185',",
"'logIndex': 24, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex':",
"'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex': 23, 'removed': False, 'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash':",
"85, 'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex': 8, 'v': 37, 'value': 0",
"'0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5',",
"'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex': 20, 'removed': False, 'topics':",
"}, { 'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex': 22, 'removed':",
"'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex': 8, 'v': 37, 'value': 0 }",
"'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 19, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash':",
"'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas': 203049, 'gasPrice': 69000000000, 'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input':",
"'0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex': 22, 'removed': False, 'topics': [",
"'contractAddress': None, 'cumulativeGasUsed': 872286, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed': 174314, 'logs': [ { 'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26',",
"'0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex': 23, 'removed': False, 'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, {",
"'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address':",
"'0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex': 20, 'removed': False, 'topics': [",
"'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex':",
"'value': 0 } SWAP_RECEIPT = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'contractAddress': None, 'cumulativeGasUsed':",
"'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data':",
"['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697,",
"'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 } ], 'logsBloom': '0x00200000400000000000000080010000000000000000000000010000000000001000000000000000000000000000000012000000080000001000000000000000000000000200000000080008000000200000000000001000000000000000000011000000000080000000000000000000000000000000000000000010000000000080000000000000004000000000000000000000000000080000004000000000000000000000000040200000100000000000000000000100000200000000000000000002000000000000080000400000000002000000001000000000000020000000200000000000000000000000000000000000000000000000000000000000', 'status': 1, 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionHash':",
"8, 'v': 37, 'value': 0 } SWAP_RECEIPT = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697,",
"'0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 } ], 'logsBloom': '0x00200000400000000000000080010000000000000000000000010000000000001000000000000000000000000000000012000000080000001000000000000000000000000200000000080008000000200000000000001000000000000000000011000000000080000000000000000000000000000000000000000010000000000080000000000000004000000000000000000000000000080000004000000000000000000000000040200000100000000000000000000100000200000000000000000002000000000000080000400000000002000000001000000000000020000000200000000000000000000000000000000000000000000000000000000000', 'status':",
"flake8: noqa SWAP_TRANSACTION = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas': 203049,",
"69000000000, 'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce': 85, 'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D',",
"'cumulativeGasUsed': 872286, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed': 174314, 'logs': [ { 'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',",
"'0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex': 22, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11',",
"'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex': 24, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b'",
"], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697,",
"'0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce': 85, 'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex': 8, 'v': 37,",
"[ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11',",
"0 } SWAP_RECEIPT = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'contractAddress': None, 'cumulativeGasUsed': 872286,",
"20, 'removed': False, 'topics': [ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, {",
"'0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 21, 'removed': False, 'topics': [",
"'0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',",
"11325697, 'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex': 22, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ],",
"'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 19, 'removed': False, 'topics':",
"'0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash':",
"'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 21, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash':",
"False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, {",
"'0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b',",
"False, 'topics': [ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185',",
"'0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex': 18, 'removed': False, 'topics': [",
"}, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 21, 'removed':",
"], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697,",
"11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 21, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ],",
"8 }, { 'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 19,",
"'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address':",
"'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 19, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',",
"'logIndex': 19, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex':",
"= { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas': 203049, 'gasPrice': 69000000000, 'hash':",
"'0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex': 8, 'v': 37, 'value': 0 } SWAP_RECEIPT",
"'0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex': 20, 'removed': False, 'topics': [ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8",
"[ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',",
"8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 21,",
"{ 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 21, 'removed': False,",
"'0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 21, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d',",
"'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex': 22, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b'",
"174314, 'logs': [ { 'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex':",
"{ 'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 19, 'removed': False,",
"'logs': [ { 'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex': 18,",
"'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address':",
"SWAP_TRANSACTION = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas': 203049, 'gasPrice': 69000000000,",
"'0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 19, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',",
"'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex': 18, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185'",
"'transactionIndex': 8 } ], 'logsBloom': '0x00200000400000000000000080010000000000000000000000010000000000001000000000000000000000000000000012000000080000001000000000000000000000000200000000080008000000200000000000001000000000000000000011000000000080000000000000000000000000000000000000000010000000000080000000000000004000000000000000000000000000080000004000000000000000000000000040200000100000000000000000000100000200000000000000000002000000000000080000400000000002000000001000000000000020000000200000000000000000000000000000000000000000000000000000000000', 'status': 1, 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex':",
"} SWAP_RECEIPT = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'contractAddress': None, 'cumulativeGasUsed': 872286, 'from':",
"'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas': 203049, 'gasPrice': 69000000000, 'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce': 85, 'r':",
"'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 21, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822',",
"'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex':",
"'0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash':",
"'0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000',",
"'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex': 24, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822',",
"'0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce': 85, 'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18', 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionIndex': 8,",
"'gasUsed': 174314, 'logs': [ { 'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000',",
"# flake8: noqa SWAP_TRANSACTION = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas':",
"11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex': 18, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ],",
"'0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber':",
"11325697, 'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 19, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ],",
"'0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber':",
"{ 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex': 24, 'removed': False,",
"'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 } ], 'logsBloom':",
"11325697, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas': 203049, 'gasPrice': 69000000000, 'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce': 85,",
"'0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex': 22, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',",
"'0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000', 'logIndex': 24, 'removed': False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d',",
"'0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas': 203049, 'gasPrice': 69000000000, 'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce': 85, 'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85',",
"{ 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'contractAddress': None, 'cumulativeGasUsed': 872286, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed': 174314,",
"False, 'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',",
"noqa SWAP_TRANSACTION = { 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas': 203049, 'gasPrice':",
"'gas': 203049, 'gasPrice': 69000000000, 'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce': 85, 'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85', 's':",
"'blockNumber': 11325697, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gas': 203049, 'gasPrice': 69000000000, 'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f', 'nonce':",
"'0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000', 'logIndex': 18, 'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b',",
"[ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F',",
"'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40', 'logIndex': 23, 'removed': False, 'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 },",
"} ], 'logsBloom': '0x00200000400000000000000080010000000000000000000000010000000000001000000000000000000000000000000012000000080000001000000000000000000000000200000000080008000000200000000000001000000000000000000011000000000080000000000000000000000000000000000000000010000000000080000000000000004000000000000000000000000000080000004000000000000000000000000040200000100000000000000000000100000200000000000000000002000000000000080000400000000002000000001000000000000020000000200000000000000000000000000000000000000000000000000000000000', 'status': 1, 'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D', 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }",
"'topics': [ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash':",
"'0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 }, { 'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber':",
"'blockNumber': 11325697, 'contractAddress': None, 'cumulativeGasUsed': 872286, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed': 174314, 'logs': [ {",
"None, 'cumulativeGasUsed': 872286, 'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b', 'gasUsed': 174314, 'logs': [ { 'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26', 'blockHash':",
"'0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b', 'logIndex': 19, 'removed': False, 'topics': [",
"{ 'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex': 22, 'removed': False,",
"'transactionIndex': 8 }, { 'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5', 'logIndex':",
"False, 'topics': [ '0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822', '0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 } ],",
"'removed': False, 'topics': [ '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef', '0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b', '0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364', 'transactionIndex': 8 },",
"11325697, 'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex': 20, 'removed': False, 'topics': [ '0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1' ], 'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',",
"8 }, { 'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185', 'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3', 'blockNumber': 11325697, 'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13', 'logIndex': 20,"
] |
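The two fixtures above follow the dict shape that web3.py returns from eth.get_transaction and eth.get_transaction_receipt: the input selector 0x38ed1739 corresponds to Uniswap V2's swapExactTokensForTokens, and the receipt's logs interleave ERC-20 Transfer events with the pair contracts' Sync and Swap events. As a minimal sketch of how a test might consume the receipt, the helper below filters logs on the standard Transfer(address,address,uint256) topic hash; transfers_in_receipt is an illustrative name, not part of the fixture file.

# Illustrative sketch only, assuming the fixtures are consumed as plain dicts.
# TRANSFER_TOPIC is keccak256("Transfer(address,address,uint256)"), the
# canonical ERC-20 Transfer event signature.
TRANSFER_TOPIC = '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef'

def transfers_in_receipt(receipt):
    """Yield (token, sender, recipient, amount) for each ERC-20 Transfer log."""
    for log in receipt['logs']:
        if log['topics'][0] != TRANSFER_TOPIC:
            continue  # skip Sync/Swap and any other non-Transfer events
        sender = '0x' + log['topics'][1][-40:]     # indexed `from`, lowercase hex
        recipient = '0x' + log['topics'][2][-40:]  # indexed `to`, lowercase hex
        amount = int(log['data'], 16)              # unindexed `value`
        yield log['address'], sender, recipient, amount

for token, src, dst, amount in transfers_in_receipt(SWAP_RECEIPT):
    print(token, src, '->', dst, amount)

Run against SWAP_RECEIPT this yields the three Transfer logs (indexes 18, 19, and 22), tracing the token path from the sender through the two pair contracts.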
# campagnola/acq4
from __future__ import print_function, division
import weakref
from threading import Lock, Thread, Event
from six.moves import queue
from .future import Future


class PriorityLock(object):
    """Mutex with asynchronous locking and priority queueing.

    The purpose of this class is to provide a mutex that:

    - Uses futures for acquiring locks asynchronously
    - Allows locks to be acquired in priority order

    Examples::

        lock = PriorityLock()

        # manual lock / wait / release
        req = lock.acquire()
        req.wait()  # wait for lock to be acquired
        # .. do stuff while lock is acquired
        req.release()

        # context manager
        with lock.acquire() as req:
            req.wait()
            # .. do stuff while lock is acquired
    """
    def __init__(self, name=None):
        self.name = name
        self.req_count = Counter()
        self.lock_queue = queue.PriorityQueue()
        self.unlock_event = Event()
        self.unlock_event.set()
        self.lock_thread = Thread(target=self._lock_loop)
        self.lock_thread.daemon = True
        self.lock_thread.start()

    def acquire(self, priority=0, name=None):
        """Return a Future that completes when the lock is acquired.

        Higher priority values will be locked first.
        """
        fut = PriorityLockRequest(self, name=name)
        # print("request lock:", fut)
        self.lock_queue.put((-priority, next(self.req_count), fut))
        return fut

    def _release_lock(self, fut):
        with fut._acq_lock:
            # print("release request:", fut)
            if fut.released:
                return
            fut._released = True
            if fut.acquired:
                # print("release lock:", fut)
                fut._acquired = False
                self.unlock_event.set()
            else:
                fut._taskDone(interrupted=True)

    def _lock_loop(self):
        while True:
            # wait for lock to become available
            self.unlock_event.wait()

            # get next lock request
            while True:
                _, _, fut = self.lock_queue.get()
                with fut._acq_lock:
                    if fut._released:
                        # future has already been released; don't assign lock
                        continue
                    # assign lock to this request
                    # print("assign lock:", fut)
                    fut._acquired = True
                    fut._taskDone()
                    self.unlock_event.clear()
                    break

    def __repr__(self):
        return "<%s %s 0x%x>" % (self.__class__.__name__, self.name, id(self))


class PriorityLockRequest(Future):
    def __init__(self, mutex, name):
        Future.__init__(self)
        self.mutex = weakref.ref(mutex)
        self.name = name
        self._acq_lock = Lock()
        self._wait_event = Event()
        self._acquired = False
        self._released = False

    @property
    def acquired(self):
        """If True, then this request currently has the lock acquired and
        prevents other requests from acquiring the lock.
        """
        return self._acquired

    @property
    def released(self):
        """If True, then this request has released its lock (if any) and can
        never acquire the lock again.
        """
        return self._released

    def _wait(self, timeout):
        self._wait_event.wait(timeout=timeout)

    def percentDone(self):
        return 100 if (self.acquired or self.released) else 0

    def release(self):
        """Release this lock request.

        If the lock is currently acquired, then it is released and another
        queued request may acquire the lock in turn. If the lock is not
        already acquired, then this request is simply cancelled and will
        never acquire a lock.
        """
        mutex = self.mutex()
        if mutex is None:
            return
        mutex._release_lock(self)

    def _taskDone(self, *args, **kwds):
        self._wait_event.set()
        Future._taskDone(self, *args, **kwds)

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.release()

    def __repr__(self):
        return "<%s %s 0x%x>" % (self.__class__.__name__, self.name, id(self))


class Counter(object):
    """Just a thread-safe counter, returns the next integer every time next()
    is called.
    """
    def __init__(self):
        self.value = 0
        self.lock = Lock()

    def __iter__(self):
        return self

    def __next__(self):  # for py3
        with self.lock:
            self.value += 1
            return self.value - 1

    def next(self):  # for py2
        return self.__next__()
"will be locked first. \"\"\" fut = PriorityLockRequest(self, name=name) # print(\"request lock:\", fut)",
"# print(\"release request:\", fut) if fut.released: return fut._released = True if fut.acquired: #",
"\"<%s %s 0x%x>\" % (self.__class__.__name__, self.name, id(self)) class Counter(object): \"\"\"Just a thread-safe counter,",
"print_function, division import weakref from threading import Lock, Thread, Event from six.moves import",
"to provide a mutex that: - Uses futures for acquiring locks asynchronously -",
"import Lock, Thread, Event from six.moves import queue from .future import Future class",
"self.value = 0 self.lock = Lock() def __iter__(self): return self def __next__(self): #",
"context manager with lock.acquire() as req: req.wait() # .. do stuff while lock",
"be acquired in priority order Examples:: lock = PriorityLock() # manual lock /",
"# context manager with lock.acquire() as req: req.wait() # .. do stuff while",
"and priority queueing. The purpose of this class is to provide a mutex",
"wait / release req = lock.acquire() req.wait() # wait for lock to be",
"mutex is None: return mutex._release_lock(self) def _taskDone(self, *args, **kwds): self._wait_event.set() Future._taskDone(self, *args, **kwds)",
"+= 1 return self.value - 1 def next(self): # for py2 return self.__next__()",
"threading import Lock, Thread, Event from six.moves import queue from .future import Future",
"req.wait() # .. do stuff while lock is acquired \"\"\" def __init__(self, name=None):",
"next lock request while True: _, _, fut = self.lock_queue.get() with fut._acq_lock: if",
"and prevents other requests from acquiring the lock. \"\"\" return self._acquired @property def",
"fut._acq_lock: # print(\"release request:\", fut) if fut.released: return fut._released = True if fut.acquired:",
"name): Future.__init__(self) self.mutex = weakref.ref(mutex) self.name = name self._acq_lock = Lock() self._wait_event =",
"six.moves import queue from .future import Future class PriorityLock(object): \"\"\"Mutex with asynchronous locking",
"_lock_loop(self): while True: # wait for lock to become available self.unlock_event.wait() # get",
"*args, **kwds) def __enter__(self): return self def __exit__(self, *args): self.release() def __repr__(self): return",
"fut): with fut._acq_lock: # print(\"release request:\", fut) if fut.released: return fut._released = True",
"_, _, fut = self.lock_queue.get() with fut._acq_lock: if fut._released: # future has already",
"queue from .future import Future class PriorityLock(object): \"\"\"Mutex with asynchronous locking and priority",
"values will be locked first. \"\"\" fut = PriorityLockRequest(self, name=name) # print(\"request lock:\",",
"def release(self): \"\"\"Release this lock request. If the lock is currently acquired, then",
"do stuff while lock is acquired \"\"\" def __init__(self, name=None): self.name = name",
"# print(\"assign lock:\", fut) fut._acquired = True fut._taskDone() self.unlock_event.clear() break def __repr__(self): return",
"while lock is acquired req.release() # context manager with lock.acquire() as req: req.wait()",
"name=None): \"\"\"Return a Future that completes when the lock is acquired. Higher priority",
"asynchronous locking and priority queueing. The purpose of this class is to provide",
"request has released its lock (if any) and can never acquire the lock",
"def _release_lock(self, fut): with fut._acq_lock: # print(\"release request:\", fut) if fut.released: return fut._released",
"class is to provide a mutex that: - Uses futures for acquiring locks",
"any) and can never acquire the lock again. \"\"\" return self._released def _wait(self,",
"while True: _, _, fut = self.lock_queue.get() with fut._acq_lock: if fut._released: # future",
"= name self._acq_lock = Lock() self._wait_event = Event() self._acquired = False self._released =",
".future import Future class PriorityLock(object): \"\"\"Mutex with asynchronous locking and priority queueing. The",
"for lock to be acquired # .. do stuff while lock is acquired",
"= True self.lock_thread.start() def acquire(self, priority=0, name=None): \"\"\"Return a Future that completes when",
".. do stuff while lock is acquired req.release() # context manager with lock.acquire()",
"from six.moves import queue from .future import Future class PriorityLock(object): \"\"\"Mutex with asynchronous",
"to be acquired # .. do stuff while lock is acquired req.release() #",
"completes when the lock is acquired. Higher priority values will be locked first.",
"acquire the lock in turn. If the lock is not already acquired, then",
"acquiring locks asynchronously - Allows locks to be acquired in priority order Examples::",
"get next lock request while True: _, _, fut = self.lock_queue.get() with fut._acq_lock:",
"self._wait_event = Event() self._acquired = False self._released = False @property def acquired(self): \"\"\"If",
"a lock. \"\"\" mutex = self.mutex() if mutex is None: return mutex._release_lock(self) def",
"with self.lock: self.value += 1 return self.value - 1 def next(self): # for",
"PriorityLockRequest(Future): def __init__(self, mutex, name): Future.__init__(self) self.mutex = weakref.ref(mutex) self.name = name self._acq_lock",
"acquiring the lock. \"\"\" return self._acquired @property def released(self): \"\"\"If True, then this",
"def __next__(self): # for py3 with self.lock: self.value += 1 return self.value -",
"lock request while True: _, _, fut = self.lock_queue.get() with fut._acq_lock: if fut._released:",
"fut._taskDone() self.unlock_event.clear() break def __repr__(self): return \"<%s %s 0x%x>\" % (self.__class__.__name__, self.name, id(self))",
"self.lock = Lock() def __iter__(self): return self def __next__(self): # for py3 with",
"in priority order Examples:: lock = PriorityLock() # manual lock / wait /",
"# wait for lock to become available self.unlock_event.wait() # get next lock request",
"request currently has the lock acquired and prevents other requests from acquiring the",
"\"\"\"Mutex with asynchronous locking and priority queueing. The purpose of this class is",
"Event() self.unlock_event.set() self.lock_thread = Thread(target=self._lock_loop) self.lock_thread.daemon = True self.lock_thread.start() def acquire(self, priority=0, name=None):",
"release req = lock.acquire() req.wait() # wait for lock to be acquired #",
"print(\"release lock:\", fut) fut._acquired = False self.unlock_event.set() else: fut._taskDone(interrupted=True) def _lock_loop(self): while True:",
"Higher priority values will be locked first. \"\"\" fut = PriorityLockRequest(self, name=name) #",
"lock (if any) and can never acquire the lock again. \"\"\" return self._released",
"self._wait_event.wait(timeout=timeout) def percentDone(self): return 100 if (self.acquired or self.released) else 0 def release(self):",
"if mutex is None: return mutex._release_lock(self) def _taskDone(self, *args, **kwds): self._wait_event.set() Future._taskDone(self, *args,",
"= False @property def acquired(self): \"\"\"If True, then this request currently has the",
"this request is simply cancelled and will never acquire a lock. \"\"\" mutex",
"**kwds) def __enter__(self): return self def __exit__(self, *args): self.release() def __repr__(self): return \"<%s",
"manager with lock.acquire() as req: req.wait() # .. do stuff while lock is",
"is currently acquired, then it is released and another queued request may acquire",
"if fut.released: return fut._released = True if fut.acquired: # print(\"release lock:\", fut) fut._acquired",
"__repr__(self): return \"<%s %s 0x%x>\" % (self.__class__.__name__, self.name, id(self)) class PriorityLockRequest(Future): def __init__(self,",
"= PriorityLockRequest(self, name=name) # print(\"request lock:\", fut) self.lock_queue.put((-priority, next(self.req_count), fut)) return fut def",
"Uses futures for acquiring locks asynchronously - Allows locks to be acquired in",
"purpose of this class is to provide a mutex that: - Uses futures",
"\"\"\"If True, then this request currently has the lock acquired and prevents other",
"%s 0x%x>\" % (self.__class__.__name__, self.name, id(self)) class PriorityLockRequest(Future): def __init__(self, mutex, name): Future.__init__(self)",
"/ wait / release req = lock.acquire() req.wait() # wait for lock to",
"lock. \"\"\" return self._acquired @property def released(self): \"\"\"If True, then this request has",
"= self.mutex() if mutex is None: return mutex._release_lock(self) def _taskDone(self, *args, **kwds): self._wait_event.set()",
"it is released and another queued request may acquire the lock in turn.",
"priority values will be locked first. \"\"\" fut = PriorityLockRequest(self, name=name) # print(\"request",
"be acquired # .. do stuff while lock is acquired req.release() # context",
"lock to this request # print(\"assign lock:\", fut) fut._acquired = True fut._taskDone() self.unlock_event.clear()",
"return \"<%s %s 0x%x>\" % (self.__class__.__name__, self.name, id(self)) class PriorityLockRequest(Future): def __init__(self, mutex,",
"name=None): self.name = name self.req_count = Counter() self.lock_queue = queue.PriorityQueue() self.unlock_event = Event()",
"while True: # wait for lock to become available self.unlock_event.wait() # get next",
"def percentDone(self): return 100 if (self.acquired or self.released) else 0 def release(self): \"\"\"Release",
"return 100 if (self.acquired or self.released) else 0 def release(self): \"\"\"Release this lock",
"# future has already been released; don't assign lock continue # assign lock",
"req: req.wait() # .. do stuff while lock is acquired \"\"\" def __init__(self,",
"this request has released its lock (if any) and can never acquire the",
"req.release() # context manager with lock.acquire() as req: req.wait() # .. do stuff",
"is not already acquired, then this request is simply cancelled and will never",
"name=name) # print(\"request lock:\", fut) self.lock_queue.put((-priority, next(self.req_count), fut)) return fut def _release_lock(self, fut):",
"fut.released: return fut._released = True if fut.acquired: # print(\"release lock:\", fut) fut._acquired =",
"been released; don't assign lock continue # assign lock to this request #",
"__init__(self): self.value = 0 self.lock = Lock() def __iter__(self): return self def __next__(self):",
"*args, **kwds): self._wait_event.set() Future._taskDone(self, *args, **kwds) def __enter__(self): return self def __exit__(self, *args):",
"this request currently has the lock acquired and prevents other requests from acquiring",
"fut._released: # future has already been released; don't assign lock continue # assign",
"\"\"\"Release this lock request. If the lock is currently acquired, then it is",
"else 0 def release(self): \"\"\"Release this lock request. If the lock is currently",
"priority queueing. The purpose of this class is to provide a mutex that:",
"Counter(object): \"\"\"Just a thread-safe counter, returns the next integer every time next() is",
"lock:\", fut) fut._acquired = False self.unlock_event.set() else: fut._taskDone(interrupted=True) def _lock_loop(self): while True: #",
"acquired, then it is released and another queued request may acquire the lock",
"Lock() def __iter__(self): return self def __next__(self): # for py3 with self.lock: self.value",
"= 0 self.lock = Lock() def __iter__(self): return self def __next__(self): # for",
"prevents other requests from acquiring the lock. \"\"\" return self._acquired @property def released(self):",
"= True fut._taskDone() self.unlock_event.clear() break def __repr__(self): return \"<%s %s 0x%x>\" % (self.__class__.__name__,",
"locks to be acquired in priority order Examples:: lock = PriorityLock() # manual",
"_, fut = self.lock_queue.get() with fut._acq_lock: if fut._released: # future has already been",
"with lock.acquire() as req: req.wait() # .. do stuff while lock is acquired",
"Event() self._acquired = False self._released = False @property def acquired(self): \"\"\"If True, then",
"future has already been released; don't assign lock continue # assign lock to",
"self.lock_thread.daemon = True self.lock_thread.start() def acquire(self, priority=0, name=None): \"\"\"Return a Future that completes",
"in turn. If the lock is not already acquired, then this request is",
"currently acquired, then it is released and another queued request may acquire the",
"lock to become available self.unlock_event.wait() # get next lock request while True: _,",
"self.lock_queue.get() with fut._acq_lock: if fut._released: # future has already been released; don't assign",
"__iter__(self): return self def __next__(self): # for py3 with self.lock: self.value += 1",
"# assign lock to this request # print(\"assign lock:\", fut) fut._acquired = True",
"(self.__class__.__name__, self.name, id(self)) class Counter(object): \"\"\"Just a thread-safe counter, returns the next integer",
"the next integer every time next() is called. \"\"\" def __init__(self): self.value =",
"next(self.req_count), fut)) return fut def _release_lock(self, fut): with fut._acq_lock: # print(\"release request:\", fut)",
"self.req_count = Counter() self.lock_queue = queue.PriorityQueue() self.unlock_event = Event() self.unlock_event.set() self.lock_thread = Thread(target=self._lock_loop)",
"to this request # print(\"assign lock:\", fut) fut._acquired = True fut._taskDone() self.unlock_event.clear() break",
"again. \"\"\" return self._released def _wait(self, timeout): self._wait_event.wait(timeout=timeout) def percentDone(self): return 100 if",
"acquire(self, priority=0, name=None): \"\"\"Return a Future that completes when the lock is acquired.",
"is released and another queued request may acquire the lock in turn. If",
"self._released def _wait(self, timeout): self._wait_event.wait(timeout=timeout) def percentDone(self): return 100 if (self.acquired or self.released)",
"\"\"\"Return a Future that completes when the lock is acquired. Higher priority values",
"acquired, then this request is simply cancelled and will never acquire a lock.",
"don't assign lock continue # assign lock to this request # print(\"assign lock:\",",
"Examples:: lock = PriorityLock() # manual lock / wait / release req =",
"= self.lock_queue.get() with fut._acq_lock: if fut._released: # future has already been released; don't",
"will never acquire a lock. \"\"\" mutex = self.mutex() if mutex is None:",
"name self._acq_lock = Lock() self._wait_event = Event() self._acquired = False self._released = False",
"0x%x>\" % (self.__class__.__name__, self.name, id(self)) class PriorityLockRequest(Future): def __init__(self, mutex, name): Future.__init__(self) self.mutex",
"Lock() self._wait_event = Event() self._acquired = False self._released = False @property def acquired(self):",
"100 if (self.acquired or self.released) else 0 def release(self): \"\"\"Release this lock request.",
"then this request has released its lock (if any) and can never acquire",
"continue # assign lock to this request # print(\"assign lock:\", fut) fut._acquired =",
"= lock.acquire() req.wait() # wait for lock to be acquired # .. do",
"lock continue # assign lock to this request # print(\"assign lock:\", fut) fut._acquired",
"assign lock continue # assign lock to this request # print(\"assign lock:\", fut)",
"self.lock_thread.start() def acquire(self, priority=0, name=None): \"\"\"Return a Future that completes when the lock",
"The purpose of this class is to provide a mutex that: - Uses",
"None: return mutex._release_lock(self) def _taskDone(self, *args, **kwds): self._wait_event.set() Future._taskDone(self, *args, **kwds) def __enter__(self):",
".. do stuff while lock is acquired \"\"\" def __init__(self, name=None): self.name =",
"this lock request. If the lock is currently acquired, then it is released",
"= True if fut.acquired: # print(\"release lock:\", fut) fut._acquired = False self.unlock_event.set() else:",
"_taskDone(self, *args, **kwds): self._wait_event.set() Future._taskDone(self, *args, **kwds) def __enter__(self): return self def __exit__(self,",
"If the lock is not already acquired, then this request is simply cancelled",
"its lock (if any) and can never acquire the lock again. \"\"\" return",
"class Counter(object): \"\"\"Just a thread-safe counter, returns the next integer every time next()",
"lock:\", fut) fut._acquired = True fut._taskDone() self.unlock_event.clear() break def __repr__(self): return \"<%s %s",
"lock acquired and prevents other requests from acquiring the lock. \"\"\" return self._acquired",
"\"\"\" fut = PriorityLockRequest(self, name=name) # print(\"request lock:\", fut) self.lock_queue.put((-priority, next(self.req_count), fut)) return",
"(if any) and can never acquire the lock again. \"\"\" return self._released def",
"# manual lock / wait / release req = lock.acquire() req.wait() # wait",
"Thread, Event from six.moves import queue from .future import Future class PriorityLock(object): \"\"\"Mutex",
"self.unlock_event.set() self.lock_thread = Thread(target=self._lock_loop) self.lock_thread.daemon = True self.lock_thread.start() def acquire(self, priority=0, name=None): \"\"\"Return",
"priority=0, name=None): \"\"\"Return a Future that completes when the lock is acquired. Higher",
"with fut._acq_lock: if fut._released: # future has already been released; don't assign lock",
"is acquired. Higher priority values will be locked first. \"\"\" fut = PriorityLockRequest(self,",
"self.unlock_event.wait() # get next lock request while True: _, _, fut = self.lock_queue.get()",
"__init__(self, mutex, name): Future.__init__(self) self.mutex = weakref.ref(mutex) self.name = name self._acq_lock = Lock()",
"def __repr__(self): return \"<%s %s 0x%x>\" % (self.__class__.__name__, self.name, id(self)) class PriorityLockRequest(Future): def",
"def __enter__(self): return self def __exit__(self, *args): self.release() def __repr__(self): return \"<%s %s",
"is called. \"\"\" def __init__(self): self.value = 0 self.lock = Lock() def __iter__(self):",
"Lock, Thread, Event from six.moves import queue from .future import Future class PriorityLock(object):",
"self.name, id(self)) class PriorityLockRequest(Future): def __init__(self, mutex, name): Future.__init__(self) self.mutex = weakref.ref(mutex) self.name",
"= False self.unlock_event.set() else: fut._taskDone(interrupted=True) def _lock_loop(self): while True: # wait for lock",
"fut) fut._acquired = True fut._taskDone() self.unlock_event.clear() break def __repr__(self): return \"<%s %s 0x%x>\"",
"the lock is currently acquired, then it is released and another queued request",
"another queued request may acquire the lock in turn. If the lock is",
"may acquire the lock in turn. If the lock is not already acquired,",
"integer every time next() is called. \"\"\" def __init__(self): self.value = 0 self.lock",
"cancelled and will never acquire a lock. \"\"\" mutex = self.mutex() if mutex",
"self.name, id(self)) class Counter(object): \"\"\"Just a thread-safe counter, returns the next integer every",
"def __repr__(self): return \"<%s %s 0x%x>\" % (self.__class__.__name__, self.name, id(self)) class Counter(object): \"\"\"Just",
"self.lock: self.value += 1 return self.value - 1 def next(self): # for py2",
"is simply cancelled and will never acquire a lock. \"\"\" mutex = self.mutex()",
"fut) fut._acquired = False self.unlock_event.set() else: fut._taskDone(interrupted=True) def _lock_loop(self): while True: # wait",
"then this request currently has the lock acquired and prevents other requests from",
"self.lock_queue = queue.PriorityQueue() self.unlock_event = Event() self.unlock_event.set() self.lock_thread = Thread(target=self._lock_loop) self.lock_thread.daemon = True",
"Thread(target=self._lock_loop) self.lock_thread.daemon = True self.lock_thread.start() def acquire(self, priority=0, name=None): \"\"\"Return a Future that",
"= False self._released = False @property def acquired(self): \"\"\"If True, then this request",
"False self._released = False @property def acquired(self): \"\"\"If True, then this request currently",
"timeout): self._wait_event.wait(timeout=timeout) def percentDone(self): return 100 if (self.acquired or self.released) else 0 def",
"(self.__class__.__name__, self.name, id(self)) class PriorityLockRequest(Future): def __init__(self, mutex, name): Future.__init__(self) self.mutex = weakref.ref(mutex)",
"def __init__(self): self.value = 0 self.lock = Lock() def __iter__(self): return self def",
"# wait for lock to be acquired # .. do stuff while lock",
"weakref.ref(mutex) self.name = name self._acq_lock = Lock() self._wait_event = Event() self._acquired = False",
"be locked first. \"\"\" fut = PriorityLockRequest(self, name=name) # print(\"request lock:\", fut) self.lock_queue.put((-priority,",
"the lock again. \"\"\" return self._released def _wait(self, timeout): self._wait_event.wait(timeout=timeout) def percentDone(self): return",
"has already been released; don't assign lock continue # assign lock to this",
"return \"<%s %s 0x%x>\" % (self.__class__.__name__, self.name, id(self)) class Counter(object): \"\"\"Just a thread-safe",
"stuff while lock is acquired \"\"\" def __init__(self, name=None): self.name = name self.req_count",
"self.unlock_event.clear() break def __repr__(self): return \"<%s %s 0x%x>\" % (self.__class__.__name__, self.name, id(self)) class",
"req.wait() # wait for lock to be acquired # .. do stuff while",
"= PriorityLock() # manual lock / wait / release req = lock.acquire() req.wait()",
"never acquire a lock. \"\"\" mutex = self.mutex() if mutex is None: return",
"break def __repr__(self): return \"<%s %s 0x%x>\" % (self.__class__.__name__, self.name, id(self)) class PriorityLockRequest(Future):",
"PriorityLock() # manual lock / wait / release req = lock.acquire() req.wait() #",
"fut def _release_lock(self, fut): with fut._acq_lock: # print(\"release request:\", fut) if fut.released: return",
"return self._released def _wait(self, timeout): self._wait_event.wait(timeout=timeout) def percentDone(self): return 100 if (self.acquired or",
"id(self)) class PriorityLockRequest(Future): def __init__(self, mutex, name): Future.__init__(self) self.mutex = weakref.ref(mutex) self.name =",
"self.value += 1 return self.value - 1 def next(self): # for py2 return",
"fut._acquired = True fut._taskDone() self.unlock_event.clear() break def __repr__(self): return \"<%s %s 0x%x>\" %",
"can never acquire the lock again. \"\"\" return self._released def _wait(self, timeout): self._wait_event.wait(timeout=timeout)",
"released(self): \"\"\"If True, then this request has released its lock (if any) and",
"priority order Examples:: lock = PriorityLock() # manual lock / wait / release",
"# print(\"release lock:\", fut) fut._acquired = False self.unlock_event.set() else: fut._taskDone(interrupted=True) def _lock_loop(self): while",
"lock in turn. If the lock is not already acquired, then this request",
"self.released) else 0 def release(self): \"\"\"Release this lock request. If the lock is",
"# get next lock request while True: _, _, fut = self.lock_queue.get() with",
"fut._taskDone(interrupted=True) def _lock_loop(self): while True: # wait for lock to become available self.unlock_event.wait()",
"= name self.req_count = Counter() self.lock_queue = queue.PriorityQueue() self.unlock_event = Event() self.unlock_event.set() self.lock_thread",
"def __exit__(self, *args): self.release() def __repr__(self): return \"<%s %s 0x%x>\" % (self.__class__.__name__, self.name,",
"- Allows locks to be acquired in priority order Examples:: lock = PriorityLock()",
"False @property def acquired(self): \"\"\"If True, then this request currently has the lock",
"= weakref.ref(mutex) self.name = name self._acq_lock = Lock() self._wait_event = Event() self._acquired =",
"fut.acquired: # print(\"release lock:\", fut) fut._acquired = False self.unlock_event.set() else: fut._taskDone(interrupted=True) def _lock_loop(self):",
"called. \"\"\" def __init__(self): self.value = 0 self.lock = Lock() def __iter__(self): return",
"self def __exit__(self, *args): self.release() def __repr__(self): return \"<%s %s 0x%x>\" % (self.__class__.__name__,",
"as req: req.wait() # .. do stuff while lock is acquired \"\"\" def",
"def acquired(self): \"\"\"If True, then this request currently has the lock acquired and",
"id(self)) class Counter(object): \"\"\"Just a thread-safe counter, returns the next integer every time",
"a Future that completes when the lock is acquired. Higher priority values will",
"*args): self.release() def __repr__(self): return \"<%s %s 0x%x>\" % (self.__class__.__name__, self.name, id(self)) class"
] |
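The docstring above shows the basic call pattern; the sketch below additionally exercises the priority ordering. It is a minimal illustration only: `priority_lock` is an assumed module name for the file above, and it presumes `Future.wait()` blocks until `_taskDone()` fires, as in acq4's `Future`.

# Minimal priority-ordering sketch. `priority_lock` is an assumed module
# name for the file above; Future.wait() is assumed to block until the
# request is granted (or released).
from priority_lock import PriorityLock

lock = PriorityLock(name="demo")

holder = lock.acquire(name="holder")
holder.wait()                                  # we now hold the lock

low = lock.acquire(priority=0, name="low")     # queued behind `holder`
high = lock.acquire(priority=10, name="high")  # queued, but higher priority

holder.release()                               # lock is handed to `high` first
high.wait()
assert high.acquired and not low.acquired

high.release()                                 # now `low` takes its turn
low.wait()
low.release()

Note how ties are broken: `acquire()` enqueues `(-priority, next(self.req_count), fut)`, so equal-priority requests fall back to FIFO order via the thread-safe `Counter`.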
#!/usr/bin/env python
from __future__ import print_function
import os

print(
    """[busco]
out_path = {0}
tmp_path = {0}/tmp

[tblastn]
# path to tblastn
path = /usr/bin/

[makeblastdb]
# path to makeblastdb
path = /usr/bin/

[augustus]
# path to augustus
path = /opt/augustus/bin/

[etraining]
# path to augustus etraining
path = /opt/augustus/bin/

# path to augustus perl scripts, redeclare it for each new script
[gff2gbSmallDNA.pl]
path = /usr/bin/

[new_species.pl]
path = /usr/bin/

[optimize_augustus.pl]
path = /usr/bin/

[hmmsearch]
# path to HMMsearch executable
path = /usr/local/bin/

[Rscript]
# path to Rscript, if you wish to use the plot tool
path = /usr/bin/""".format(os.environ['PWD'])
)
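The script simply prints a BUSCO config skeleton to stdout, with `out_path` and `tmp_path` anchored at the invoking directory (`$PWD`). A typical use, assuming the file above is saved as `generate_busco_config.py` (a hypothetical name), is to redirect its output to `config.ini` and read it back with the standard-library `configparser`:

# Assumes the generator above was saved as generate_busco_config.py
# (hypothetical name) and run as:
#
#     python generate_busco_config.py > config.ini
#
import configparser

config = configparser.ConfigParser()
config.read('config.ini')

print(config['busco']['out_path'])   # the $PWD captured at generation time
print(config['augustus']['path'])    # /opt/augustus/bin/

The full-line `#` comments in the generated file are ignored by `configparser` by default, so the tool sections can be read back exactly as written.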
# -*- coding:utf-8 -*-
from django.db import models


# Models for logging user actions on assets
class Logging(models.Model):
    user = models.CharField(max_length=140)
    request = models.TextField(blank=True, null=True)
    goal = models.TextField(blank=True, null=True)
    done = models.BooleanField(default=False)
    datetime = models.DateTimeField()

    def __unicode__(self):
        return str(self.id) + ';'.join((str(self.datetime), self.user, self.goal, str(self.done)))
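A minimal usage sketch for the model above, assuming the app holding `Logging` is installed and migrated; the app label `assets` and all field values here are hypothetical:

# Hypothetical usage; `assets` is an assumed app label and the field
# values are illustrative only.
from django.utils import timezone

from assets.models import Logging

entry = Logging.objects.create(
    user='alice',
    request='POST /assets/42/transfer',
    goal='transfer asset 42 to another owner',
    done=False,
    datetime=timezone.now(),
)

entry.done = True   # mark the logged action as completed
entry.save()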
from directory_constants.choices import COUNTRY_CHOICES
from django import forms
from django.conf import settings
from django.forms import Select
from django.template.loader import render_to_string
from django.utils import translation

from directory_components.forms import fields
from directory_components import helpers

__all__ = [
    'CountryForm',
    'DirectoryComponentsFormMixin',
    'Form',
    'get_country_form_initial_data',
    'get_language_form_initial_data',
    'LanguageForm',
]

BLANK_COUNTRY_CHOICE = [("", "Select a country")]
COUNTRIES = BLANK_COUNTRY_CHOICE + COUNTRY_CHOICES


class DirectoryComponentsFormMixin:

    use_required_attribute = False
    error_css_class = 'form-group-error'

    def __str__(self):
        return render_to_string('directory_components/form_widgets/form.html', {'form': self})


class Form(DirectoryComponentsFormMixin, forms.Form):
    pass


class CountryForm(Form):
    country = fields.ChoiceField(
        label='Country',
        widget=Select(attrs={'id': 'great-header-country-select'}),
        choices=COUNTRIES
    )


def get_country_form_initial_data(request):
    return {
        'country': helpers.get_user_country(request).upper() or None
    }


class LanguageForm(forms.Form):
    lang = fields.ChoiceField(
        widget=Select(attrs={'id': 'great-header-language-select'}),
        choices=[]  # set by __init__
    )

    def __init__(self, language_choices=settings.LANGUAGES, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.fields['lang'].choices = language_choices

    def is_language_available(self, language_code):
        language_codes = [code for code, _ in self.fields['lang'].choices]
        return language_code in language_codes


def get_language_form_initial_data():
    return {
        'lang': translation.get_language()
    }
] |
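A minimal usage sketch, not taken from the source, showing how LanguageForm behaves once choices are injected; it assumes a configured Django environment (settings must be loaded before the module imports):

# Hypothetical example: instantiate the form with explicit choices and
# check language availability against them.
form = LanguageForm(language_choices=[('en-gb', 'English'), ('fr', 'Français')])
assert form.is_language_available('fr')
assert not form.is_language_available('de')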
<filename>Tools/ecl_ekf/batch_process_logdata_ekf.py<gh_stars>1-10
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import argparse
import os, glob

"""
Runs process_logdata_ekf.py on the .ulg files in the supplied directory.
ulog files are skipped from the analysis, if a corresponding .pdf file
already exists (unless the overwrite flag was set).
"""

parser = argparse.ArgumentParser(description='Analyse the estimator_status and ekf2_innovation message data for the'
                                             ' .ulg files in the specified directory')
parser.add_argument("directory_path")
parser.add_argument('-o', '--overwrite', action='store_true',
                    help='Whether to overwrite an already analysed file. If a file with .pdf extension exists for a '
                         '.ulg file, the log file will be skipped from analysis unless this flag has been set.')

def is_valid_directory(parser, arg):
    if os.path.isdir(arg):
        # Directory exists so return the directory
        return arg
    else:
        parser.error('The directory {} does not exist'.format(arg))

args = parser.parse_args()
ulog_directory = args.directory_path
print("\n" + "analysing the .ulg files in " + ulog_directory)

# get all the ulog files found in the specified directory
ulog_files = glob.glob(os.path.join(ulog_directory, '*.ulg'))

# remove the files already analysed unless the overwrite flag was specified. A ulog file is
# considered to be analysed if a corresponding .pdf file exists.
if not args.overwrite:
    print("skipping already analysed ulg files.")
    ulog_files = [ulog_file for ulog_file in ulog_files if not os.path.exists('{}.pdf'.format(ulog_file))]

# analyse all ulog files
for ulog_file in ulog_files:
    print("\n" + "loading " + ulog_file + " for analysis")
    os.system("python process_logdata_ekf.py '{}'".format(ulog_file))
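One design note, offered as a sketch rather than as the source's approach: the os.system call above interpolates the file path into a shell string, so a path containing a single quote would break the command. Passing an argument list to subprocess.run avoids shell quoting entirely:

# Sketch of a quoting-safe alternative to the os.system call above.
import subprocess

for ulog_file in ulog_files:
    print("\n" + "loading " + ulog_file + " for analysis")
    subprocess.run(["python", "process_logdata_ekf.py", ulog_file], check=True)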
import json
import numpy
from bm.controllers.prediction.ModelController import predict_values_from_model
from bm.db_helper.AttributesHelper import get_features, get_model_name, get_labels


class NpEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, (numpy.int_, numpy.intc, numpy.intp, numpy.int8, numpy.int16,
                            numpy.int32, numpy.int64, numpy.uint8, numpy.uint16,
                            numpy.uint32, numpy.uint64)):
            return int(obj)
        elif isinstance(obj, (numpy.float_, numpy.float16, numpy.float32, numpy.float64)):
            return float(obj)
        elif isinstance(obj, (numpy.ndarray,)):  # add this line
            return obj.tolist()  # add this line
        return json.JSONEncoder.default(self, obj)


def predictvalues(content):
    model_name = get_model_name()
    features_list = get_features()
    lables_list = get_labels()
    testing_values = []
    for i in features_list:
        feature_value = str(content[i])
        final_feature_value = feature_value  # float(feature_value) if feature_value.isnumeric() else feature_value
        testing_values.append(final_feature_value)
    predicted_value = predict_values_from_model(model_name, testing_values)

    # Create predicted values json object
    predicted_values_json = {}
    for j in range(len(predicted_value)):
        for i in range(len(lables_list)):
            bb = predicted_value[j][i]
            predicted_values_json[lables_list[i]] = predicted_value[j][i]
    # NpEncoder = NpEncoder(json.JSONEncoder)
    json_data = json.dumps(predicted_values_json, cls=NpEncoder)
    return json_data


def getplotiamge(content):
    return 0


def getmodelfeatures():
    features_list = get_features()
    features_json = {}
    j = 0
    for i in features_list:
        yy = str(i)
        features_json[i] = i
        j += 1
    # NpEncoder = NpEncoder(json.JSONEncoder)
    json_data = json.dumps(features_json, cls=NpEncoder)
    return json_data


def getmodellabels():
    labels_list = get_labels()
    labelss_json = {}
    j = 0
    for i in labels_list:
        yy = str(i)
        labelss_json[i] = i
        j += 1
    # NpEncoder = NpEncoder(json.JSONEncoder)
    json_data = json.dumps(labelss_json, cls=NpEncoder)
    return json_data


def getmodelprofile(contents):
    return 0


def nomodelfound():
    no_model_found = {'no_model': 'No Model found'}
    json_data = json.dumps(no_model_found, cls=NpEncoder)
    return json_data
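A small standalone check, added here as an assumed example rather than part of the module, of what NpEncoder buys: plain json.dumps rejects numpy scalars and arrays, while the encoder converts them to native Python types first:

import json
import numpy

payload = {'score': numpy.float32(0.92), 'classes': numpy.array([1, 2, 3])}
# json.dumps(payload) raises TypeError: numpy types are not JSON serialisable.
print(json.dumps(payload, cls=NpEncoder))  # -> {"score": 0.92..., "classes": [1, 2, 3]}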
import argparse
import glob
import h5py
import numpy as np
import PIL.Image as pil_image
from torchvision.transforms import transforms


def train(args):
    h5_file = h5py.File(args.output_path, 'w')
    lr_group = h5_file.create_group('lr')
    hr_group = h5_file.create_group('hr')
    image_list = sorted(glob.glob('{}/*'.format(args.images_dir)))
    patch_idx = 0
    for i, image_path in enumerate(image_list):
        hr = pil_image.open(image_path).convert('RGB')
        for hr in transforms.FiveCrop(size=(hr.height // 2, hr.width // 2))(hr):
            hr = hr.resize(((hr.width // args.scale) * args.scale,
                            (hr.height // args.scale) * args.scale), resample=pil_image.BICUBIC)
            lr = hr.resize((hr.width // args.scale, hr.height // args.scale), resample=pil_image.BICUBIC)
            hr = np.array(hr)
            lr = np.array(lr)
            lr_group.create_dataset(str(patch_idx), data=lr)
            hr_group.create_dataset(str(patch_idx), data=hr)
            patch_idx += 1
        print(i, patch_idx, image_path)
    h5_file.close()


def eval(args):
    h5_file = h5py.File(args.output_path, 'w')
    lr_group = h5_file.create_group('lr')
    hr_group = h5_file.create_group('hr')
    for i, image_path in enumerate(sorted(glob.glob('{}/*'.format(args.images_dir)))):
        hr = pil_image.open(image_path).convert('RGB')
        hr_width = (hr.width // args.scale) * args.scale
        hr_height = (hr.height // args.scale) * args.scale
        hr = hr.resize((hr_width, hr_height), resample=pil_image.BICUBIC)
        lr = hr.resize((hr.width // args.scale, hr_height // args.scale), resample=pil_image.BICUBIC)
        hr = np.array(hr)
        lr = np.array(lr)
        lr_group.create_dataset(str(i), data=lr)
        hr_group.create_dataset(str(i), data=hr)
        print(i)
    h5_file.close()


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--images-dir', type=str, required=False, default="../../DIV2K/DIV2K_train_HR/HR")  # ../../classical_SR_datasets/Set5/Set5  # ../../DIV2K/DIV2K_train_HR/HR
    parser.add_argument('--output-path', type=str, required=False, default="./h5file_DIV2K_train_HR_x8_train")
    parser.add_argument('--scale', type=int, default=8)
    parser.add_argument('--eval', action='store_true', default=False)
    args = parser.parse_args()
    # The recovered fragments end at "if"; the dispatch below is the natural
    # completion given the two functions above and the --eval flag.
    if args.eval:
        eval(args)
    else:
        train(args)
"numpy as np import PIL.Image as pil_image from torchvision.transforms import transforms def train(args):",
"h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr') hr_group = h5_file.create_group('hr') for i, image_path in enumerate(sorted(glob.glob('{}/*'.format(args.images_dir)))):",
"patch_idx += 1 print(i, patch_idx, image_path) h5_file.close() def eval(args): h5_file = h5py.File(args.output_path, 'w')",
"train(args): h5_file = h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr') hr_group = h5_file.create_group('hr') image_list =",
"= h5_file.create_group('hr') image_list = sorted(glob.glob('{}/*'.format(args.images_dir))) patch_idx = 0 for i, image_path in enumerate(image_list):",
"image_path in enumerate(sorted(glob.glob('{}/*'.format(args.images_dir)))): hr = pil_image.open(image_path).convert('RGB') hr_width = (hr.width // args.scale) * args.scale",
"= pil_image.open(image_path).convert('RGB') for hr in transforms.FiveCrop(size=(hr.height // 2, hr.width // 2))(hr): hr =",
"// 2))(hr): hr = hr.resize(((hr.width // args.scale) * args.scale, (hr.height // args.scale) *",
"+= 1 print(i, patch_idx, image_path) h5_file.close() def eval(args): h5_file = h5py.File(args.output_path, 'w') lr_group",
"argparse.ArgumentParser() parser.add_argument('--images-dir', type=str, required=False,default=\"../../DIV2K/DIV2K_train_HR/HR\") #../../classical_SR_datasets/Set5/Set5 #../../DIV2K/DIV2K_train_HR/HR parser.add_argument('--output-path', type=str, required=False,default=\"./h5file_DIV2K_train_HR_x8_train\") parser.add_argument('--scale', type=int, default=8) parser.add_argument('--eval',",
"hr_height), resample=pil_image.BICUBIC) lr = hr.resize((hr.width // args.scale, hr_height // args.scale), resample=pil_image.BICUBIC) hr =",
"as np import PIL.Image as pil_image from torchvision.transforms import transforms def train(args): h5_file",
"= h5_file.create_group('lr') hr_group = h5_file.create_group('hr') for i, image_path in enumerate(sorted(glob.glob('{}/*'.format(args.images_dir)))): hr = pil_image.open(image_path).convert('RGB')",
"// 2, hr.width // 2))(hr): hr = hr.resize(((hr.width // args.scale) * args.scale, (hr.height",
"= (hr.width // args.scale) * args.scale hr_height = (hr.height // args.scale) * args.scale",
"h5_file.create_group('lr') hr_group = h5_file.create_group('hr') for i, image_path in enumerate(sorted(glob.glob('{}/*'.format(args.images_dir)))): hr = pil_image.open(image_path).convert('RGB') hr_width",
"np.array(hr) lr = np.array(lr) lr_group.create_dataset(str(i), data=lr) hr_group.create_dataset(str(i), data=hr) print(i) h5_file.close() if __name__ ==",
"torchvision.transforms import transforms def train(args): h5_file = h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr') hr_group",
"print(i, patch_idx, image_path) h5_file.close() def eval(args): h5_file = h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr')",
"h5_file = h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr') hr_group = h5_file.create_group('hr') image_list = sorted(glob.glob('{}/*'.format(args.images_dir)))",
"= sorted(glob.glob('{}/*'.format(args.images_dir))) patch_idx = 0 for i, image_path in enumerate(image_list): hr = pil_image.open(image_path).convert('RGB')",
"<gh_stars>10-100 import argparse import glob import h5py import numpy as np import PIL.Image",
"args.scale) * args.scale), resample=pil_image.BICUBIC) lr = hr.resize((hr.width // args.scale, hr.height // args.scale), resample=pil_image.BICUBIC)",
"h5_file.close() def eval(args): h5_file = h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr') hr_group = h5_file.create_group('hr')",
"h5py import numpy as np import PIL.Image as pil_image from torchvision.transforms import transforms",
"resample=pil_image.BICUBIC) hr = np.array(hr) lr = np.array(lr) lr_group.create_dataset(str(i), data=lr) hr_group.create_dataset(str(i), data=hr) print(i) h5_file.close()",
"enumerate(image_list): hr = pil_image.open(image_path).convert('RGB') for hr in transforms.FiveCrop(size=(hr.height // 2, hr.width // 2))(hr):",
"import glob import h5py import numpy as np import PIL.Image as pil_image from",
"lr_group.create_dataset(str(patch_idx), data=lr) hr_group.create_dataset(str(patch_idx), data=hr) patch_idx += 1 print(i, patch_idx, image_path) h5_file.close() def eval(args):",
"= 0 for i, image_path in enumerate(image_list): hr = pil_image.open(image_path).convert('RGB') for hr in",
"'__main__': parser = argparse.ArgumentParser() parser.add_argument('--images-dir', type=str, required=False,default=\"../../DIV2K/DIV2K_train_HR/HR\") #../../classical_SR_datasets/Set5/Set5 #../../DIV2K/DIV2K_train_HR/HR parser.add_argument('--output-path', type=str, required=False,default=\"./h5file_DIV2K_train_HR_x8_train\") parser.add_argument('--scale',",
"print(i) h5_file.close() if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--images-dir', type=str, required=False,default=\"../../DIV2K/DIV2K_train_HR/HR\") #../../classical_SR_datasets/Set5/Set5",
"args.scale), resample=pil_image.BICUBIC) hr = np.array(hr) lr = np.array(lr) lr_group.create_dataset(str(i), data=lr) hr_group.create_dataset(str(i), data=hr) print(i)",
"PIL.Image as pil_image from torchvision.transforms import transforms def train(args): h5_file = h5py.File(args.output_path, 'w')",
"resample=pil_image.BICUBIC) hr = np.array(hr) lr = np.array(lr) lr_group.create_dataset(str(patch_idx), data=lr) hr_group.create_dataset(str(patch_idx), data=hr) patch_idx +=",
"// args.scale) * args.scale hr = hr.resize((hr_width, hr_height), resample=pil_image.BICUBIC) lr = hr.resize((hr.width //",
"args.scale) * args.scale, (hr.height // args.scale) * args.scale), resample=pil_image.BICUBIC) lr = hr.resize((hr.width //",
"import numpy as np import PIL.Image as pil_image from torchvision.transforms import transforms def",
"parser = argparse.ArgumentParser() parser.add_argument('--images-dir', type=str, required=False,default=\"../../DIV2K/DIV2K_train_HR/HR\") #../../classical_SR_datasets/Set5/Set5 #../../DIV2K/DIV2K_train_HR/HR parser.add_argument('--output-path', type=str, required=False,default=\"./h5file_DIV2K_train_HR_x8_train\") parser.add_argument('--scale', type=int,",
"hr = hr.resize(((hr.width // args.scale) * args.scale, (hr.height // args.scale) * args.scale), resample=pil_image.BICUBIC)",
"lr = np.array(lr) lr_group.create_dataset(str(patch_idx), data=lr) hr_group.create_dataset(str(patch_idx), data=hr) patch_idx += 1 print(i, patch_idx, image_path)",
"import transforms def train(args): h5_file = h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr') hr_group =",
"hr_group.create_dataset(str(patch_idx), data=hr) patch_idx += 1 print(i, patch_idx, image_path) h5_file.close() def eval(args): h5_file =",
"#../../classical_SR_datasets/Set5/Set5 #../../DIV2K/DIV2K_train_HR/HR parser.add_argument('--output-path', type=str, required=False,default=\"./h5file_DIV2K_train_HR_x8_train\") parser.add_argument('--scale', type=int, default=8) parser.add_argument('--eval', action='store_true',default=False) args = parser.parse_args()",
"required=False,default=\"./h5file_DIV2K_train_HR_x8_train\") parser.add_argument('--scale', type=int, default=8) parser.add_argument('--eval', action='store_true',default=False) args = parser.parse_args() if not args.eval: train(args)",
"args.scale, hr_height // args.scale), resample=pil_image.BICUBIC) hr = np.array(hr) lr = np.array(lr) lr_group.create_dataset(str(i), data=lr)",
"* args.scale hr_height = (hr.height // args.scale) * args.scale hr = hr.resize((hr_width, hr_height),",
"from torchvision.transforms import transforms def train(args): h5_file = h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr')",
"def train(args): h5_file = h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr') hr_group = h5_file.create_group('hr') image_list",
"for hr in transforms.FiveCrop(size=(hr.height // 2, hr.width // 2))(hr): hr = hr.resize(((hr.width //",
"in transforms.FiveCrop(size=(hr.height // 2, hr.width // 2))(hr): hr = hr.resize(((hr.width // args.scale) *",
"hr_height = (hr.height // args.scale) * args.scale hr = hr.resize((hr_width, hr_height), resample=pil_image.BICUBIC) lr",
"// args.scale), resample=pil_image.BICUBIC) hr = np.array(hr) lr = np.array(lr) lr_group.create_dataset(str(patch_idx), data=lr) hr_group.create_dataset(str(patch_idx), data=hr)",
"patch_idx = 0 for i, image_path in enumerate(image_list): hr = pil_image.open(image_path).convert('RGB') for hr",
"in enumerate(sorted(glob.glob('{}/*'.format(args.images_dir)))): hr = pil_image.open(image_path).convert('RGB') hr_width = (hr.width // args.scale) * args.scale hr_height",
"lr_group.create_dataset(str(i), data=lr) hr_group.create_dataset(str(i), data=hr) print(i) h5_file.close() if __name__ == '__main__': parser = argparse.ArgumentParser()",
"hr in transforms.FiveCrop(size=(hr.height // 2, hr.width // 2))(hr): hr = hr.resize(((hr.width // args.scale)",
"lr_group = h5_file.create_group('lr') hr_group = h5_file.create_group('hr') for i, image_path in enumerate(sorted(glob.glob('{}/*'.format(args.images_dir)))): hr =",
"type=str, required=False,default=\"../../DIV2K/DIV2K_train_HR/HR\") #../../classical_SR_datasets/Set5/Set5 #../../DIV2K/DIV2K_train_HR/HR parser.add_argument('--output-path', type=str, required=False,default=\"./h5file_DIV2K_train_HR_x8_train\") parser.add_argument('--scale', type=int, default=8) parser.add_argument('--eval', action='store_true',default=False) args",
"= argparse.ArgumentParser() parser.add_argument('--images-dir', type=str, required=False,default=\"../../DIV2K/DIV2K_train_HR/HR\") #../../classical_SR_datasets/Set5/Set5 #../../DIV2K/DIV2K_train_HR/HR parser.add_argument('--output-path', type=str, required=False,default=\"./h5file_DIV2K_train_HR_x8_train\") parser.add_argument('--scale', type=int, default=8)",
"= h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr') hr_group = h5_file.create_group('hr') for i, image_path in",
"// args.scale) * args.scale), resample=pil_image.BICUBIC) lr = hr.resize((hr.width // args.scale, hr.height // args.scale),",
"* args.scale hr = hr.resize((hr_width, hr_height), resample=pil_image.BICUBIC) lr = hr.resize((hr.width // args.scale, hr_height",
"h5_file.close() if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--images-dir', type=str, required=False,default=\"../../DIV2K/DIV2K_train_HR/HR\") #../../classical_SR_datasets/Set5/Set5 #../../DIV2K/DIV2K_train_HR/HR",
"lr_group = h5_file.create_group('lr') hr_group = h5_file.create_group('hr') image_list = sorted(glob.glob('{}/*'.format(args.images_dir))) patch_idx = 0 for",
"args.scale, hr.height // args.scale), resample=pil_image.BICUBIC) hr = np.array(hr) lr = np.array(lr) lr_group.create_dataset(str(patch_idx), data=lr)",
"i, image_path in enumerate(sorted(glob.glob('{}/*'.format(args.images_dir)))): hr = pil_image.open(image_path).convert('RGB') hr_width = (hr.width // args.scale) *",
"hr.resize((hr.width // args.scale, hr_height // args.scale), resample=pil_image.BICUBIC) hr = np.array(hr) lr = np.array(lr)",
"hr_group.create_dataset(str(i), data=hr) print(i) h5_file.close() if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--images-dir', type=str,",
"import h5py import numpy as np import PIL.Image as pil_image from torchvision.transforms import",
"resample=pil_image.BICUBIC) lr = hr.resize((hr.width // args.scale, hr.height // args.scale), resample=pil_image.BICUBIC) hr = np.array(hr)",
"data=lr) hr_group.create_dataset(str(patch_idx), data=hr) patch_idx += 1 print(i, patch_idx, image_path) h5_file.close() def eval(args): h5_file",
"hr = hr.resize((hr_width, hr_height), resample=pil_image.BICUBIC) lr = hr.resize((hr.width // args.scale, hr_height // args.scale),",
"hr = np.array(hr) lr = np.array(lr) lr_group.create_dataset(str(patch_idx), data=lr) hr_group.create_dataset(str(patch_idx), data=hr) patch_idx += 1",
"h5_file.create_group('hr') for i, image_path in enumerate(sorted(glob.glob('{}/*'.format(args.images_dir)))): hr = pil_image.open(image_path).convert('RGB') hr_width = (hr.width //",
"argparse import glob import h5py import numpy as np import PIL.Image as pil_image",
"required=False,default=\"../../DIV2K/DIV2K_train_HR/HR\") #../../classical_SR_datasets/Set5/Set5 #../../DIV2K/DIV2K_train_HR/HR parser.add_argument('--output-path', type=str, required=False,default=\"./h5file_DIV2K_train_HR_x8_train\") parser.add_argument('--scale', type=int, default=8) parser.add_argument('--eval', action='store_true',default=False) args =",
"h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr') hr_group = h5_file.create_group('hr') image_list = sorted(glob.glob('{}/*'.format(args.images_dir))) patch_idx =",
"pil_image.open(image_path).convert('RGB') for hr in transforms.FiveCrop(size=(hr.height // 2, hr.width // 2))(hr): hr = hr.resize(((hr.width",
"parser.add_argument('--scale', type=int, default=8) parser.add_argument('--eval', action='store_true',default=False) args = parser.parse_args() if not args.eval: train(args) else:",
"type=int, default=8) parser.add_argument('--eval', action='store_true',default=False) args = parser.parse_args() if not args.eval: train(args) else: eval(args)",
"image_list = sorted(glob.glob('{}/*'.format(args.images_dir))) patch_idx = 0 for i, image_path in enumerate(image_list): hr =",
"import PIL.Image as pil_image from torchvision.transforms import transforms def train(args): h5_file = h5py.File(args.output_path,",
"hr_height // args.scale), resample=pil_image.BICUBIC) hr = np.array(hr) lr = np.array(lr) lr_group.create_dataset(str(i), data=lr) hr_group.create_dataset(str(i),",
"= hr.resize((hr.width // args.scale, hr_height // args.scale), resample=pil_image.BICUBIC) hr = np.array(hr) lr =",
"if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--images-dir', type=str, required=False,default=\"../../DIV2K/DIV2K_train_HR/HR\") #../../classical_SR_datasets/Set5/Set5 #../../DIV2K/DIV2K_train_HR/HR parser.add_argument('--output-path',",
"transforms.FiveCrop(size=(hr.height // 2, hr.width // 2))(hr): hr = hr.resize(((hr.width // args.scale) * args.scale,",
"* args.scale), resample=pil_image.BICUBIC) lr = hr.resize((hr.width // args.scale, hr.height // args.scale), resample=pil_image.BICUBIC) hr",
"hr.resize((hr.width // args.scale, hr.height // args.scale), resample=pil_image.BICUBIC) hr = np.array(hr) lr = np.array(lr)",
"data=hr) print(i) h5_file.close() if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--images-dir', type=str, required=False,default=\"../../DIV2K/DIV2K_train_HR/HR\")",
"for i, image_path in enumerate(sorted(glob.glob('{}/*'.format(args.images_dir)))): hr = pil_image.open(image_path).convert('RGB') hr_width = (hr.width // args.scale)",
"np.array(lr) lr_group.create_dataset(str(i), data=lr) hr_group.create_dataset(str(i), data=hr) print(i) h5_file.close() if __name__ == '__main__': parser =",
"(hr.width // args.scale) * args.scale hr_height = (hr.height // args.scale) * args.scale hr",
"args.scale) * args.scale hr_height = (hr.height // args.scale) * args.scale hr = hr.resize((hr_width,",
"= hr.resize((hr.width // args.scale, hr.height // args.scale), resample=pil_image.BICUBIC) hr = np.array(hr) lr =",
"i, image_path in enumerate(image_list): hr = pil_image.open(image_path).convert('RGB') for hr in transforms.FiveCrop(size=(hr.height // 2,",
"data=lr) hr_group.create_dataset(str(i), data=hr) print(i) h5_file.close() if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--images-dir',",
"pil_image.open(image_path).convert('RGB') hr_width = (hr.width // args.scale) * args.scale hr_height = (hr.height // args.scale)",
"np import PIL.Image as pil_image from torchvision.transforms import transforms def train(args): h5_file =",
"parser.add_argument('--output-path', type=str, required=False,default=\"./h5file_DIV2K_train_HR_x8_train\") parser.add_argument('--scale', type=int, default=8) parser.add_argument('--eval', action='store_true',default=False) args = parser.parse_args() if not",
"= h5py.File(args.output_path, 'w') lr_group = h5_file.create_group('lr') hr_group = h5_file.create_group('hr') image_list = sorted(glob.glob('{}/*'.format(args.images_dir))) patch_idx",
"* args.scale, (hr.height // args.scale) * args.scale), resample=pil_image.BICUBIC) lr = hr.resize((hr.width // args.scale,",
"hr.resize(((hr.width // args.scale) * args.scale, (hr.height // args.scale) * args.scale), resample=pil_image.BICUBIC) lr =",
"0 for i, image_path in enumerate(image_list): hr = pil_image.open(image_path).convert('RGB') for hr in transforms.FiveCrop(size=(hr.height",
"args.scale, (hr.height // args.scale) * args.scale), resample=pil_image.BICUBIC) lr = hr.resize((hr.width // args.scale, hr.height",
"args.scale), resample=pil_image.BICUBIC) lr = hr.resize((hr.width // args.scale, hr.height // args.scale), resample=pil_image.BICUBIC) hr =",
"type=str, required=False,default=\"./h5file_DIV2K_train_HR_x8_train\") parser.add_argument('--scale', type=int, default=8) parser.add_argument('--eval', action='store_true',default=False) args = parser.parse_args() if not args.eval:"
] |
[
"self.y = y self.n = n self.opt = opt if type(self.n) != list",
"if loc == True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for key,item in self.eddies.items()] else: eddies_loc=[item['loc'][-1] for key,item",
"yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) distance[distance==0]=away_val*self.a checker = ((distance < away_val*self.a).any() or",
"x and y. amplitude (float): Amplitud of gaussian. x0 , yo (float): Center",
"def checkposition(self,away_val=5,loc=False): if loc == True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for key,item in self.eddies.items()] else: eddies_loc=[item['loc'][-1]",
"= ((distance < away_val*self.a).any() or (distance < away_val*self.b).any() ) count = count +",
"+ 1 if loc == True: return self.eddies def make_random_walk(self,indexs, steps): move_dict =",
"data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for t in range(N): #pdb.set_trace() if self.opt == 'no_interaction' or self.opt ==",
"list. Usage: Check scan_eddym function. ''' x=coords[0] y=coords[1] amplitude = coords[2] xo =",
"return [step,0] def go_upleft(self,indexs,step): return [step,-step] def go_left(self,indexs,step): return [0,-step] def go_downleft(self,indexs,step): return",
"def go_down(self,indexs,step): return [-step,0] def go_downright(self,indexs,step): return [-step,step] def twoD_Gaussian(self, coords, sigma_x, sigma_y,",
"else: g = (x-xo)*0 + (y-yo)*0 return g.ravel() def checkposition(self,away_val=5,loc=False): if loc ==",
"key,item in self.eddies.items()] for key1,item1 in self.eddies.items(): xc1=item1['loc'][0][0] yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in",
"go_downleft(self,indexs,step): return [-step,-step] def go_down(self,indexs,step): return [-step,0] def go_downright(self,indexs,step): return [-step,step] def twoD_Gaussian(self,",
"= x self.y = y self.n = n self.opt = opt if type(self.n)",
"of Gausian. sigma_x,sigma_y (float): Deviation. theta (Float): Orientation. offset (Float): Gaussian Offset. Returns:",
"plotting purposes. Args: coords [x,y] (list|array): Coordinates in x and y. amplitude (float):",
"4: self.go_down, 5: self.go_downleft, 6: self.go_downright, 7: self.go_upleft, 8: self.go_upright, } #for _",
"or (distance < away_val*self.b).any() ) count = count + 1 if loc ==",
"LEN(x) != LEN(y) class Generate_field(): def __init__(self,a,b,n,x,y,opt=''): self.xlen=len(x) self.ylen=len(y) self.a = a*rnd.uniform(0.7, 1.3)",
"ii == 0] if len(numzeros) <= 1: distance[distance==0]=np.inf else: distance[distance==0] = away_val*self.a checker",
"#### BUG WHEN LEN(x) != LEN(y) class Generate_field(): def __init__(self,a,b,n,x,y,opt=''): self.xlen=len(x) self.ylen=len(y) self.a",
"< away_val*self.b).any() ) count = count + 1 if loc == True: return",
"newy]] eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] #pdb.set_trace() xc1=newx yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in",
"import numpy as np import random as rnd import pdb def dist(loc1,loc2): return",
"raise ValueError(\"No right input.\") def go_right(self,indexs,step): return [0,step] def go_upright(self,indexs,step): return [step,step] def",
"self.eddies.items(): xc1=item1['loc'][0][0] yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) distance[distance==0]=away_val*self.a checker = ((distance <",
"= { 1: self.go_up, 2: self.go_right, 3: self.go_left, 4: self.go_down, 5: self.go_downleft, 6:",
"< away_val*self.b).any() ) or loc==True count = 0 while checker or count >=",
"10000: newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx, newy]] eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] #pdb.set_trace() xc1=newx yc1=newy",
"= -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2) c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2) g = amplitude*np.exp( -",
"np.sqrt((loc1[0]-loc2[0])**2 + (loc2[1]-loc1[1])**2) #### BUG WHEN LEN(x) != LEN(y) class Generate_field(): def __init__(self,a,b,n,x,y,opt=''):",
"item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data)) data=data+gauss return data def pass_args(self,key,margin=50): self.x =",
"xc1=newx yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) numzeros = [ii for ii in",
"move_dict[rnd.randint(1, 8)] movcood=move_in_a_direction(ii,steps) return indexs[0]+movcood[0],indexs[1]+movcood[1] def assemble_field(self, N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for t in range(N):",
"keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss return data def reconstruct_field(self): data=np.zeros((self.xlen,self.ylen))",
"Coordinates in x and y. amplitude (float): Amplitud of gaussian. x0 , yo",
"scan_eddym function. ''' x=coords[0] y=coords[1] amplitude = coords[2] xo = float(coords[3]) yo =",
"= np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y = np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y) if self.opt == 'interaction' or self.opt ==",
"twoD_Gaussian(self, coords, sigma_x, sigma_y, theta, slopex=0, slopey=0, offset=0): ''' *************** twoD_Gaussian ******************* Build",
"[step,step] def go_up(self,indexs,step): return [step,0] def go_upleft(self,indexs,step): return [step,-step] def go_left(self,indexs,step): return [0,-step]",
"for plotting purposes. Args: coords [x,y] (list|array): Coordinates in x and y. amplitude",
"theta, slopex=0, slopey=0, offset=0): ''' *************** twoD_Gaussian ******************* Build a 2D gaussian. Notes:",
"in x and y. amplitude (float): Amplitud of gaussian. x0 , yo (float):",
"to do g.ravel().reshape(len(x),len(y)) for plotting purposes. Args: coords [x,y] (list|array): Coordinates in x",
"for ii in indexs: move_in_a_direction = move_dict[rnd.randint(1, 8)] movcood=move_in_a_direction(ii,steps) return indexs[0]+movcood[0],indexs[1]+movcood[1] def assemble_field(self,",
"yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) numzeros = [ii for ii in distance",
"if type(self.n) != list or type(self.n) != tuple: self.eddies = {'eddy_n%s' % ii:{'loc':[[rnd.randint(0,self.xlen-1),\\",
"x=coords[0] y=coords[1] amplitude = coords[2] xo = float(coords[3]) yo = float(coords[4]) xo =",
"if sigma_y or sigma_x != 0: a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2) b =",
"8: self.go_upright, } #for _ in range(steps): for ii in indexs: move_in_a_direction =",
"0 while checker or count >= 10000: newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx, newy]] eddies_loc=[item['loc'][-1] for",
"[ii for ii in distance if ii == 0] if len(numzeros) <= 1:",
"reconstruct_field(self): data=np.zeros((self.xlen,self.ylen)) for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data)) data=data+gauss return data",
"in eddies_loc]) distance[distance==0]=away_val*self.a checker = ((distance < away_val*self.a).any() or (distance < away_val*self.b).any() )",
"Gausian. sigma_x,sigma_y (float): Deviation. theta (Float): Orientation. offset (Float): Gaussian Offset. Returns: g.ravel()",
"+ (loc2[1]-loc1[1])**2) #### BUG WHEN LEN(x) != LEN(y) class Generate_field(): def __init__(self,a,b,n,x,y,opt=''): self.xlen=len(x)",
"opt if type(self.n) != list or type(self.n) != tuple: self.eddies = {'eddy_n%s' %",
"item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss return data def reconstruct_field(self): data=np.zeros((self.xlen,self.ylen)) for keys, item in self.eddies.items():",
"x0 , yo (float): Center of Gausian. sigma_x,sigma_y (float): Deviation. theta (Float): Orientation.",
"+ (np.sin(2*theta))/(4*sigma_y**2) c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2) g = amplitude*np.exp( - (a*((x-xo)**2) +",
"distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) numzeros = [ii for ii in distance if",
"movcood=move_in_a_direction(ii,steps) return indexs[0]+movcood[0],indexs[1]+movcood[1] def assemble_field(self, N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for t in range(N): #pdb.set_trace() if",
"twoD_Gaussian ******************* Build a 2D gaussian. Notes: Remmember to do g.ravel().reshape(len(x),len(y)) for plotting",
"distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) distance[distance==0]=away_val*self.a checker = ((distance < away_val*self.a).any() or (distance",
"c*((y-yo)**2))) else: g = (x-xo)*0 + (y-yo)*0 return g.ravel() def checkposition(self,away_val=5,loc=False): if loc",
"float(coords[4]) xo = float(xo) yo = float(yo) if sigma_y or sigma_x != 0:",
"(Float): Orientation. offset (Float): Gaussian Offset. Returns: g.ravel() (list|array) - Gaussian surface in",
"= float(yo) if sigma_y or sigma_x != 0: a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2)",
"return [-step,-step] def go_down(self,indexs,step): return [-step,0] def go_downright(self,indexs,step): return [-step,step] def twoD_Gaussian(self, coords,",
"for t in range(N): #pdb.set_trace() if self.opt == 'no_interaction' or self.opt == 'Nint':",
"= coords[2] xo = float(coords[3]) yo = float(coords[4]) xo = float(xo) yo =",
"if len(numzeros) <= 1: distance[distance==0]=np.inf else: distance[distance==0] = away_val*self.a checker = ((distance <",
"- Gaussian surface in a list. Usage: Check scan_eddym function. ''' x=coords[0] y=coords[1]",
"== 'Nint': self.eddies=self.checkposition(away_val=5,loc=True) else: pass for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:]))",
"(np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2) b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2) c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2)",
"for key,item in self.eddies.items()] else: eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] for key1,item1 in",
"away_val*self.a).any() or (distance < away_val*self.b).any() ) or loc==True count = 0 while checker",
"self.opt = opt if type(self.n) != list or type(self.n) != tuple: self.eddies =",
"#pdb.set_trace() if self.opt == 'no_interaction' or self.opt == 'Nint': self.eddies=self.checkposition(away_val=5,loc=True) else: pass for",
"(a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) + c*((y-yo)**2))) else: g = (x-xo)*0 + (y-yo)*0 return g.ravel()",
"def go_left(self,indexs,step): return [0,-step] def go_downleft(self,indexs,step): return [-step,-step] def go_down(self,indexs,step): return [-step,0] def",
"def go_downright(self,indexs,step): return [-step,step] def twoD_Gaussian(self, coords, sigma_x, sigma_y, theta, slopex=0, slopey=0, offset=0):",
"= (x-xo)*0 + (y-yo)*0 return g.ravel() def checkposition(self,away_val=5,loc=False): if loc == True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)]",
"or type(self.n) != tuple: self.eddies = {'eddy_n%s' % ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7,",
"eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] for key1,item1 in self.eddies.items(): xc1=item1['loc'][0][0] yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for",
"data=data+gauss return data def pass_args(self,key,margin=50): self.x = np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y = np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y) if",
"= b*rnd.uniform(0.7, 1.3) self.x = x self.y = y self.n = n self.opt",
"indexs: move_in_a_direction = move_dict[rnd.randint(1, 8)] movcood=move_in_a_direction(ii,steps) return indexs[0]+movcood[0],indexs[1]+movcood[1] def assemble_field(self, N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for",
"self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss return data def reconstruct_field(self): data=np.zeros((self.xlen,self.ylen)) for keys, item",
"Gaussian surface in a list. Usage: Check scan_eddym function. ''' x=coords[0] y=coords[1] amplitude",
"if self.opt == 'interaction' or self.opt == 'int': xloc=rnd.randint(0,self.xlen-1)+margin yloc=rnd.randint(0,self.ylen-1)+margin eddy_parms=(X,Y,self.eddies[key]['amp'],self.x[xloc],self.y[yloc]) else: eddy_parms=(X,Y,self.eddies[key]['amp'],self.x[self.eddies[key]['loc'][0][0]+margin],self.y[self.eddies[key]['loc'][0][1]+margin])",
"in range(steps): for ii in indexs: move_in_a_direction = move_dict[rnd.randint(1, 8)] movcood=move_in_a_direction(ii,steps) return indexs[0]+movcood[0],indexs[1]+movcood[1]",
"for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data)) data=data+gauss return data def pass_args(self,key,margin=50):",
"'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)} for ii in range(self.n)} else: raise ValueError(\"No right input.\")",
"self.y = np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y) if self.opt == 'interaction' or self.opt == 'int': xloc=rnd.randint(0,self.xlen-1)+margin",
"count = 0 while checker or count >= 10000: newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx, newy]]",
"1: distance[distance==0]=np.inf else: distance[distance==0] = away_val*self.a checker = ((distance < away_val*self.a).any() or (distance",
"(np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2) g = amplitude*np.exp( - (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) + c*((y-yo)**2))) else:",
"else: pass for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss return data",
"in distance if ii == 0] if len(numzeros) <= 1: distance[distance==0]=np.inf else: distance[distance==0]",
"return [0,step] def go_upright(self,indexs,step): return [step,step] def go_up(self,indexs,step): return [step,0] def go_upleft(self,indexs,step): return",
"self.x = x self.y = y self.n = n self.opt = opt if",
"[0,step] def go_upright(self,indexs,step): return [step,step] def go_up(self,indexs,step): return [step,0] def go_upleft(self,indexs,step): return [step,-step]",
"2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)} for ii in range(self.n)} else: raise ValueError(\"No right input.\") def",
"ValueError(\"No right input.\") def go_right(self,indexs,step): return [0,step] def go_upright(self,indexs,step): return [step,step] def go_up(self,indexs,step):",
"2*b*(x-xo)*(y-yo) + c*((y-yo)**2))) else: g = (x-xo)*0 + (y-yo)*0 return g.ravel() def checkposition(self,away_val=5,loc=False):",
"do g.ravel().reshape(len(x),len(y)) for plotting purposes. Args: coords [x,y] (list|array): Coordinates in x and",
"(np.sin(theta)**2)/(2*sigma_y**2) b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2) c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2) g =",
"theta (Float): Orientation. offset (Float): Gaussian Offset. Returns: g.ravel() (list|array) - Gaussian surface",
"np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y) if self.opt == 'interaction' or self.opt == 'int': xloc=rnd.randint(0,self.xlen-1)+margin yloc=rnd.randint(0,self.ylen-1)+margin eddy_parms=(X,Y,self.eddies[key]['amp'],self.x[xloc],self.y[yloc])",
"float(yo) if sigma_y or sigma_x != 0: a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2) b",
"= a*rnd.uniform(0.7, 1.3) self.b = b*rnd.uniform(0.7, 1.3) self.x = x self.y = y",
") or loc==True count = 0 while checker or count >= 10000: newx=rnd.randint(0,self.xlen-1)",
"make_random_walk(self,indexs, steps): move_dict = { 1: self.go_up, 2: self.go_right, 3: self.go_left, 4: self.go_down,",
"x self.y = y self.n = n self.opt = opt if type(self.n) !=",
"8)] movcood=move_in_a_direction(ii,steps) return indexs[0]+movcood[0],indexs[1]+movcood[1] def assemble_field(self, N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for t in range(N): #pdb.set_trace()",
"BUG WHEN LEN(x) != LEN(y) class Generate_field(): def __init__(self,a,b,n,x,y,opt=''): self.xlen=len(x) self.ylen=len(y) self.a =",
"go_left(self,indexs,step): return [0,-step] def go_downleft(self,indexs,step): return [-step,-step] def go_down(self,indexs,step): return [-step,0] def go_downright(self,indexs,step):",
"*************** twoD_Gaussian ******************* Build a 2D gaussian. Notes: Remmember to do g.ravel().reshape(len(x),len(y)) for",
"coords[2] xo = float(coords[3]) yo = float(coords[4]) xo = float(xo) yo = float(yo)",
"= (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2) b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2) c = (np.sin(theta)**2)/(2*sigma_x**2) +",
"def reconstruct_field(self): data=np.zeros((self.xlen,self.ylen)) for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data)) data=data+gauss return",
"offset (Float): Gaussian Offset. Returns: g.ravel() (list|array) - Gaussian surface in a list.",
"(list|array) - Gaussian surface in a list. Usage: Check scan_eddym function. ''' x=coords[0]",
"of gaussian. x0 , yo (float): Center of Gausian. sigma_x,sigma_y (float): Deviation. theta",
"indexs[0]+movcood[0],indexs[1]+movcood[1] def assemble_field(self, N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for t in range(N): #pdb.set_trace() if self.opt ==",
"self.go_left, 4: self.go_down, 5: self.go_downleft, 6: self.go_downright, 7: self.go_upleft, 8: self.go_upright, } #for",
"len(numzeros) <= 1: distance[distance==0]=np.inf else: distance[distance==0] = away_val*self.a checker = ((distance < away_val*self.a).any()",
"= n self.opt = opt if type(self.n) != list or type(self.n) != tuple:",
"eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] #pdb.set_trace() xc1=newx yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc])",
"Args: coords [x,y] (list|array): Coordinates in x and y. amplitude (float): Amplitud of",
"_ in range(steps): for ii in indexs: move_in_a_direction = move_dict[rnd.randint(1, 8)] movcood=move_in_a_direction(ii,steps) return",
"= away_val*self.a checker = ((distance < away_val*self.a).any() or (distance < away_val*self.b).any() ) count",
"self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data)) data=data+gauss return data def pass_args(self,key,margin=50): self.x = np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y",
"for ii in distance if ii == 0] if len(numzeros) <= 1: distance[distance==0]=np.inf",
"1: self.go_up, 2: self.go_right, 3: self.go_left, 4: self.go_down, 5: self.go_downleft, 6: self.go_downright, 7:",
"data def reconstruct_field(self): data=np.zeros((self.xlen,self.ylen)) for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data)) data=data+gauss",
"newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx, newy]] eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] #pdb.set_trace() xc1=newx yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for",
", yo (float): Center of Gausian. sigma_x,sigma_y (float): Deviation. theta (Float): Orientation. offset",
"return indexs[0]+movcood[0],indexs[1]+movcood[1] def assemble_field(self, N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for t in range(N): #pdb.set_trace() if self.opt",
"Remmember to do g.ravel().reshape(len(x),len(y)) for plotting purposes. Args: coords [x,y] (list|array): Coordinates in",
"range(steps): for ii in indexs: move_in_a_direction = move_dict[rnd.randint(1, 8)] movcood=move_in_a_direction(ii,steps) return indexs[0]+movcood[0],indexs[1]+movcood[1] def",
"yo (float): Center of Gausian. sigma_x,sigma_y (float): Deviation. theta (Float): Orientation. offset (Float):",
"self.go_downright, 7: self.go_upleft, 8: self.go_upright, } #for _ in range(steps): for ii in",
"ii in indexs: move_in_a_direction = move_dict[rnd.randint(1, 8)] movcood=move_in_a_direction(ii,steps) return indexs[0]+movcood[0],indexs[1]+movcood[1] def assemble_field(self, N,margin=50):",
"float(xo) yo = float(yo) if sigma_y or sigma_x != 0: a = (np.cos(theta)**2)/(2*sigma_x**2)",
"a list. Usage: Check scan_eddym function. ''' x=coords[0] y=coords[1] amplitude = coords[2] xo",
"def __init__(self,a,b,n,x,y,opt=''): self.xlen=len(x) self.ylen=len(y) self.a = a*rnd.uniform(0.7, 1.3) self.b = b*rnd.uniform(0.7, 1.3) self.x",
"WHEN LEN(x) != LEN(y) class Generate_field(): def __init__(self,a,b,n,x,y,opt=''): self.xlen=len(x) self.ylen=len(y) self.a = a*rnd.uniform(0.7,",
"type(self.n) != list or type(self.n) != tuple: self.eddies = {'eddy_n%s' % ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\",
"go_upright(self,indexs,step): return [step,step] def go_up(self,indexs,step): return [step,0] def go_upleft(self,indexs,step): return [step,-step] def go_left(self,indexs,step):",
"Orientation. offset (Float): Gaussian Offset. Returns: g.ravel() (list|array) - Gaussian surface in a",
"''' *************** twoD_Gaussian ******************* Build a 2D gaussian. Notes: Remmember to do g.ravel().reshape(len(x),len(y))",
"0: a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2) b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2) c =",
"+ (np.cos(theta)**2)/(2*sigma_y**2) g = amplitude*np.exp( - (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) + c*((y-yo)**2))) else: g",
"self.go_upright, } #for _ in range(steps): for ii in indexs: move_in_a_direction = move_dict[rnd.randint(1,",
"import pdb def dist(loc1,loc2): return np.sqrt((loc1[0]-loc2[0])**2 + (loc2[1]-loc1[1])**2) #### BUG WHEN LEN(x) !=",
"input.\") def go_right(self,indexs,step): return [0,step] def go_upright(self,indexs,step): return [step,step] def go_up(self,indexs,step): return [step,0]",
"self.eddies[key1]['loc']=[[newx, newy]] eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] #pdb.set_trace() xc1=newx yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj",
"if self.opt == 'no_interaction' or self.opt == 'Nint': self.eddies=self.checkposition(away_val=5,loc=True) else: pass for keys,",
"or loc==True count = 0 while checker or count >= 10000: newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1)",
"1 if loc == True: return self.eddies def make_random_walk(self,indexs, steps): move_dict = {",
"function. ''' x=coords[0] y=coords[1] amplitude = coords[2] xo = float(coords[3]) yo = float(coords[4])",
"self.go_down, 5: self.go_downleft, 6: self.go_downright, 7: self.go_upleft, 8: self.go_upright, } #for _ in",
"or (distance < away_val*self.b).any() ) or loc==True count = 0 while checker or",
"for ii,jj in eddies_loc]) distance[distance==0]=away_val*self.a checker = ((distance < away_val*self.a).any() or (distance <",
"rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)} for ii in range(self.n)} else: raise ValueError(\"No right",
"item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss return data def reconstruct_field(self): data=np.zeros((self.xlen,self.ylen)) for",
"Usage: Check scan_eddym function. ''' x=coords[0] y=coords[1] amplitude = coords[2] xo = float(coords[3])",
"g.ravel().reshape(len(x),len(y)) for plotting purposes. Args: coords [x,y] (list|array): Coordinates in x and y.",
"go_down(self,indexs,step): return [-step,0] def go_downright(self,indexs,step): return [-step,step] def twoD_Gaussian(self, coords, sigma_x, sigma_y, theta,",
"key,item in self.eddies.items()] else: eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] for key1,item1 in self.eddies.items():",
"return data def pass_args(self,key,margin=50): self.x = np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y = np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y) if self.opt",
"+ 2*b*(x-xo)*(y-yo) + c*((y-yo)**2))) else: g = (x-xo)*0 + (y-yo)*0 return g.ravel() def",
"checker = ((distance < away_val*self.a).any() or (distance < away_val*self.b).any() ) count = count",
"slopex=0, slopey=0, offset=0): ''' *************** twoD_Gaussian ******************* Build a 2D gaussian. Notes: Remmember",
"7: self.go_upleft, 8: self.go_upright, } #for _ in range(steps): for ii in indexs:",
"xc1=item1['loc'][0][0] yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) distance[distance==0]=away_val*self.a checker = ((distance < away_val*self.a).any()",
"a 2D gaussian. Notes: Remmember to do g.ravel().reshape(len(x),len(y)) for plotting purposes. Args: coords",
"(x-xo)*0 + (y-yo)*0 return g.ravel() def checkposition(self,away_val=5,loc=False): if loc == True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for",
"#pdb.set_trace() xc1=newx yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) numzeros = [ii for ii",
"pdb def dist(loc1,loc2): return np.sqrt((loc1[0]-loc2[0])**2 + (loc2[1]-loc1[1])**2) #### BUG WHEN LEN(x) != LEN(y)",
"#for _ in range(steps): for ii in indexs: move_in_a_direction = move_dict[rnd.randint(1, 8)] movcood=move_in_a_direction(ii,steps)",
"amplitude (float): Amplitud of gaussian. x0 , yo (float): Center of Gausian. sigma_x,sigma_y",
"= 0 while checker or count >= 10000: newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx, newy]] eddies_loc=[item['loc'][-1]",
"item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss return data def reconstruct_field(self): data=np.zeros((self.xlen,self.ylen)) for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0],",
"yo = float(coords[4]) xo = float(xo) yo = float(yo) if sigma_y or sigma_x",
"type(self.n) != tuple: self.eddies = {'eddy_n%s' % ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)}",
"else: raise ValueError(\"No right input.\") def go_right(self,indexs,step): return [0,step] def go_upright(self,indexs,step): return [step,step]",
"= [ii for ii in distance if ii == 0] if len(numzeros) <=",
"True: return self.eddies def make_random_walk(self,indexs, steps): move_dict = { 1: self.go_up, 2: self.go_right,",
"move_in_a_direction = move_dict[rnd.randint(1, 8)] movcood=move_in_a_direction(ii,steps) return indexs[0]+movcood[0],indexs[1]+movcood[1] def assemble_field(self, N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for t",
"= move_dict[rnd.randint(1, 8)] movcood=move_in_a_direction(ii,steps) return indexs[0]+movcood[0],indexs[1]+movcood[1] def assemble_field(self, N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for t in",
"(np.sin(2*theta))/(4*sigma_y**2) c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2) g = amplitude*np.exp( - (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo)",
"distance[distance==0]=np.inf else: distance[distance==0] = away_val*self.a checker = ((distance < away_val*self.a).any() or (distance <",
"t in range(N): #pdb.set_trace() if self.opt == 'no_interaction' or self.opt == 'Nint': self.eddies=self.checkposition(away_val=5,loc=True)",
"[0,-step] def go_downleft(self,indexs,step): return [-step,-step] def go_down(self,indexs,step): return [-step,0] def go_downright(self,indexs,step): return [-step,step]",
"range(N): #pdb.set_trace() if self.opt == 'no_interaction' or self.opt == 'Nint': self.eddies=self.checkposition(away_val=5,loc=True) else: pass",
"distance[distance==0] = away_val*self.a checker = ((distance < away_val*self.a).any() or (distance < away_val*self.b).any() )",
"'Nint': self.eddies=self.checkposition(away_val=5,loc=True) else: pass for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss",
">= 10000: newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx, newy]] eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] #pdb.set_trace() xc1=newx",
"numzeros = [ii for ii in distance if ii == 0] if len(numzeros)",
"rnd import pdb def dist(loc1,loc2): return np.sqrt((loc1[0]-loc2[0])**2 + (loc2[1]-loc1[1])**2) #### BUG WHEN LEN(x)",
"if loc == True: return self.eddies def make_random_walk(self,indexs, steps): move_dict = { 1:",
"self.go_downleft, 6: self.go_downright, 7: self.go_upleft, 8: self.go_upright, } #for _ in range(steps): for",
"y=coords[1] amplitude = coords[2] xo = float(coords[3]) yo = float(coords[4]) xo = float(xo)",
"''' x=coords[0] y=coords[1] amplitude = coords[2] xo = float(coords[3]) yo = float(coords[4]) xo",
"n self.opt = opt if type(self.n) != list or type(self.n) != tuple: self.eddies",
"go_upleft(self,indexs,step): return [step,-step] def go_left(self,indexs,step): return [0,-step] def go_downleft(self,indexs,step): return [-step,-step] def go_down(self,indexs,step):",
"item['angle']).reshape(np.shape(data)) data=data+gauss return data def pass_args(self,key,margin=50): self.x = np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y = np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y)",
"amplitude = coords[2] xo = float(coords[3]) yo = float(coords[4]) xo = float(xo) yo",
"ii,jj in eddies_loc]) numzeros = [ii for ii in distance if ii ==",
"ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)} for ii in range(self.n)} else: raise ValueError(\"No",
"(float): Amplitud of gaussian. x0 , yo (float): Center of Gausian. sigma_x,sigma_y (float):",
"or self.opt == 'Nint': self.eddies=self.checkposition(away_val=5,loc=True) else: pass for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0],",
"'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)} for ii in range(self.n)} else: raise ValueError(\"No right input.\") def go_right(self,indexs,step):",
"def go_up(self,indexs,step): return [step,0] def go_upleft(self,indexs,step): return [step,-step] def go_left(self,indexs,step): return [0,-step] def",
"{ 1: self.go_up, 2: self.go_right, 3: self.go_left, 4: self.go_down, 5: self.go_downleft, 6: self.go_downright,",
"as np import random as rnd import pdb def dist(loc1,loc2): return np.sqrt((loc1[0]-loc2[0])**2 +",
"gaussian. x0 , yo (float): Center of Gausian. sigma_x,sigma_y (float): Deviation. theta (Float):",
"def make_random_walk(self,indexs, steps): move_dict = { 1: self.go_up, 2: self.go_right, 3: self.go_left, 4:",
"move_dict = { 1: self.go_up, 2: self.go_right, 3: self.go_left, 4: self.go_down, 5: self.go_downleft,",
"= amplitude*np.exp( - (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) + c*((y-yo)**2))) else: g = (x-xo)*0 +",
"!= LEN(y) class Generate_field(): def __init__(self,a,b,n,x,y,opt=''): self.xlen=len(x) self.ylen=len(y) self.a = a*rnd.uniform(0.7, 1.3) self.b",
"for key1,item1 in self.eddies.items(): xc1=item1['loc'][0][0] yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) distance[distance==0]=away_val*self.a checker",
"tuple: self.eddies = {'eddy_n%s' % ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)} for ii",
"item['radius'][1], item['angle']).reshape(np.shape(data)) data=data+gauss return data def pass_args(self,key,margin=50): self.x = np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y = np.linspace(min(self.y),max(self.y),self.ylen+2*margin)",
"or count >= 10000: newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx, newy]] eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()]",
"sigma_x,sigma_y (float): Deviation. theta (Float): Orientation. offset (Float): Gaussian Offset. Returns: g.ravel() (list|array)",
"surface in a list. Usage: Check scan_eddym function. ''' x=coords[0] y=coords[1] amplitude =",
"== 0] if len(numzeros) <= 1: distance[distance==0]=np.inf else: distance[distance==0] = away_val*self.a checker =",
"for key,item in self.eddies.items()] #pdb.set_trace() xc1=newx yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) numzeros",
"checker = ((distance < away_val*self.a).any() or (distance < away_val*self.b).any() ) or loc==True count",
"(distance < away_val*self.b).any() ) or loc==True count = 0 while checker or count",
"for ii,jj in eddies_loc]) numzeros = [ii for ii in distance if ii",
"LEN(y) class Generate_field(): def __init__(self,a,b,n,x,y,opt=''): self.xlen=len(x) self.ylen=len(y) self.a = a*rnd.uniform(0.7, 1.3) self.b =",
"b*rnd.uniform(0.7, 1.3) self.x = x self.y = y self.n = n self.opt =",
"in self.eddies.items()] for key1,item1 in self.eddies.items(): xc1=item1['loc'][0][0] yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc])",
"in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss return data def reconstruct_field(self): data=np.zeros((self.xlen,self.ylen)) for keys,",
"a*rnd.uniform(0.7, 1.3) self.b = b*rnd.uniform(0.7, 1.3) self.x = x self.y = y self.n",
"sigma_y or sigma_x != 0: a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2) b = -(np.sin(2*theta))/(4*sigma_x**2)",
"g = (x-xo)*0 + (y-yo)*0 return g.ravel() def checkposition(self,away_val=5,loc=False): if loc == True:",
"g = amplitude*np.exp( - (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) + c*((y-yo)**2))) else: g = (x-xo)*0",
"checkposition(self,away_val=5,loc=False): if loc == True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for key,item in self.eddies.items()] else: eddies_loc=[item['loc'][-1] for",
"ii in distance if ii == 0] if len(numzeros) <= 1: distance[distance==0]=np.inf else:",
"eddies_loc]) numzeros = [ii for ii in distance if ii == 0] if",
"self.opt == 'Nint': self.eddies=self.checkposition(away_val=5,loc=True) else: pass for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1],",
"= np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y) if self.opt == 'interaction' or self.opt == 'int': xloc=rnd.randint(0,self.xlen-1)+margin yloc=rnd.randint(0,self.ylen-1)+margin",
"((distance < away_val*self.a).any() or (distance < away_val*self.b).any() ) or loc==True count = 0",
"self.opt == 'no_interaction' or self.opt == 'Nint': self.eddies=self.checkposition(away_val=5,loc=True) else: pass for keys, item",
"[-step,-step] def go_down(self,indexs,step): return [-step,0] def go_downright(self,indexs,step): return [-step,step] def twoD_Gaussian(self, coords, sigma_x,",
"(float): Deviation. theta (Float): Orientation. offset (Float): Gaussian Offset. Returns: g.ravel() (list|array) -",
"def pass_args(self,key,margin=50): self.x = np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y = np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y) if self.opt == 'interaction'",
"+ (np.sin(theta)**2)/(2*sigma_y**2) b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2) c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2) g",
"self.n = n self.opt = opt if type(self.n) != list or type(self.n) !=",
"data[t,:,:]=data[t,:,:]+gauss return data def reconstruct_field(self): data=np.zeros((self.xlen,self.ylen)) for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1],",
"[step,0] def go_upleft(self,indexs,step): return [step,-step] def go_left(self,indexs,step): return [0,-step] def go_downleft(self,indexs,step): return [-step,-step]",
"in a list. Usage: Check scan_eddym function. ''' x=coords[0] y=coords[1] amplitude = coords[2]",
"go_up(self,indexs,step): return [step,0] def go_upleft(self,indexs,step): return [step,-step] def go_left(self,indexs,step): return [0,-step] def go_downleft(self,indexs,step):",
"for key,item in self.eddies.items()] for key1,item1 in self.eddies.items(): xc1=item1['loc'][0][0] yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj",
"away_val*self.a checker = ((distance < away_val*self.a).any() or (distance < away_val*self.b).any() ) count =",
"return np.sqrt((loc1[0]-loc2[0])**2 + (loc2[1]-loc1[1])**2) #### BUG WHEN LEN(x) != LEN(y) class Generate_field(): def",
"Check scan_eddym function. ''' x=coords[0] y=coords[1] amplitude = coords[2] xo = float(coords[3]) yo",
"= opt if type(self.n) != list or type(self.n) != tuple: self.eddies = {'eddy_n%s'",
"yo = float(yo) if sigma_y or sigma_x != 0: a = (np.cos(theta)**2)/(2*sigma_x**2) +",
"!= 0: a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2) b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2) c",
"away_val*self.b).any() ) count = count + 1 if loc == True: return self.eddies",
"y. amplitude (float): Amplitud of gaussian. x0 , yo (float): Center of Gausian.",
"return [0,-step] def go_downleft(self,indexs,step): return [-step,-step] def go_down(self,indexs,step): return [-step,0] def go_downright(self,indexs,step): return",
"pass_args(self,key,margin=50): self.x = np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y = np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y) if self.opt == 'interaction' or",
"slopey=0, offset=0): ''' *************** twoD_Gaussian ******************* Build a 2D gaussian. Notes: Remmember to",
"keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data)) data=data+gauss return data def pass_args(self,key,margin=50): self.x",
"while checker or count >= 10000: newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx, newy]] eddies_loc=[item['loc'][-1] for key,item",
"count + 1 if loc == True: return self.eddies def make_random_walk(self,indexs, steps): move_dict",
"True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for key,item in self.eddies.items()] else: eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] for",
"dist(loc1,loc2): return np.sqrt((loc1[0]-loc2[0])**2 + (loc2[1]-loc1[1])**2) #### BUG WHEN LEN(x) != LEN(y) class Generate_field():",
"in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data)) data=data+gauss return data def pass_args(self,key,margin=50): self.x = np.linspace(min(self.x),max(self.x),self.xlen+2*margin)",
"3: self.go_left, 4: self.go_down, 5: self.go_downleft, 6: self.go_downright, 7: self.go_upleft, 8: self.go_upright, }",
"self.eddies.items()] for key1,item1 in self.eddies.items(): xc1=item1['loc'][0][0] yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) distance[distance==0]=away_val*self.a",
"Gaussian Offset. Returns: g.ravel() (list|array) - Gaussian surface in a list. Usage: Check",
"+ c*((y-yo)**2))) else: g = (x-xo)*0 + (y-yo)*0 return g.ravel() def checkposition(self,away_val=5,loc=False): if",
"purposes. Args: coords [x,y] (list|array): Coordinates in x and y. amplitude (float): Amplitud",
"******************* Build a 2D gaussian. Notes: Remmember to do g.ravel().reshape(len(x),len(y)) for plotting purposes.",
"else: distance[distance==0] = away_val*self.a checker = ((distance < away_val*self.a).any() or (distance < away_val*self.b).any()",
"= float(xo) yo = float(yo) if sigma_y or sigma_x != 0: a =",
"class Generate_field(): def __init__(self,a,b,n,x,y,opt=''): self.xlen=len(x) self.ylen=len(y) self.a = a*rnd.uniform(0.7, 1.3) self.b = b*rnd.uniform(0.7,",
"self.b = b*rnd.uniform(0.7, 1.3) self.x = x self.y = y self.n = n",
"self.go_up, 2: self.go_right, 3: self.go_left, 4: self.go_down, 5: self.go_downleft, 6: self.go_downright, 7: self.go_upleft,",
"range(self.n)} else: raise ValueError(\"No right input.\") def go_right(self,indexs,step): return [0,step] def go_upright(self,indexs,step): return",
"loc==True count = 0 while checker or count >= 10000: newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx,",
"and y. amplitude (float): Amplitud of gaussian. x0 , yo (float): Center of",
"0] if len(numzeros) <= 1: distance[distance==0]=np.inf else: distance[distance==0] = away_val*self.a checker = ((distance",
"a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2) b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2) c = (np.sin(theta)**2)/(2*sigma_x**2)",
"Center of Gausian. sigma_x,sigma_y (float): Deviation. theta (Float): Orientation. offset (Float): Gaussian Offset.",
"-(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2) c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2) g = amplitude*np.exp( - (a*((x-xo)**2)",
"amplitude*np.exp( - (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) + c*((y-yo)**2))) else: g = (x-xo)*0 + (y-yo)*0",
"count >= 10000: newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx, newy]] eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] #pdb.set_trace()",
"as rnd import pdb def dist(loc1,loc2): return np.sqrt((loc1[0]-loc2[0])**2 + (loc2[1]-loc1[1])**2) #### BUG WHEN",
"in self.eddies.items()] else: eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] for key1,item1 in self.eddies.items(): xc1=item1['loc'][0][0]",
"self.ylen=len(y) self.a = a*rnd.uniform(0.7, 1.3) self.b = b*rnd.uniform(0.7, 1.3) self.x = x self.y",
"numpy as np import random as rnd import pdb def dist(loc1,loc2): return np.sqrt((loc1[0]-loc2[0])**2",
"Offset. Returns: g.ravel() (list|array) - Gaussian surface in a list. Usage: Check scan_eddym",
"(loc2[1]-loc1[1])**2) #### BUG WHEN LEN(x) != LEN(y) class Generate_field(): def __init__(self,a,b,n,x,y,opt=''): self.xlen=len(x) self.ylen=len(y)",
"right input.\") def go_right(self,indexs,step): return [0,step] def go_upright(self,indexs,step): return [step,step] def go_up(self,indexs,step): return",
"< away_val*self.a).any() or (distance < away_val*self.b).any() ) count = count + 1 if",
"2D gaussian. Notes: Remmember to do g.ravel().reshape(len(x),len(y)) for plotting purposes. Args: coords [x,y]",
"def assemble_field(self, N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for t in range(N): #pdb.set_trace() if self.opt == 'no_interaction'",
"away_val*self.a).any() or (distance < away_val*self.b).any() ) count = count + 1 if loc",
"def go_downleft(self,indexs,step): return [-step,-step] def go_down(self,indexs,step): return [-step,0] def go_downright(self,indexs,step): return [-step,step] def",
"} #for _ in range(steps): for ii in indexs: move_in_a_direction = move_dict[rnd.randint(1, 8)]",
"!= tuple: self.eddies = {'eddy_n%s' % ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)} for",
"def go_right(self,indexs,step): return [0,step] def go_upright(self,indexs,step): return [step,step] def go_up(self,indexs,step): return [step,0] def",
"gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data)) data=data+gauss return data def pass_args(self,key,margin=50): self.x = np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y =",
"self.a = a*rnd.uniform(0.7, 1.3) self.b = b*rnd.uniform(0.7, 1.3) self.x = x self.y =",
"6: self.go_downright, 7: self.go_upleft, 8: self.go_upright, } #for _ in range(steps): for ii",
"< away_val*self.a).any() or (distance < away_val*self.b).any() ) or loc==True count = 0 while",
"sigma_y, theta, slopex=0, slopey=0, offset=0): ''' *************** twoD_Gaussian ******************* Build a 2D gaussian.",
"go_downright(self,indexs,step): return [-step,step] def twoD_Gaussian(self, coords, sigma_x, sigma_y, theta, slopex=0, slopey=0, offset=0): '''",
"ii in range(self.n)} else: raise ValueError(\"No right input.\") def go_right(self,indexs,step): return [0,step] def",
"if ii == 0] if len(numzeros) <= 1: distance[distance==0]=np.inf else: distance[distance==0] = away_val*self.a",
"go_right(self,indexs,step): return [0,step] def go_upright(self,indexs,step): return [step,step] def go_up(self,indexs,step): return [step,0] def go_upleft(self,indexs,step):",
"np import random as rnd import pdb def dist(loc1,loc2): return np.sqrt((loc1[0]-loc2[0])**2 + (loc2[1]-loc1[1])**2)",
"return g.ravel() def checkposition(self,away_val=5,loc=False): if loc == True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for key,item in self.eddies.items()]",
"self.eddies=self.checkposition(away_val=5,loc=True) else: pass for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss return",
"float(coords[3]) yo = float(coords[4]) xo = float(xo) yo = float(yo) if sigma_y or",
"== True: return self.eddies def make_random_walk(self,indexs, steps): move_dict = { 1: self.go_up, 2:",
"self.eddies.items()] else: eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] for key1,item1 in self.eddies.items(): xc1=item1['loc'][0][0] yc1=item1['loc'][0][1]",
"return [-step,step] def twoD_Gaussian(self, coords, sigma_x, sigma_y, theta, slopex=0, slopey=0, offset=0): ''' ***************",
"(y-yo)*0 return g.ravel() def checkposition(self,away_val=5,loc=False): if loc == True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for key,item in",
"import random as rnd import pdb def dist(loc1,loc2): return np.sqrt((loc1[0]-loc2[0])**2 + (loc2[1]-loc1[1])**2) ####",
"sigma_x != 0: a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2) b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2)",
"+ (y-yo)*0 return g.ravel() def checkposition(self,away_val=5,loc=False): if loc == True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for key,item",
"in range(self.n)} else: raise ValueError(\"No right input.\") def go_right(self,indexs,step): return [0,step] def go_upright(self,indexs,step):",
"Amplitud of gaussian. x0 , yo (float): Center of Gausian. sigma_x,sigma_y (float): Deviation.",
"count = count + 1 if loc == True: return self.eddies def make_random_walk(self,indexs,",
"g.ravel() def checkposition(self,away_val=5,loc=False): if loc == True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for key,item in self.eddies.items()] else:",
"np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y = np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y) if self.opt == 'interaction' or self.opt == 'int':",
"[x,y] (list|array): Coordinates in x and y. amplitude (float): Amplitud of gaussian. x0",
"data=np.zeros((self.xlen,self.ylen)) for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data)) data=data+gauss return data def",
"== 'interaction' or self.opt == 'int': xloc=rnd.randint(0,self.xlen-1)+margin yloc=rnd.randint(0,self.ylen-1)+margin eddy_parms=(X,Y,self.eddies[key]['amp'],self.x[xloc],self.y[yloc]) else: eddy_parms=(X,Y,self.eddies[key]['amp'],self.x[self.eddies[key]['loc'][0][0]+margin],self.y[self.eddies[key]['loc'][0][1]+margin]) return eddy_parms",
"!= list or type(self.n) != tuple: self.eddies = {'eddy_n%s' % ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0,",
"else: eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] for key1,item1 in self.eddies.items(): xc1=item1['loc'][0][0] yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]])",
"- (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) + c*((y-yo)**2))) else: g = (x-xo)*0 + (y-yo)*0 return",
"def twoD_Gaussian(self, coords, sigma_x, sigma_y, theta, slopex=0, slopey=0, offset=0): ''' *************** twoD_Gaussian *******************",
"== True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for key,item in self.eddies.items()] else: eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()]",
"(float): Center of Gausian. sigma_x,sigma_y (float): Deviation. theta (Float): Orientation. offset (Float): Gaussian",
") count = count + 1 if loc == True: return self.eddies def",
"self.x = np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y = np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y) if self.opt == 'interaction' or self.opt",
"gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss return data def reconstruct_field(self): data=np.zeros((self.xlen,self.ylen)) for keys, item in",
"in self.eddies.items(): xc1=item1['loc'][0][0] yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) distance[distance==0]=away_val*self.a checker = ((distance",
"eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for key,item in self.eddies.items()] else: eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] for key1,item1",
"= float(coords[4]) xo = float(xo) yo = float(yo) if sigma_y or sigma_x !=",
"Generate_field(): def __init__(self,a,b,n,x,y,opt=''): self.xlen=len(x) self.ylen=len(y) self.a = a*rnd.uniform(0.7, 1.3) self.b = b*rnd.uniform(0.7, 1.3)",
"== 'no_interaction' or self.opt == 'Nint': self.eddies=self.checkposition(away_val=5,loc=True) else: pass for keys, item in",
"5: self.go_downleft, 6: self.go_downright, 7: self.go_upleft, 8: self.go_upright, } #for _ in range(steps):",
"<= 1: distance[distance==0]=np.inf else: distance[distance==0] = away_val*self.a checker = ((distance < away_val*self.a).any() or",
"= y self.n = n self.opt = opt if type(self.n) != list or",
"away_val*self.b).any() ) or loc==True count = 0 while checker or count >= 10000:",
"key1,item1 in self.eddies.items(): xc1=item1['loc'][0][0] yc1=item1['loc'][0][1] distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) distance[distance==0]=away_val*self.a checker =",
"(list|array): Coordinates in x and y. amplitude (float): Amplitud of gaussian. x0 ,",
"loc == True: return self.eddies def make_random_walk(self,indexs, steps): move_dict = { 1: self.go_up,",
"1.3) self.x = x self.y = y self.n = n self.opt = opt",
"gaussian. Notes: Remmember to do g.ravel().reshape(len(x),len(y)) for plotting purposes. Args: coords [x,y] (list|array):",
"(np.cos(theta)**2)/(2*sigma_y**2) g = amplitude*np.exp( - (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) + c*((y-yo)**2))) else: g =",
"return self.eddies def make_random_walk(self,indexs, steps): move_dict = { 1: self.go_up, 2: self.go_right, 3:",
"b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2) c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2) g = amplitude*np.exp(",
"for ii in range(self.n)} else: raise ValueError(\"No right input.\") def go_right(self,indexs,step): return [0,step]",
"N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for t in range(N): #pdb.set_trace() if self.opt == 'no_interaction' or self.opt",
"= (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2) g = amplitude*np.exp( - (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) + c*((y-yo)**2)))",
"newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx, newy]] eddies_loc=[item['loc'][-1] for key,item in self.eddies.items()] #pdb.set_trace() xc1=newx yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]])",
"distance if ii == 0] if len(numzeros) <= 1: distance[distance==0]=np.inf else: distance[distance==0] =",
"key,item in self.eddies.items()] #pdb.set_trace() xc1=newx yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) numzeros =",
"[step,-step] def go_left(self,indexs,step): return [0,-step] def go_downleft(self,indexs,step): return [-step,-step] def go_down(self,indexs,step): return [-step,0]",
"in eddies_loc]) numzeros = [ii for ii in distance if ii == 0]",
"self.go_upleft, 8: self.go_upright, } #for _ in range(steps): for ii in indexs: move_in_a_direction",
"loc == True: eddies_loc=[[rnd.randint(0,self.xlen-1),rnd.randint(0,self.ylen-1)] for key,item in self.eddies.items()] else: eddies_loc=[item['loc'][-1] for key,item in",
"list or type(self.n) != tuple: self.eddies = {'eddy_n%s' % ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\",
"eddies_loc]) distance[distance==0]=away_val*self.a checker = ((distance < away_val*self.a).any() or (distance < away_val*self.b).any() ) or",
"2: self.go_right, 3: self.go_left, 4: self.go_down, 5: self.go_downleft, 6: self.go_downright, 7: self.go_upleft, 8:",
"pass for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss return data def",
"c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2) g = amplitude*np.exp( - (a*((x-xo)**2) + 2*b*(x-xo)*(y-yo) +",
"Deviation. theta (Float): Orientation. offset (Float): Gaussian Offset. Returns: g.ravel() (list|array) - Gaussian",
"self.eddies def make_random_walk(self,indexs, steps): move_dict = { 1: self.go_up, 2: self.go_right, 3: self.go_left,",
"1.3)} for ii in range(self.n)} else: raise ValueError(\"No right input.\") def go_right(self,indexs,step): return",
"xo = float(xo) yo = float(yo) if sigma_y or sigma_x != 0: a",
"offset=0): ''' *************** twoD_Gaussian ******************* Build a 2D gaussian. Notes: Remmember to do",
"y self.n = n self.opt = opt if type(self.n) != list or type(self.n)",
"return [step,-step] def go_left(self,indexs,step): return [0,-step] def go_downleft(self,indexs,step): return [-step,-step] def go_down(self,indexs,step): return",
"= ((distance < away_val*self.a).any() or (distance < away_val*self.b).any() ) or loc==True count =",
"1.3) self.b = b*rnd.uniform(0.7, 1.3) self.x = x self.y = y self.n =",
"= float(coords[3]) yo = float(coords[4]) xo = float(xo) yo = float(yo) if sigma_y",
"(distance < away_val*self.b).any() ) count = count + 1 if loc == True:",
"g.ravel() (list|array) - Gaussian surface in a list. Usage: Check scan_eddym function. '''",
"data def pass_args(self,key,margin=50): self.x = np.linspace(min(self.x),max(self.x),self.xlen+2*margin) self.y = np.linspace(min(self.y),max(self.y),self.ylen+2*margin) X,Y=np.meshgrid(self.x,self.y) if self.opt ==",
"checker or count >= 10000: newx=rnd.randint(0,self.xlen-1) newy=rnd.randint(0,self.ylen-1) self.eddies[key1]['loc']=[[newx, newy]] eddies_loc=[item['loc'][-1] for key,item in",
"self.opt == 'interaction' or self.opt == 'int': xloc=rnd.randint(0,self.xlen-1)+margin yloc=rnd.randint(0,self.ylen-1)+margin eddy_parms=(X,Y,self.eddies[key]['amp'],self.x[xloc],self.y[yloc]) else: eddy_parms=(X,Y,self.eddies[key]['amp'],self.x[self.eddies[key]['loc'][0][0]+margin],self.y[self.eddies[key]['loc'][0][1]+margin]) return",
"coords [x,y] (list|array): Coordinates in x and y. amplitude (float): Amplitud of gaussian.",
"def go_upleft(self,indexs,step): return [step,-step] def go_left(self,indexs,step): return [0,-step] def go_downleft(self,indexs,step): return [-step,-step] def",
"in self.eddies.items()] #pdb.set_trace() xc1=newx yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) numzeros = [ii",
"= {'eddy_n%s' % ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)} for ii in range(self.n)}",
"self.go_right, 3: self.go_left, 4: self.go_down, 5: self.go_downleft, 6: self.go_downright, 7: self.go_upleft, 8: self.go_upright,",
"return [-step,0] def go_downright(self,indexs,step): return [-step,step] def twoD_Gaussian(self, coords, sigma_x, sigma_y, theta, slopex=0,",
"[-step,step] def twoD_Gaussian(self, coords, sigma_x, sigma_y, theta, slopex=0, slopey=0, offset=0): ''' *************** twoD_Gaussian",
"= count + 1 if loc == True: return self.eddies def make_random_walk(self,indexs, steps):",
"self.eddies = {'eddy_n%s' % ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)} for ii in",
"return [step,step] def go_up(self,indexs,step): return [step,0] def go_upleft(self,indexs,step): return [step,-step] def go_left(self,indexs,step): return",
"sigma_x, sigma_y, theta, slopex=0, slopey=0, offset=0): ''' *************** twoD_Gaussian ******************* Build a 2D",
"% ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)} for ii in range(self.n)} else: raise",
"Build a 2D gaussian. Notes: Remmember to do g.ravel().reshape(len(x),len(y)) for plotting purposes. Args:",
"assemble_field(self, N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin)) for t in range(N): #pdb.set_trace() if self.opt == 'no_interaction' or",
"def go_upright(self,indexs,step): return [step,step] def go_up(self,indexs,step): return [step,0] def go_upleft(self,indexs,step): return [step,-step] def",
"in indexs: move_in_a_direction = move_dict[rnd.randint(1, 8)] movcood=move_in_a_direction(ii,steps) return indexs[0]+movcood[0],indexs[1]+movcood[1] def assemble_field(self, N,margin=50): data=np.zeros((N,self.xlen+2*margin,self.ylen+2*margin))",
"random as rnd import pdb def dist(loc1,loc2): return np.sqrt((loc1[0]-loc2[0])**2 + (loc2[1]-loc1[1])**2) #### BUG",
"self.xlen=len(x) self.ylen=len(y) self.a = a*rnd.uniform(0.7, 1.3) self.b = b*rnd.uniform(0.7, 1.3) self.x = x",
"in range(N): #pdb.set_trace() if self.opt == 'no_interaction' or self.opt == 'Nint': self.eddies=self.checkposition(away_val=5,loc=True) else:",
"ii,jj in eddies_loc]) distance[distance==0]=away_val*self.a checker = ((distance < away_val*self.a).any() or (distance < away_val*self.b).any()",
"((distance < away_val*self.a).any() or (distance < away_val*self.b).any() ) count = count + 1",
"steps): move_dict = { 1: self.go_up, 2: self.go_right, 3: self.go_left, 4: self.go_down, 5:",
"coords, sigma_x, sigma_y, theta, slopex=0, slopey=0, offset=0): ''' *************** twoD_Gaussian ******************* Build a",
"Returns: g.ravel() (list|array) - Gaussian surface in a list. Usage: Check scan_eddym function.",
"distance[distance==0]=away_val*self.a checker = ((distance < away_val*self.a).any() or (distance < away_val*self.b).any() ) or loc==True",
"return data def reconstruct_field(self): data=np.zeros((self.xlen,self.ylen)) for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data))",
"def dist(loc1,loc2): return np.sqrt((loc1[0]-loc2[0])**2 + (loc2[1]-loc1[1])**2) #### BUG WHEN LEN(x) != LEN(y) class",
"X,Y=np.meshgrid(self.x,self.y) if self.opt == 'interaction' or self.opt == 'int': xloc=rnd.randint(0,self.xlen-1)+margin yloc=rnd.randint(0,self.ylen-1)+margin eddy_parms=(X,Y,self.eddies[key]['amp'],self.x[xloc],self.y[yloc]) else:",
"for keys, item in self.eddies.items(): gauss=self.twoD_Gaussian(self.pass_args(keys,margin),item['radius'][0], item['radius'][1], item['angle']).reshape(np.shape(data[0,:,:])) data[t,:,:]=data[t,:,:]+gauss return data def reconstruct_field(self):",
"__init__(self,a,b,n,x,y,opt=''): self.xlen=len(x) self.ylen=len(y) self.a = a*rnd.uniform(0.7, 1.3) self.b = b*rnd.uniform(0.7, 1.3) self.x =",
"xo = float(coords[3]) yo = float(coords[4]) xo = float(xo) yo = float(yo) if",
"Notes: Remmember to do g.ravel().reshape(len(x),len(y)) for plotting purposes. Args: coords [x,y] (list|array): Coordinates",
"{'eddy_n%s' % ii:{'loc':[[rnd.randint(0,self.xlen-1),\\ rnd.randint(0,self.ylen-1)]],'grow':True,\\ 'radius':[self.a,self.b],'angle':rnd.uniform(0, 2*np.pi),\\ 'amp':rnd.choice([-1,1])*rnd.uniform(0.7, 1.3)} for ii in range(self.n)} else:",
"[-step,0] def go_downright(self,indexs,step): return [-step,step] def twoD_Gaussian(self, coords, sigma_x, sigma_y, theta, slopex=0, slopey=0,",
"(Float): Gaussian Offset. Returns: g.ravel() (list|array) - Gaussian surface in a list. Usage:",
"self.eddies.items()] #pdb.set_trace() xc1=newx yc1=newy distance=np.array([dist([self.x[xc1],self.y[yc1]],[self.x[ii],self.y[jj]]) for ii,jj in eddies_loc]) numzeros = [ii for",
"'no_interaction' or self.opt == 'Nint': self.eddies=self.checkposition(away_val=5,loc=True) else: pass for keys, item in self.eddies.items():",
"or sigma_x != 0: a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2) b = -(np.sin(2*theta))/(4*sigma_x**2) +"
] |
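A minimal usage sketch for Generate_field above; the grid, radii, and eddy count are illustrative assumptions, not values from the original source:

# Usage sketch (assumed parameters): ten fixed eddies on a square grid.
import numpy as np

x = np.linspace(0, 100, 200)
y = np.linspace(0, 100, 200)
field = Generate_field(a=5, b=5, n=10, x=x, y=y)   # default opt: eddies stay put
data = field.assemble_field(N=3, margin=50)        # 3 snapshots on a padded grid
print(data.shape)                                  # (3, 300, 300)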
<filename>examples/scheduling.py
#!/usr/bin/python
"""
An example of scheduling time-based events in the background.

Download the latest EUR/USD exchange rate from Yahoo every 30 seconds in the
background; the rendered Flask web page can use the latest value without
having to do the request itself.

Note this example is for demonstration purposes only, and is not actually
used in the real world. You should not do this in a real application without
reading Yahoo's terms-of-service and following them.
"""

from __future__ import print_function

from flask import Flask
from twisted.internet.task import LoopingCall
from twisted.web.client import getPage
from twisted.python import log
from crochet import wait_for, run_in_reactor, setup
setup()


# Twisted code:
class _ExchangeRate(object):
    """Download an exchange rate from Yahoo Finance using Twisted."""

    def __init__(self, name):
        self._value = None
        self._name = name

    # External API:
    def latest_value(self):
        """Return the latest exchange rate value.

        May be None if no value is available.
        """
        return self._value

    def start(self):
        """Start the background process."""
        self._lc = LoopingCall(self._download)
        # Run immediately, and then every 30 seconds:
        self._lc.start(30, now=True)

    def _download(self):
        """Download the page."""
        print("Downloading!")

        def parse(result):
            print("Got %r back from Yahoo." % (result,))
            values = result.strip().split(",")
            self._value = float(values[1])

        d = getPage(
            "http://download.finance.yahoo.com/d/quotes.csv?e=.csv&f=c4l1&s=%s=X"
            % (self._name,))
        d.addCallback(parse)
        d.addErrback(log.err)
        return d


# Blocking wrapper:
class ExchangeRate(object):
    """Blocking API for downloading exchange rate."""

    def __init__(self, name):
        self._exchange = _ExchangeRate(name)

    @run_in_reactor
    def start(self):
        self._exchange.start()

    @wait_for(timeout=1)
    def latest_value(self):
        """Return the latest exchange rate value.

        May be None if no value is available.
        """
        return self._exchange.latest_value()


EURUSD = ExchangeRate("EURUSD")
app = Flask(__name__)


@app.route('/')
def index():
    rate = EURUSD.latest_value()
    if rate is None:
        rate = "unavailable, please refresh the page"
    return "Current EUR/USD exchange rate is %s." % (rate,)


if __name__ == '__main__':
    import sys, logging
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    EURUSD.start()
    app.run()
[
"<reponame>josemarin7/Python-OpenCV-Recognition-via-Camera<gh_stars>0 import cv2 import numpy as np img = cv2.imread(\"lena256rgb.jpg\") cv2.imshow(\"Normal\", img) cv2.waitKey(0)",
"= cv2.imread(\"lena256rgb.jpg\") cv2.imshow(\"Normal\", img) cv2.waitKey(0) face = img[95:195, 100:180] cv2.imshow(\"Face\", face) cv2.waitKey(0) body",
"cv2.imread(\"lena256rgb.jpg\") cv2.imshow(\"Normal\", img) cv2.waitKey(0) face = img[95:195, 100:180] cv2.imshow(\"Face\", face) cv2.waitKey(0) body =",
"cv2 import numpy as np img = cv2.imread(\"lena256rgb.jpg\") cv2.imshow(\"Normal\", img) cv2.waitKey(0) face =",
"numpy as np img = cv2.imread(\"lena256rgb.jpg\") cv2.imshow(\"Normal\", img) cv2.waitKey(0) face = img[95:195, 100:180]",
"import numpy as np img = cv2.imread(\"lena256rgb.jpg\") cv2.imshow(\"Normal\", img) cv2.waitKey(0) face = img[95:195,",
"img = cv2.imread(\"lena256rgb.jpg\") cv2.imshow(\"Normal\", img) cv2.waitKey(0) face = img[95:195, 100:180] cv2.imshow(\"Face\", face) cv2.waitKey(0)",
"cv2.waitKey(0) face = img[95:195, 100:180] cv2.imshow(\"Face\", face) cv2.waitKey(0) body = img[20:, 35:210] cv2.imshow(\"Body\",",
"= img[95:195, 100:180] cv2.imshow(\"Face\", face) cv2.waitKey(0) body = img[20:, 35:210] cv2.imshow(\"Body\", body) cv2.waitKey(0)",
"cv2.imshow(\"Normal\", img) cv2.waitKey(0) face = img[95:195, 100:180] cv2.imshow(\"Face\", face) cv2.waitKey(0) body = img[20:,",
"img[95:195, 100:180] cv2.imshow(\"Face\", face) cv2.waitKey(0) body = img[20:, 35:210] cv2.imshow(\"Body\", body) cv2.waitKey(0) cv2.destroyAllWindows()",
"import cv2 import numpy as np img = cv2.imread(\"lena256rgb.jpg\") cv2.imshow(\"Normal\", img) cv2.waitKey(0) face",
"as np img = cv2.imread(\"lena256rgb.jpg\") cv2.imshow(\"Normal\", img) cv2.waitKey(0) face = img[95:195, 100:180] cv2.imshow(\"Face\",",
"face = img[95:195, 100:180] cv2.imshow(\"Face\", face) cv2.waitKey(0) body = img[20:, 35:210] cv2.imshow(\"Body\", body)",
"np img = cv2.imread(\"lena256rgb.jpg\") cv2.imshow(\"Normal\", img) cv2.waitKey(0) face = img[95:195, 100:180] cv2.imshow(\"Face\", face)",
"img) cv2.waitKey(0) face = img[95:195, 100:180] cv2.imshow(\"Face\", face) cv2.waitKey(0) body = img[20:, 35:210]"
] |
[
"imgaug.augmenters as iaa class Augmenter(object): def __init__(self): self.seq_aug = iaa.SomeOf((1, 2), [ iaa.OneOf([",
"50)) ]), iaa.OneOf([ iaa.Fliplr(0.5), iaa.Affine(scale=(0.8, 1.2)), iaa.Affine(translate_percent={'x': (-0.2, 0.2), 'y': (-0.2, 0.2)}), iaa.Affine(rotate=(-30,",
"]), iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5, 1.0)), iaa.MedianBlur(k=(3, 5)), iaa.MotionBlur(k=5, angle=[-45, 45]) ]), iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5),",
"iaa.Affine(rotate=(-30, 30)), iaa.Affine(shear={'x': (-15, 15), 'y': (-15, 15)}) ]) ], random_order=True) def __call__(self,",
"1.0)), iaa.MedianBlur(k=(3, 5)), iaa.MotionBlur(k=5, angle=[-45, 45]) ]), iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5), add=(-30, 30)), iaa.Grayscale(alpha=(0.5,",
"per_channel=0.5), iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1, 0.2)) ]), iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5, 1.0)), iaa.MedianBlur(k=(3, 5)), iaa.MotionBlur(k=5, angle=[-45,",
"def __init__(self): self.seq_aug = iaa.SomeOf((1, 2), [ iaa.OneOf([ #iaa.Dropout(p=(0.1, 0.2)), iaa.CoarseDropout(0.05, size_percent=0.1, per_channel=0.5),",
"__init__(self): self.seq_aug = iaa.SomeOf((1, 2), [ iaa.OneOf([ #iaa.Dropout(p=(0.1, 0.2)), iaa.CoarseDropout(0.05, size_percent=0.1, per_channel=0.5), iaa.SaltAndPepper(0.05),",
"15), 'y': (-15, 15)}) ]) ], random_order=True) def __call__(self, sample): image, label =",
"(-15, 15)}) ]) ], random_order=True) def __call__(self, sample): image, label = sample['image'], sample['label']",
"iaa.Fliplr(0.5), iaa.Affine(scale=(0.8, 1.2)), iaa.Affine(translate_percent={'x': (-0.2, 0.2), 'y': (-0.2, 0.2)}), iaa.Affine(rotate=(-30, 30)), iaa.Affine(shear={'x': (-15,",
"iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1, 0.2)) ]), iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5, 1.0)), iaa.MedianBlur(k=(3, 5)), iaa.MotionBlur(k=5, angle=[-45, 45])",
"(-0.2, 0.2), 'y': (-0.2, 0.2)}), iaa.Affine(rotate=(-30, 30)), iaa.Affine(shear={'x': (-15, 15), 'y': (-15, 15)})",
"class Augmenter(object): def __init__(self): self.seq_aug = iaa.SomeOf((1, 2), [ iaa.OneOf([ #iaa.Dropout(p=(0.1, 0.2)), iaa.CoarseDropout(0.05,",
"(-15, 15), 'y': (-15, 15)}) ]) ], random_order=True) def __call__(self, sample): image, label",
"45]) ]), iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5), add=(-30, 30)), iaa.Grayscale(alpha=(0.5, 1.0)), iaa.AddToHueAndSaturation((-50, 50)) ]), iaa.OneOf([",
"#iaa.Dropout(p=(0.1, 0.2)), iaa.CoarseDropout(0.05, size_percent=0.1, per_channel=0.5), iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1, 0.2)) ]), iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5, 1.0)),",
"<reponame>meet-minimalist/Learn-pytorch-in-one-example<filename>utils/transforms/augmenter.py import imgaug.augmenters as iaa class Augmenter(object): def __init__(self): self.seq_aug = iaa.SomeOf((1, 2),",
"[ iaa.OneOf([ #iaa.Dropout(p=(0.1, 0.2)), iaa.CoarseDropout(0.05, size_percent=0.1, per_channel=0.5), iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1, 0.2)) ]), iaa.OneOf([",
"add=(-30, 30)), iaa.Grayscale(alpha=(0.5, 1.0)), iaa.AddToHueAndSaturation((-50, 50)) ]), iaa.OneOf([ iaa.Fliplr(0.5), iaa.Affine(scale=(0.8, 1.2)), iaa.Affine(translate_percent={'x': (-0.2,",
"1.2)), iaa.Affine(translate_percent={'x': (-0.2, 0.2), 'y': (-0.2, 0.2)}), iaa.Affine(rotate=(-30, 30)), iaa.Affine(shear={'x': (-15, 15), 'y':",
"size_percent=(0.1, 0.2)) ]), iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5, 1.0)), iaa.MedianBlur(k=(3, 5)), iaa.MotionBlur(k=5, angle=[-45, 45]) ]), iaa.OneOf([",
"import imgaug.augmenters as iaa class Augmenter(object): def __init__(self): self.seq_aug = iaa.SomeOf((1, 2), [",
"iaa.OneOf([ #iaa.Dropout(p=(0.1, 0.2)), iaa.CoarseDropout(0.05, size_percent=0.1, per_channel=0.5), iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1, 0.2)) ]), iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5,",
"], random_order=True) def __call__(self, sample): image, label = sample['image'], sample['label'] image = self.seq_aug(image=image)",
"iaa.Affine(translate_percent={'x': (-0.2, 0.2), 'y': (-0.2, 0.2)}), iaa.Affine(rotate=(-30, 30)), iaa.Affine(shear={'x': (-15, 15), 'y': (-15,",
"as iaa class Augmenter(object): def __init__(self): self.seq_aug = iaa.SomeOf((1, 2), [ iaa.OneOf([ #iaa.Dropout(p=(0.1,",
"angle=[-45, 45]) ]), iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5), add=(-30, 30)), iaa.Grayscale(alpha=(0.5, 1.0)), iaa.AddToHueAndSaturation((-50, 50)) ]),",
"30)), iaa.Affine(shear={'x': (-15, 15), 'y': (-15, 15)}) ]) ], random_order=True) def __call__(self, sample):",
"iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5), add=(-30, 30)), iaa.Grayscale(alpha=(0.5, 1.0)), iaa.AddToHueAndSaturation((-50, 50)) ]), iaa.OneOf([ iaa.Fliplr(0.5), iaa.Affine(scale=(0.8,",
"0.2)) ]), iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5, 1.0)), iaa.MedianBlur(k=(3, 5)), iaa.MotionBlur(k=5, angle=[-45, 45]) ]), iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5,",
"2), [ iaa.OneOf([ #iaa.Dropout(p=(0.1, 0.2)), iaa.CoarseDropout(0.05, size_percent=0.1, per_channel=0.5), iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1, 0.2)) ]),",
"1.0)), iaa.AddToHueAndSaturation((-50, 50)) ]), iaa.OneOf([ iaa.Fliplr(0.5), iaa.Affine(scale=(0.8, 1.2)), iaa.Affine(translate_percent={'x': (-0.2, 0.2), 'y': (-0.2,",
"iaa.AddToHueAndSaturation((-50, 50)) ]), iaa.OneOf([ iaa.Fliplr(0.5), iaa.Affine(scale=(0.8, 1.2)), iaa.Affine(translate_percent={'x': (-0.2, 0.2), 'y': (-0.2, 0.2)}),",
"iaa.Affine(shear={'x': (-15, 15), 'y': (-15, 15)}) ]) ], random_order=True) def __call__(self, sample): image,",
"__call__(self, sample): image, label = sample['image'], sample['label'] image = self.seq_aug(image=image) return {'image':image, 'label':label}",
"0.2), 'y': (-0.2, 0.2)}), iaa.Affine(rotate=(-30, 30)), iaa.Affine(shear={'x': (-15, 15), 'y': (-15, 15)}) ])",
"iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1, 0.2)) ]), iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5, 1.0)), iaa.MedianBlur(k=(3, 5)), iaa.MotionBlur(k=5, angle=[-45, 45]) ]),",
"= iaa.SomeOf((1, 2), [ iaa.OneOf([ #iaa.Dropout(p=(0.1, 0.2)), iaa.CoarseDropout(0.05, size_percent=0.1, per_channel=0.5), iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1,",
"iaa.Affine(scale=(0.8, 1.2)), iaa.Affine(translate_percent={'x': (-0.2, 0.2), 'y': (-0.2, 0.2)}), iaa.Affine(rotate=(-30, 30)), iaa.Affine(shear={'x': (-15, 15),",
"iaa.OneOf([ iaa.Fliplr(0.5), iaa.Affine(scale=(0.8, 1.2)), iaa.Affine(translate_percent={'x': (-0.2, 0.2), 'y': (-0.2, 0.2)}), iaa.Affine(rotate=(-30, 30)), iaa.Affine(shear={'x':",
"'y': (-15, 15)}) ]) ], random_order=True) def __call__(self, sample): image, label = sample['image'],",
"iaa.GaussianBlur(sigma=(0.5, 1.0)), iaa.MedianBlur(k=(3, 5)), iaa.MotionBlur(k=5, angle=[-45, 45]) ]), iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5), add=(-30, 30)),",
"self.seq_aug = iaa.SomeOf((1, 2), [ iaa.OneOf([ #iaa.Dropout(p=(0.1, 0.2)), iaa.CoarseDropout(0.05, size_percent=0.1, per_channel=0.5), iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03,",
"'y': (-0.2, 0.2)}), iaa.Affine(rotate=(-30, 30)), iaa.Affine(shear={'x': (-15, 15), 'y': (-15, 15)}) ]) ],",
"]) ], random_order=True) def __call__(self, sample): image, label = sample['image'], sample['label'] image =",
"iaa.Grayscale(alpha=(0.5, 1.0)), iaa.AddToHueAndSaturation((-50, 50)) ]), iaa.OneOf([ iaa.Fliplr(0.5), iaa.Affine(scale=(0.8, 1.2)), iaa.Affine(translate_percent={'x': (-0.2, 0.2), 'y':",
"0.2)), iaa.CoarseDropout(0.05, size_percent=0.1, per_channel=0.5), iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1, 0.2)) ]), iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5, 1.0)), iaa.MedianBlur(k=(3,",
"iaa.CoarseDropout(0.05, size_percent=0.1, per_channel=0.5), iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1, 0.2)) ]), iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5, 1.0)), iaa.MedianBlur(k=(3, 5)),",
"iaa.MotionBlur(k=5, angle=[-45, 45]) ]), iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5), add=(-30, 30)), iaa.Grayscale(alpha=(0.5, 1.0)), iaa.AddToHueAndSaturation((-50, 50))",
"(-0.2, 0.2)}), iaa.Affine(rotate=(-30, 30)), iaa.Affine(shear={'x': (-15, 15), 'y': (-15, 15)}) ]) ], random_order=True)",
"iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5), add=(-30, 30)), iaa.Grayscale(alpha=(0.5, 1.0)), iaa.AddToHueAndSaturation((-50, 50)) ]), iaa.OneOf([ iaa.Fliplr(0.5), iaa.Affine(scale=(0.8, 1.2)),",
"random_order=True) def __call__(self, sample): image, label = sample['image'], sample['label'] image = self.seq_aug(image=image) return",
"iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5, 1.0)), iaa.MedianBlur(k=(3, 5)), iaa.MotionBlur(k=5, angle=[-45, 45]) ]), iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5), add=(-30,",
"1.5), add=(-30, 30)), iaa.Grayscale(alpha=(0.5, 1.0)), iaa.AddToHueAndSaturation((-50, 50)) ]), iaa.OneOf([ iaa.Fliplr(0.5), iaa.Affine(scale=(0.8, 1.2)), iaa.Affine(translate_percent={'x':",
"]), iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5), add=(-30, 30)), iaa.Grayscale(alpha=(0.5, 1.0)), iaa.AddToHueAndSaturation((-50, 50)) ]), iaa.OneOf([ iaa.Fliplr(0.5),",
"iaa.SomeOf((1, 2), [ iaa.OneOf([ #iaa.Dropout(p=(0.1, 0.2)), iaa.CoarseDropout(0.05, size_percent=0.1, per_channel=0.5), iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1, 0.2))",
"iaa.MedianBlur(k=(3, 5)), iaa.MotionBlur(k=5, angle=[-45, 45]) ]), iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5), add=(-30, 30)), iaa.Grayscale(alpha=(0.5, 1.0)),",
"Augmenter(object): def __init__(self): self.seq_aug = iaa.SomeOf((1, 2), [ iaa.OneOf([ #iaa.Dropout(p=(0.1, 0.2)), iaa.CoarseDropout(0.05, size_percent=0.1,",
"30)), iaa.Grayscale(alpha=(0.5, 1.0)), iaa.AddToHueAndSaturation((-50, 50)) ]), iaa.OneOf([ iaa.Fliplr(0.5), iaa.Affine(scale=(0.8, 1.2)), iaa.Affine(translate_percent={'x': (-0.2, 0.2),",
"5)), iaa.MotionBlur(k=5, angle=[-45, 45]) ]), iaa.OneOf([ iaa.MultiplyAndAddToBrightness(mul=(0.5, 1.5), add=(-30, 30)), iaa.Grayscale(alpha=(0.5, 1.0)), iaa.AddToHueAndSaturation((-50,",
"0.2)}), iaa.Affine(rotate=(-30, 30)), iaa.Affine(shear={'x': (-15, 15), 'y': (-15, 15)}) ]) ], random_order=True) def",
"15)}) ]) ], random_order=True) def __call__(self, sample): image, label = sample['image'], sample['label'] image",
"iaa class Augmenter(object): def __init__(self): self.seq_aug = iaa.SomeOf((1, 2), [ iaa.OneOf([ #iaa.Dropout(p=(0.1, 0.2)),",
"size_percent=0.1, per_channel=0.5), iaa.SaltAndPepper(0.05), iaa.CoarseSaltAndPepper(0.03, size_percent=(0.1, 0.2)) ]), iaa.OneOf([ iaa.GaussianBlur(sigma=(0.5, 1.0)), iaa.MedianBlur(k=(3, 5)), iaa.MotionBlur(k=5,",
"]), iaa.OneOf([ iaa.Fliplr(0.5), iaa.Affine(scale=(0.8, 1.2)), iaa.Affine(translate_percent={'x': (-0.2, 0.2), 'y': (-0.2, 0.2)}), iaa.Affine(rotate=(-30, 30)),",
"def __call__(self, sample): image, label = sample['image'], sample['label'] image = self.seq_aug(image=image) return {'image':image,"
] |
[
".cls_head import ClsHead from .linear_head import LinearClsHead from .orientation_head import OrientationHead from .resnet_orientation_head",
"All rights reserved. from .cls_head import ClsHead from .linear_head import LinearClsHead from .orientation_head",
"reserved. from .cls_head import ClsHead from .linear_head import LinearClsHead from .orientation_head import OrientationHead",
"rights reserved. from .cls_head import ClsHead from .linear_head import LinearClsHead from .orientation_head import",
"ClsHead from .linear_head import LinearClsHead from .orientation_head import OrientationHead from .resnet_orientation_head import ResNetOrientationHead",
"import LinearClsHead from .orientation_head import OrientationHead from .resnet_orientation_head import ResNetOrientationHead __all__ = [",
".orientation_head import OrientationHead from .resnet_orientation_head import ResNetOrientationHead __all__ = [ 'ClsHead', 'LinearClsHead', 'OrientationHead',",
"Copyright (c) OpenMMLab. All rights reserved. from .cls_head import ClsHead from .linear_head import",
"(c) OpenMMLab. All rights reserved. from .cls_head import ClsHead from .linear_head import LinearClsHead",
"# Copyright (c) OpenMMLab. All rights reserved. from .cls_head import ClsHead from .linear_head",
"OrientationHead from .resnet_orientation_head import ResNetOrientationHead __all__ = [ 'ClsHead', 'LinearClsHead', 'OrientationHead', 'ResNetOrientationHead' ]",
".linear_head import LinearClsHead from .orientation_head import OrientationHead from .resnet_orientation_head import ResNetOrientationHead __all__ =",
"from .cls_head import ClsHead from .linear_head import LinearClsHead from .orientation_head import OrientationHead from",
"LinearClsHead from .orientation_head import OrientationHead from .resnet_orientation_head import ResNetOrientationHead __all__ = [ 'ClsHead',",
"OpenMMLab. All rights reserved. from .cls_head import ClsHead from .linear_head import LinearClsHead from",
"import ClsHead from .linear_head import LinearClsHead from .orientation_head import OrientationHead from .resnet_orientation_head import",
"import OrientationHead from .resnet_orientation_head import ResNetOrientationHead __all__ = [ 'ClsHead', 'LinearClsHead', 'OrientationHead', 'ResNetOrientationHead'",
"from .linear_head import LinearClsHead from .orientation_head import OrientationHead from .resnet_orientation_head import ResNetOrientationHead __all__",
"from .orientation_head import OrientationHead from .resnet_orientation_head import ResNetOrientationHead __all__ = [ 'ClsHead', 'LinearClsHead',"
] |
[
"data from Our World In Data https://github.com/owid/covid-19-data Add columns cum_tests and new_tests to",
"new src_roi = src_trim[src_trim['region'] == roi] # filter rows that match roi df_combined",
"included if 'tests' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we can",
"delta = delta.days for dt in daterange(start_dt, end_dt): dates.append(dt.strftime(\"%m-%d-%Y\")) # cumulative tests are",
"cases back into dataframe. Args: roi (str): Region we are working with; used",
"Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'Micronesia','Kiribati']: print(\"Skipping {}\".format(country)) continue # If we",
"# for appending rois that don't have testing data for roi in roi_codes_dict:",
"%s\" % country) source = dfs['US'] states = source['confirmed'].index.tolist() us_recovery_data = covid_tracking_recovery(data_path) for",
"= datetime.strptime(x, '%m/%d/%y') return datetime.strftime(y, '%m/%d/%y') def fix_ct_dates(x): return datetime.strptime(str(x), '%Y%m%d') def get_countries(d:",
"path to data directory. Returns: None \"\"\" # Where JHU stores their data",
"to data directory. plot (bool): Whether to plot the changes. Returns: None \"\"\"",
"# now open csvs in data_path that match rois and merge on csv",
"'Colorado': 'US_CO', 'Connecticut': 'US_CT', 'Delaware': 'US_DE', 'District of Columbia': 'US_DC', 'Florida': 'US_FL', 'Georgia':",
"df_timeseries.merge(df_roi_tests, on='dates2', how='left') df_result.fillna(-1, inplace=True) df_result.loc[df_result['new_tests'] < 0, 'new_tests'] = -1 # Handle",
"numpy as np import pandas as pd import requests from tqdm import tqdm",
"'population_estimates.csv') except: print(\"Missing population_estimates.csv in data-path\") try: population = df_pop.query('roi == \"{}\"'.format(roi))['population'].values except:",
"filled cumulative counts and perform new cases calculation, then merge those new cases",
"= data_path / (csv.name.split('.')[0]+'.csv') df.to_csv(out) def remove_old_rois(data_path: str): \"\"\"Delete time-series files for regions",
"in rois: csv_path = data_path / f'covidtimeseries_{roi}.csv' try: df_timeseries = pd.read_csv(csv_path) except: print(f\"{csv_path}",
"x in data_path.iterdir() if 'covidtimeseries' in str(x)] for csv in tqdm(csvs, desc=\"Regions\"): roi",
"timeseries CSV print(\"OWID global test results missing for: \") for roi in roi_codes_dict:",
"'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, 'CA_' +",
"y = datetime.strptime(x, '%m/%d/%y') return datetime.strftime(y, '%m/%d/%y') def fix_ct_dates(x): return datetime.strptime(str(x), '%Y%m%d') def",
"df_tests.roi.unique().tolist() to_remove = ['Diamond Princess', 'Grand Princess', 'Recovered'] for i in to_remove: if",
"sort by datetime obj before converting to string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert",
"tests data for {roi}.') def daterange(date1, date2): for n in range(int ((date2 -",
"will append dfs for cases, deaths, recovered here # URL for API call",
"for df['new_uninfected'] = df['tmp_new_recover'] + df['new_deaths'] # new uninfected calculation df = df.fillna(-1).astype(int)",
"%s\" % ','.join(bad)) return good def get_population_count(data_path:str, roi): \"\"\" Check if we have",
"cumulative and now new counts. \"\"\" dfs = [] df_tmp = df.copy() df_tmp.reset_index(inplace=True)",
"good = good.intersection(enough) bad = set(d['confirmed'].index).difference(good) # print(\"JHU data acceptable for %s\" %",
"bool] = False) -> None: \"\"\" Scrape JHU for US State level test",
"is people_tested and then switches to Total_Test_Results if 'People_Tested' in df.columns: df_trim['cum_tests'] =",
"new counts to -1 so they don't become 0. df_ffill.loc[df_ffill[cum_col].isnull(), new_col] = -1",
"data from Johns Hopkins CSSEGIS (countries only). https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19 Args: data_path (str): Full",
"on=['date', 'province'], how='outer') df_raw.fillna(0, inplace=True) provinces = ['Alberta', 'BC', 'Manitoba', 'New Brunswick', 'NL',",
"df.set_index('dates2').fillna(0).astype(int) # Fill NaN with 0 and convert to int df.to_csv(data_path / ('covidtimeseries_BR_%s.csv'",
"Args: d (pd.DataFrame): Data from JHU tracker (e.g. df['global]). filter (bool, optional): Whether",
"= 'dummy_' + cum_col new_col = 'new_' + col try: start = df_tmp[df_tmp[cum_col]",
"plt import numpy as np import pandas as pd import requests from tqdm",
"df.columns: df_trim['cum_tests'] = df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim) except HTTPError: print(\"Could not download tests data for",
"get_countries(d: pd.DataFrame, filter_: Union[dict, bool] = True): \"\"\"Get a list of countries from",
"province for province in tqdm(provinces, desc='Canadian Provinces'): source = df_raw[df_raw['province'] == province] #",
"level test results. Data is stored as a collection of CSVs per date",
"1, 'deaths': 0} COVIDTRACKER_FILTER_DEFAULTS = {'cum_cases': 5, 'cum_recover': 1, 'cum_deaths': 0} US_STATE_ABBREV =",
"neg_index = diff[diff < 0].index df.loc[neg_index, cum] += 1 else: break # Then",
"fix dates src_trim['Alpha-3 code'] = src['iso_code'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated']",
"1 else: break # Then repeat if plot: plt.figure() plt.plot(df.index, before, label='raw') plt.plot(df.index,",
"World In Data https://github.com/owid/covid-19-data Add columns to global csvs in data_path. \"\"\" url",
"= pd.read_csv(url) except HTTPError: print(\"Could not download state-level data for Brazil\") state_code =",
"sorted_dfs = [] for roi in rois: df_roi = df_combined[df_combined['Province_State'] == roi] df_roi",
"bool] = True, fixes: bool = False) -> None: \"\"\" Get state-level data",
"import tqdm from typing import Union from urllib.error import HTTPError import urllib.request, json",
"data_path (str): Full path to data directory. plot (bool): Whether to plot the",
"pd.read_csv(url) df_trim = pd.DataFrame(columns=['Province_State', 'cum_tests', 'dates2']) df_trim['Province_State'] = df['Province_State'].values df_trim['dates2'] = fix_jhu_testing_dates(i) #",
"if os.path.exists(csv): print(\"Removing {} from data_path\".format(roi)) os.remove(csv) except: print(\"could not remove {}. Check",
"= pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'region', 'cum_vaccinations', 'daily_vaccinations', 'people_vaccinated', 'people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values",
"starts reporting end_dt = date.today() dates = [] delta = end_dt - start_dt",
"= pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2']",
"import HTTPError import urllib.request, json import os from datetime import timedelta, date import",
"days of data. This only affects a small number of regions. It overwrites",
"only). https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19 Args: data_path (str): Full path to data directory. Returns: None",
"df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[state].values df['cum_deaths'] = source['deaths'].loc[state].values df[['new_cases', 'new_deaths']] = df[['cum_cases', 'cum_deaths']].diff() #",
"-> None: \"\"\" Gets data from Canada's Open Covid group for Canadian Provinces.",
"source dataframe src_trim = pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests']) src_trim['dates2'] = src['Date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3",
"= src_trim[src_trim['region'] == roi] # filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations',",
"axis=1, inplace=True) # drop so we can add new src_roi = src_trim[src_trim['Alpha-3 code']",
"data as recovered if source['recovered'].isnull().all() == False: df['cum_recover'] = source['recovered'].values # check if",
"by nulling such data and applying a monotonic spline interpolation in between valid",
"new cases calculation, then merge those new cases back into dataframe. Args: roi",
"bool = False) -> None: \"\"\" Get US vaccines data from Our World",
"in ['cases', 'deaths', 'recover']: if df['cum_%s' % kind].sum() == 0: print(\"Negifying 'new_%s' for",
"in src_rois: unavailable_testing_data.append(roi) continue if roi_codes_dict[roi] in [\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]:",
"for c in columns: cum = 'cum_%s' % c new = 'new_%s' %",
"= src_trim['Alpha-3 code'].unique() unavailable_testing_data = [] # for appending rois that don't have",
"data.') pass for i in df_timeseries.columns: # Check if OWID testng data already",
"{}\".format(country)) continue # If we have data in the downloaded JHU files for",
"back into dataframe. Args: roi (str): Region we are working with; used for",
"kind == 'cases': source.drop('cases', axis=1, inplace=True) # removing this column so # we",
"df['cum_recover'] = source['recovered'].loc[country].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] =",
"country if state in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'new_cases','new_deaths','new_uninfected']) df['dates2'] =",
"def remove_old_rois(data_path: str): \"\"\"Delete time-series files for regions no longer tracked, such as:",
"%s\" % (kind, region)) else: if region == 'global': has_no_province = df['Province/State'].isnull() #",
"except: print(\"Could not add population data for {}\".format(state)) pass df.sort_values(by=['dates2'], inplace=True) # sort",
"rois_to_remove: try: if os.path.exists(csv): print(\"Removing {} from data_path\".format(roi)) os.remove(csv) except: print(\"could not remove",
"a loop which: while True: # Interpolates the cumulative column nulls to have",
"cases where column is people_tested and then switches to Total_Test_Results if 'People_Tested' in",
"before, label='raw') plt.plot(df.index, after, label='fixed') r = np.corrcoef(before, after)[0, 1] plt.title(\"%s %s Raw",
"daily data. The purpose of this script is to fix spurious negative values",
"df2.set_index('Country/Region') more_dfs.append(df2) df = pd.concat([df1] + more_dfs) elif region == 'US': # Use",
"df_result_trim = df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path) # overwrite timeseries CSV except: print(f'Could not get",
"get_population_count(data_path, roi) df['population'] = population except: print(\"Could not add population data for {}\".format(state))",
"-1 for missing data and the difference is taken between a new cumulative",
"'US_NH', 'New Jersey': 'US_NJ', 'New Mexico': 'US_NM', 'New York': 'US_NY', 'North Carolina': 'US_NC',",
"= source['deaths'].loc[state].values df[['new_cases', 'new_deaths']] = df[['cum_cases', 'cum_deaths']].diff() # add recovery data df.set_index('dates2', inplace=True)",
"create new counts for. Returns: df_fixed (pd.DataFrame): DataFrame containing cumulative and now new",
"drop=True, inplace=True) df_ffill[dummy_cum_col] = df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col] = df_ffill[dummy_cum_col].diff().astype('Int64') # If cumulative counts are",
"string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df = df.set_index('dates2') #",
"= pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'] df['cum_cases'] =",
"except: print(f'No {cum_col} data to add for {roi}.') df_ffill[new_col] = -1 df_ffill =",
"of data across all states'): url = url_template % i try: df =",
"pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'new_cases','new_deaths','new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[state].values",
"cumulative tests are named 'Total_Test_Results' after 200 ish days dfs = [] for",
"(values). \"\"\" archived_data = data_path / 'covid-tracking-project-recovery.csv' df_raw = pd.read_csv(archived_data) states = df_raw['state'].unique()",
"of dataframes # Generate a list of countries that have \"good\" data, #",
"above. Args: data_path (str): Full path to data directory. plot (bool): Whether to",
"# Protect against 0 null final value which screws up interpolator if np.isnan(df.loc[df.index[-1],",
"trim down source dataframe src_trim = pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests']) src_trim['dates2'] = src['Date'].apply(fix_owid_dates).values # fix",
"df = pd.read_csv(csv) # Exclude final day because it is often a partial",
"'US_CT', 'Delaware': 'US_DE', 'District of Columbia': 'US_DC', 'Florida': 'US_FL', 'Georgia': 'US_GA', 'Guam': 'US_GU',",
"= source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[country].values df['cum_deaths'] = source['deaths'].loc[country].values df['cum_recover'] =",
"more_dfs.append(df2) df = pd.concat([df1] + more_dfs) elif region == 'US': # Use state",
"= False) -> None: \"\"\" Get state-level data for Brazil. https://github.com/wcota/covid19br (<NAME>) \"\"\"",
"source['newCases'].values df['new_deaths'] = source['newDeaths'].values df['new_recover'] = df['cum_recover'].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try:",
"HTTPError: print(\"Could not download tests data for %s\" % i) df_combined = pd.concat(dfs)",
"pass for i in df_timeseries.columns: # Check if OWID testng data already included",
"delta.days for dt in daterange(start_dt, end_dt): dates.append(dt.strftime(\"%m-%d-%Y\")) # cumulative tests are named 'People_Tested'",
"this script is to fix spurious negative values in new daily numbers. For",
"'US_NJ', 'New Mexico': 'US_NM', 'New York': 'US_NY', 'North Carolina': 'US_NC', 'North Dakota': 'US_ND',",
"tqdm(good_countries, desc='Countries'): # For each country if country in ['Diamond Princess', 'Grand Princess',",
"temp new recover for df['new_uninfected'] = df['tmp_new_recover'] + df['new_deaths'] # new uninfected calculation",
"df_ffill = df_tmp.iloc[start:] df_ffill.set_index('dates2', drop=True, inplace=True) df_ffill[dummy_cum_col] = df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col] = df_ffill[dummy_cum_col].diff().astype('Int64') #",
"'US_MN', 'Mississippi': 'US_MS', 'Missouri': 'US_MO', 'Montana': 'US_MT', 'Nebraska': 'US_NE', 'Nevada': 'US_NV', 'New Hampshire':",
"columns: list = ['cases', 'deaths', 'recover'], plot: bool = False) -> pd.DataFrame: \"\"\"Used",
"bool] = True, fixes: bool = False) -> None: \"\"\" Get US vaccines",
"= [] delta = end_dt - start_dt delta = delta.days for dt in",
"= df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path / ('covidtimeseries_%s.csv' % country)) else: print(\"No data for %s\" %",
"starting on Jan 22 2020 url_template = 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for kind in ['cases', 'mortality',",
"to a value less than N on a subsequent day. This script fixes",
"= source['confirmed'].loc[country].values df['cum_deaths'] = source['deaths'].loc[country].values df['cum_recover'] = source['recovered'].loc[country].values df[['new_cases', 'new_deaths', 'new_recover']] = \\",
"pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim",
"if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def dummy_cumulative_new_counts(roi, df, columns: list):",
"into date on all 3 dfs at same position source.rename(columns={source.columns[1]: \"date\" }, inplace=True)",
"%s, %s\" % (kind, region)) else: if region == 'global': has_no_province = df['Province/State'].isnull()",
"'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[country].values df['cum_deaths']",
"dataframe with forward filled cumulative counts and perform new cases calculation, then merge",
"True): \"\"\"Get a list of countries from a global dataframe optionally passing a",
"have population count for roi and add to timeseries df if we do.",
"= non_zeros[0] maxx = df.loc[first_non_zero, cum].max() # Find the bad entries and null",
"(<NAME>) \"\"\" url = \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try: df_raw = pd.read_csv(url) except HTTPError: print(\"Could not",
"pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests']) src_trim['dates2'] = src['Date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['ISO code'].values",
"%s\" % i) df_combined = pd.concat(dfs) df_combined.sort_values(by='Province_State', inplace=True) df_combined['Date'] = pd.to_datetime(df_combined['dates2']) rois =",
"True: # Interpolates the cumulative column nulls to have # monotonic growth after",
"'PR':'Parana', 'RJ':'Rio de Janeiro', 'RN':'Rio Grande do Norte', 'RO':'Rondonia', 'RR':'Roraima', 'RS':'Rio Grande do",
"'US_VA', 'Washington': 'US_WA', 'West Virginia': 'US_WV', 'Wisconsin': 'US_WI', 'Wyoming': 'US_WY' } def get_jhu(data_path:",
"for. Returns: df_fixed (pd.DataFrame): DataFrame containing cumulative and now new counts. \"\"\" dfs",
"# Where JHU stores their data url_template = (\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") # Scrape",
"\"\"\" Get US vaccines data from Our World In Data https://github.com/owid/covid-19-data Add columns",
"= url_template % i try: df = pd.read_csv(url) df_trim = pd.DataFrame(columns=['Province_State', 'cum_tests', 'dates2'])",
"== 'global': has_no_province = df['Province/State'].isnull() # Whole countries only; use country name as",
"dfs[state] = df dfs[state].to_csv(data_path / ('covidtimeseries_%s.csv' % state)) else: print(\"No data for %s\"",
"= population except: pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting",
"e.g 'US_MI' or 'Greece'. columns (list, optional): Columns to make non-decreasing. Defaults to",
"One region, e.g 'US_MI' or 'Greece'. columns (list, optional): Columns to make non-decreasing.",
"source['totalCases'].values df['cum_deaths'] = source['deaths'].values df['cum_recover'] = source['recovered'].values df['new_cases'] = source['newCases'].values df['new_deaths'] = source['newDeaths'].values",
"= maxx # Then run a loop which: while True: # Interpolates the",
"state in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'US_AS']: print(\"Skipping {}\".format(state)) continue #",
"url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src = pd.read_csv(url) roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict =",
"= str(csv).split('.')[0].split('_', 1)[-1] if roi in rois_to_remove: try: if os.path.exists(csv): print(\"Removing {} from",
"= src['ISO code'].values src_trim['cum_tests'] = src['Cumulative total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique()",
"source['deaths'].loc[state].values df[['new_cases', 'new_deaths']] = df[['cum_cases', 'cum_deaths']].diff() # add recovery data df.set_index('dates2', inplace=True) df",
"df, columns: list): \"\"\" There are cases where cum counts go missing and",
"== state] # Only the given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases',",
"cumulative counts decrease and new_tests becomes a large negative number df_combined[['cum_tests', 'new_tests']] =",
"df_tmp.reset_index(inplace=True) for col in columns: cum_col = 'cum_' + col dummy_cum_col = 'dummy_'",
"'Tennessee': 'US_TN', 'Texas': 'US_TX', 'Utah': 'US_UT', 'Vermont': 'US_VT', 'Virgin Islands': 'US_VI', 'Virginia': 'US_VA',",
"= good.intersection(enough) bad = set(d['confirmed'].index).difference(good) # print(\"JHU data acceptable for %s\" % ','.join(good))",
"do Sul', 'MT':'Mato Grosso', 'PA':'Para', 'PB':'Paraiba', 'PE':'Pernambuco', 'PI':'Piaui', 'PR':'Parana', 'RJ':'Rio de Janeiro', 'RN':'Rio",
"= df.iloc[:-1] df = fix_neg(df, roi, plot=plot) df.to_csv(data_path / (csv.name.split('.')[0]+'.csv')) def fix_neg(df: pd.DataFrame,",
"as recovered if source['recovered'].isnull().all() == False: df['cum_recover'] = source['recovered'].values # check if roi",
"False) -> None: \"\"\" Gets data from Canada's Open Covid group for Canadian",
"columns # 20 or 21 signifies 2020 or 2021 dfs[region][kind] = df #",
"download state-level data for Brazil\") state_code = {'AC':'Acre', 'AL':'Alagoas', 'AM':'Amazonas', 'AP':'Amapa', 'BA':'Bahia','CE':'Ceara', 'DF':'Distrito",
"'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) #",
"= df_roi.sort_values(by=\"Date\") df_roi['new_tests'] = df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi) df_tests = pd.concat(sorted_dfs) df_tests.reset_index(inplace=True, drop=True) df_tests.replace(US_STATE_ABBREV, inplace=True)",
"= pd.read_csv(csv_path) except: print(f\"{csv_path} not found in data path.\") try: for i in",
"= pd.read_csv(url) # Download the data into a dataframe except HTTPError: print(\"Could not",
"named 'People_Tested' for first 200 ish days # then cumulative tests are named",
"= df_result['new_tests'].astype(int) df_result[['cum_tests', 'new_tests']] = df_result[['cum_tests', 'new_tests']].astype(int) df_result_trim = df_result[['dates2', 'cum_cases', 'new_cases', 'cum_deaths',",
"https://github.com/CSSEGISandData/COVID-19 Args: data_path (str): Full path to data directory. Returns: None \"\"\" #",
"Full path to data directory. Returns: None \"\"\" url_template = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" # generate",
"df.loc[neg_index, cum] += 1 else: break # Then repeat if plot: plt.figure() plt.plot(df.index,",
"print(\"Could not download tests data for %s\" % i) df_combined = pd.concat(dfs) df_combined.sort_values(by='Province_State',",
"no longer tracked, such as: Diamond Princess, MS Zaandam, Samoa, Vanuatu, Marshall Islands,",
"i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we can add new df_roi_tests =",
"bad = set(d['confirmed'].index).difference(good) # print(\"JHU data acceptable for %s\" % ','.join(good)) # print(\"JHU",
"src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict()",
"us_recovery_data = covid_tracking_recovery(data_path) for state in tqdm(states, desc='US States'): # For each country",
"typing import Union from urllib.error import HTTPError import urllib.request, json import os from",
"Province-level timeseries data starting on Jan 22 2020 url_template = 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for kind",
"columns which are negative bad = df.loc[first_non_zero:, new] < 0 df.loc[bad[bad].index, cum] =",
"check if roi reports recovery data as hospitalizedDischarged elif source['hospitalizedDischarged'].isnull().all() == False: df['cum_recover']",
"'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim = pd.DataFrame(columns=['dates2','Alpha-3",
"run a loop which: while True: # Interpolates the cumulative column nulls to",
"# cumulative counts decrease and new_tests becomes a large negative number df_result['new_tests'] =",
"Union[dict, bool] = True) -> None: \"\"\"Gets data from Johns Hopkins CSSEGIS (countries",
"When JHU starts reporting end_dt = date.today() dates = [] delta = end_dt",
"= df.loc[first_non_zero:, cum] == 0 df.loc[bad[bad].index, cum] = None # 2) New daily",
"= df2.set_index('Country/Region') more_dfs.append(df2) df = pd.concat([df1] + more_dfs) elif region == 'US': #",
"`fix_negatives` to fix negatives values for a single region. This function uses monotonic",
"to dictionary of dataframes # Generate a list of countries that have \"good\"",
"between a new cumulative count and -1. We don't want it to spike,",
"% i) df_combined = pd.concat(dfs) df_combined.sort_values(by='Province_State', inplace=True) df_combined['Date'] = pd.to_datetime(df_combined['dates2']) rois = df_combined['Province_State'].unique()",
"data_path (str): Full path to data directory. Returns: None \"\"\" # Where JHU",
"'SE':'Sergipe', 'SP':'Sao Paulo', 'TO':'Tocantins'} for state in tqdm(state_code, desc='Brazilian States'): source = df_raw[df_raw['state']",
"# Check if testng data already included if 'tests' in i: df_timeseries.drop([i], axis=1,",
"df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: roi = 'BR_' + state_code[state] population =",
"{} for kind in ['confirmed', 'deaths', 'recovered']: url = url_template % (kind, region)",
"df_rawtemp = cases.merge(recovered, on=['date', 'province'], how='outer') df_raw = df_rawtemp.merge(deaths, on=['date', 'province'], how='outer') df_raw.fillna(0,",
"FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID data.') pass for i in",
"= pd.read_csv(data_path / 'population_estimates.csv') except: print(\"Missing population_estimates.csv in data-path\") try: population = df_pop.query('roi",
"tracked, such as: Diamond Princess, MS Zaandam, Samoa, Vanuatu, Marshall Islands, US, US_AS",
"pd.DataFrame: [description] \"\"\" for c in columns: cum = 'cum_%s' % c new",
"on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined, cum_vacc_columns) df =",
"def get_jhu_us_states_tests(data_path: str, filter_: Union[dict, bool] = False) -> None: \"\"\" Scrape JHU",
"Canadian Provinces. https://opencovid.ca/ \"\"\" dfs = [] # we will append dfs for",
"'US_MT', 'Nebraska': 'US_NE', 'Nevada': 'US_NV', 'New Hampshire': 'US_NH', 'New Jersey': 'US_NJ', 'New Mexico':",
"df_result[['dates2', 'cum_cases', 'new_cases', 'cum_deaths', 'new_deaths', 'cum_recover', 'new_recover', 'new_uninfected', 'cum_tests', 'new_tests', 'population']].copy() df_result_trim =",
"== roi] df_roi = df_roi.sort_values(by=\"Date\") df_roi['new_tests'] = df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi) df_tests = pd.concat(sorted_dfs) df_tests.reset_index(inplace=True,",
"20 or 21 signifies 2020 or 2021 dfs[region][kind] = df # Add to",
"should not go from N to a value less than N on a",
"has_no_province = df['Province/State'].isnull() # Whole countries only; use country name as index df1",
"global dataframe optionally passing a quality check Args: d (pd.DataFrame): Data from JHU",
"as index df1 = df[has_no_province].set_index('Country/Region') more_dfs = [] for country in ['China', 'Canada',",
"\"Vanuatu\"]: # skipping because no data continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv'",
"% i try: df = pd.read_csv(url) df_trim = pd.DataFrame(columns=['Province_State', 'cum_tests', 'dates2']) df_trim['Province_State'] =",
"roi_codes_dict: if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def get_owid_global_vaccines(data_path: str, filter_:",
"for csv in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) # Exclude",
"= False) -> None: \"\"\" Get global vaccines data from Our World In",
"= src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values src_trim.set_index('dates2', inplace=True, drop=True) src_trim.replace(\"New York State\", \"New York\",",
"roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) # Exclude final day because it is",
"US vaccines data from Our World In Data https://github.com/owid/covid-19-data Add columns to US",
"so we can add new df_roi_tests = df_tests[df_tests['roi'] == roi] # filter down",
"de Janeiro', 'RN':'Rio Grande do Norte', 'RO':'Rondonia', 'RR':'Roraima', 'RS':'Rio Grande do Sul', 'SC':'Santa",
"tqdm(dates, desc=f'Scraping {delta} days of data across all states'): url = url_template %",
"return datetime.strftime(y, '%m/%d/%y') def fix_negatives(data_path: str, plot: bool = False) -> None: \"\"\"Fix",
"is to fix spurious negative values in new daily numbers. For example, the",
"df['new_deaths'] try: population = get_population_count(data_path, 'CA_' + province) df['population'] = population except: pass",
"def daterange(date1, date2): for n in range(int ((date2 - date1).days)+1): yield date1 +",
"the given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2']",
"'Guam': 'US_GU', 'Hawaii': 'US_HI', 'Idaho': 'US_ID', 'Illinois': 'US_IL', 'Indiana': 'US_IN', 'Iowa': 'US_IA', 'Kansas':",
"new cumulative count and -1. We don't want it to spike, and we",
"= pd.to_datetime(df_combined['dates2']) rois = df_combined['Province_State'].unique() sorted_dfs = [] for roi in rois: df_roi",
"Use state name as index # for k, v in US_STATE_ABBREV.items(): # get",
"filter_: Union[dict, bool] = False) -> None: \"\"\" Scrape JHU for US State",
"bool = False) -> None: \"\"\"Fix negative values in daily data. The purpose",
"fix_neg(df: pd.DataFrame, roi: str, columns: list = ['cases', 'deaths', 'recover'], plot: bool =",
"only data columns # 20 or 21 signifies 2020 or 2021 dfs[region][kind] =",
"# if not US_STATE_ABBREV[k].startswith('US_'): # US_STATE_ABBREV[k] = 'US_' + v # Add 'US_'",
"1. neg_index = diff[diff < 0].index df.loc[neg_index, cum] += 1 else: break #",
"'people_fully_vaccinated'] df = dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite",
"for %s\" % state) def fix_jhu_dates(x): y = datetime.strptime(x, '%m/%d/%y') return datetime.strftime(y, '%m/%d/%y')",
"how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined, cum_vacc_columns) df",
"df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, country) df['population'] = population except: pass",
"state in tqdm(states, desc='US States'): # For each country if state in ['Diamond",
"state)) else: print(\"No data for %s\" % state) def fix_jhu_dates(x): y = datetime.strptime(x,",
"down source dataframe src_trim = pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests']) src_trim['dates2'] = src['Date'].apply(fix_owid_dates).values # fix dates",
"df_combined[df_combined['Province_State'] == roi] df_roi = df_roi.sort_values(by=\"Date\") df_roi['new_tests'] = df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi) df_tests = pd.concat(sorted_dfs)",
"'US_PR', 'Rhode Island': 'US_RI', 'South Carolina': 'US_SC', 'South Dakota': 'US_SD', 'Tennessee': 'US_TN', 'Texas':",
"'New Jersey': 'US_NJ', 'New Mexico': 'US_NM', 'New York': 'US_NY', 'North Carolina': 'US_NC', 'North",
"pd.read_csv(url) except HTTPError: print(\"Could not download state-level data for Brazil\") state_code = {'AC':'Acre',",
"return datetime.strptime(str(x), '%Y%m%d') def get_countries(d: pd.DataFrame, filter_: Union[dict, bool] = True): \"\"\"Get a",
"import bs4 from datetime import datetime import matplotlib.pyplot as plt import numpy as",
"List of columns (without cum_ prefix) so create new counts for. Returns: df_fixed",
"Covid group for Canadian Provinces. https://opencovid.ca/ \"\"\" dfs = [] # we will",
"src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values src_trim.set_index('dates2', inplace=True, drop=True) src_trim.replace(\"New York State\", \"New",
"Princess, MS Zaandam, Samoa, Vanuatu, Marshall Islands, US, US_AS (American Somoa)\"\"\" csvs =",
"uninfected calculation df = df.fillna(-1).astype(int) df = df.drop(['tmp_new_recover'], axis=1) try: population = get_population_count(data_path,",
"= ['Alberta', 'BC', 'Manitoba', 'New Brunswick', 'NL', 'Nova Scotia', 'Nunavut', 'NWT', 'Ontario', 'PEI',",
"= df_timeseries.merge(src_roi[['cum_tests']], how='left', on='dates2') df_combined['new_tests'] = df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests'] < 0, 'new_tests'] = -1",
"= source['confirmed'].index.tolist() us_recovery_data = covid_tracking_recovery(data_path) for state in tqdm(states, desc='US States'): # For",
"N on a subsequent day. This script fixes this by nulling such data",
"= df.loc[first_non_zero:, new] < 0 df.loc[bad[bad].index, cum] = None # Protect against 0",
"inplace=True) df_ffill[dummy_cum_col] = df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col] = df_ffill[dummy_cum_col].diff().astype('Int64') # If cumulative counts are missing,",
"df_pop.query('roi == \"{}\"'.format(roi))['population'].values except: print(\"{} population estimate not found in population_estimates.csv\".format(args.roi)) return int(population)",
"# for k, v in US_STATE_ABBREV.items(): # get US state abbrev # if",
"= False) -> pd.DataFrame: \"\"\"Used by `fix_negatives` to fix negatives values for a",
"datetime.strftime(y, '%m/%d/%y') def fix_ct_dates(x): return datetime.strptime(str(x), '%Y%m%d') def get_countries(d: pd.DataFrame, filter_: Union[dict, bool]",
"value which screws up interpolator if np.isnan(df.loc[df.index[-1], cum]): df.loc[df.index[-1], cum] = maxx #",
"return datetime.strftime(y, '%m/%d/%y') def fix_ct_dates(x): return datetime.strptime(str(x), '%Y%m%d') def get_countries(d: pd.DataFrame, filter_: Union[dict,",
"str(x)] rois_to_remove = ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands',",
"columns cum_tests and new_tests to csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src",
"country in tqdm(good_countries, desc='Countries'): # For each country if country in ['Diamond Princess',",
"src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data = [] # for appending rois that don't",
"datetime import timedelta, date import pandas as pd pd.options.mode.chained_assignment = None # default='warn'",
"timeseries df if we do. Args: data_path (str): Full path to data directory.",
"data and applying a monotonic spline interpolation in between valid days of data.",
"quality check Args: d (pd.DataFrame): Data from JHU tracker (e.g. df['global]). filter (bool,",
"df['dates2'] = source['date'].apply(fix_canada_dates) # Convert date format df['cum_cases'] = source['cumulative_cases'].values df['cum_deaths'] = source['cumulative_deaths'].values",
"x in df if any(year in x for year in ['20', '21'])]] #",
"url = \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try: df_raw = pd.read_csv(url) except HTTPError: print(\"Could not download state-level",
"Convert date format df['cum_cases'] = source['cumulative_cases'].values df['cum_deaths'] = source['cumulative_deaths'].values df['cum_recover'] = source['cumulative_recovered'].values df[['new_cases',",
"= True) -> None: \"\"\"Gets data from Johns Hopkins CSSEGIS (countries only). https://coronavirus.jhu.edu/map.html",
"df.loc[df.index[-1], cum] = maxx # Then run a loop which: while True: #",
"in str(x)] rois_to_remove = ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall",
"do Norte', 'RO':'Rondonia', 'RR':'Roraima', 'RS':'Rio Grande do Sul', 'SC':'Santa Catarina', 'SE':'Sergipe', 'SP':'Sao Paulo',",
"None \"\"\" csvs = [x for x in data_path.iterdir() if 'covidtimeseries' in str(x)]",
"else: if region == 'global': has_no_province = df['Province/State'].isnull() # Whole countries only; use",
"get_population_count(data_path, state) df['population'] = population except: pass dfs[state] = df dfs[state].to_csv(data_path / ('covidtimeseries_%s.csv'",
"hospitalizedDischarged elif source['hospitalizedDischarged'].isnull().all() == False: df['cum_recover'] = source['hospitalizedDischarged'].values else: df['cum_recover'] = np.NaN df.sort_values(by=['dates2'],",
"= fix_neg(df, roi, plot=plot) df.to_csv(data_path / (csv.name.split('.')[0]+'.csv')) def fix_neg(df: pd.DataFrame, roi: str, columns:",
"df_new = pd.concat(dfs, axis=1) df_new = df_new.fillna(-1).astype(int) df_fixed = df.join(df_new) df_fixed = df_fixed.fillna(-1).astype(int)",
"% ','.join(good)) # print(\"JHU data not acceptable for %s\" % ','.join(bad)) return good",
"to get Province-level timeseries data starting on Jan 22 2020 url_template = 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020'",
"'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'].apply(fix_canada_dates) # Convert date format df['cum_cases']",
"i) df_combined = pd.concat(dfs) df_combined.sort_values(by='Province_State', inplace=True) df_combined['Date'] = pd.to_datetime(df_combined['dates2']) rois = df_combined['Province_State'].unique() sorted_dfs",
"diff.min() < 0: # If there are still negative first-differences at this #",
"given state df = pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2'] = source['date'].apply(fix_ct_dates) # Convert date format #",
"cum_ prefix) so create new counts for. Returns: df_fixed (pd.DataFrame): DataFrame containing cumulative",
"'US_NY', 'North Carolina': 'US_NC', 'North Dakota': 'US_ND', 'Northern Mariana Islands':'US_MP', 'Ohio': 'US_OH', 'Oklahoma':",
"url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'Alpha-3 code', 'cum_vaccinations', 'daily_vaccinations',",
"def get_countries(d: pd.DataFrame, filter_: Union[dict, bool] = True): \"\"\"Get a list of countries",
"if roi_codes_dict[roi] in [\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because no",
"match rois and merge on csv to add cum_test and new_tests rois =",
"in new daily numbers. For example, the cumulative total of cases should not",
"= -1 # Handle cases where # cumulative counts decrease and new_tests becomes",
"data_path (str): Full path to data directory. Returns: ctp_dfs (dict): Dictionary containing US",
"'%Y-%m-%d') return datetime.strftime(y, '%m/%d/%y') def get_jhu_us_states_tests(data_path: str, filter_: Union[dict, bool] = False) ->",
"partial count. df = df.iloc[:-1] df = fix_neg(df, roi, plot=plot) df.to_csv(data_path / (csv.name.split('.')[0]+'.csv'))",
"'US_RI', 'South Carolina': 'US_SC', 'South Dakota': 'US_SD', 'Tennessee': 'US_TN', 'Texas': 'US_TX', 'Utah': 'US_UT',",
"src['date'].apply(fix_owid_dates).values # fix dates src_trim['region'] = src['location'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values",
"= json.loads(url.read().decode()) source = pd.json_normalize(data[kind]) if kind == 'cases': source.drop('cases', axis=1, inplace=True) #",
"pandas as pd pd.options.mode.chained_assignment = None # default='warn' JHU_FILTER_DEFAULTS = {'confirmed': 5, 'recovered':",
"'US']: dfs[region] = {} for kind in ['confirmed', 'deaths', 'recovered']: url = url_template",
"= df_pop.query('roi == \"{}\"'.format(roi))['population'].values except: print(\"{} population estimate not found in population_estimates.csv\".format(args.roi)) return",
"'mortality', 'recovered']: url_path = url_template % kind # Create the full data URL",
"# default='warn' JHU_FILTER_DEFAULTS = {'confirmed': 5, 'recovered': 1, 'deaths': 0} COVIDTRACKER_FILTER_DEFAULTS = {'cum_cases':",
"from The COVID Tracking Project. https://covidtracking.com Args: data_path (str): Full path to data",
"if OWID vaccines data already included if 'vaccin' in i: df_timeseries.drop([i], axis=1, inplace=True)",
"'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: population",
"in roi_codes_dict: if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def dummy_cumulative_new_counts(roi, df,",
"'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(US_STATE_ABBREV[roi],",
"\") print(\"\") def get_owid_global_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes: bool =",
"Islands', 'US', 'US_AS', 'Micronesia', 'Kiribati', 'Palau'] for csv in csvs: roi = str(csv).split('.')[0].split('_',",
"= df.set_index('dates2') # Convert to int df['new_recover'] = df['cum_recover'].diff() ctp_dfs['US_'+state] = df return",
"% (kind, region)) else: if region == 'global': has_no_province = df['Province/State'].isnull() # Whole",
"df_combined = df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global test results",
"np import pandas as pd import requests from tqdm import tqdm from typing",
"% c new = 'new_%s' % c before = df[cum].copy() non_zeros = df[df[new]",
"= True, fixes: bool = False) -> None: \"\"\" Get testing data from",
"import matplotlib.pyplot as plt import numpy as np import pandas as pd import",
"in data_path that match rois and merge on csv to add cum_test and",
"csvs = [x for x in data_path.iterdir() if 'covidtimeseries' in str(x)] rois_to_remove =",
"= df['Province_State'].values df_trim['dates2'] = fix_jhu_testing_dates(i) # handle cases where column is people_tested and",
"'cum_deaths': 0} US_STATE_ABBREV = { 'Alabama': 'US_AL', 'Alaska': 'US_AK', 'American Samoa': 'US_AS', 'Arizona':",
"bool = False) -> None: \"\"\" Get state-level data for Brazil. https://github.com/wcota/covid19br (<NAME>)",
"0: print(\"Negifying 'new_%s' for %s\" % (kind, roi)) df['new_%s' % kind] = -1",
"False) -> None: \"\"\" Get testing data from Our World In Data https://github.com/owid/covid-19-data",
"df_result[['cum_tests', 'new_tests']] = df_result[['cum_tests', 'new_tests']].astype(int) df_result_trim = df_result[['dates2', 'cum_cases', 'new_cases', 'cum_deaths', 'new_deaths', 'cum_recover',",
"data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error:",
"roi_codes_dict[roi] in [\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because bad data",
"[] df_tmp = df.copy() df_tmp.reset_index(inplace=True) for col in columns: cum_col = 'cum_' +",
"criteria. \"\"\" good = set(d['confirmed'].index) if filter_ and not isinstance(filter_, dict): filter_ =",
"str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) # Exclude final day because it is often a",
"# Only the given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover',",
"'Wyoming': 'US_WY' } def get_jhu(data_path: str, filter_: Union[dict, bool] = True) -> None:",
"df['Country/Region'] == country df2 = df[is_c].sum(axis=0, skipna=False).to_frame().T df2['Country/Region'] = country df2 = df2.set_index('Country/Region')",
"fix_jhu_dates(x): y = datetime.strptime(x, '%m/%d/%y') return datetime.strftime(y, '%m/%d/%y') def fix_ct_dates(x): return datetime.strptime(str(x), '%Y%m%d')",
"'cum_people_vaccinated', 'cum_people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['iso_code'].values src_trim['cum_vaccinations']",
"optional): Whether to filter by quality criteria. \"\"\" good = set(d['confirmed'].index) if filter_",
"containing US States (keys) and dataframes containing dates, recovery data (values). \"\"\" archived_data",
"in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'US_AS']: print(\"Skipping {}\".format(state)) continue # If",
"df_raw = pd.read_csv(url) except HTTPError: print(\"Could not download state-level data for Brazil\") state_code",
"for kind in ['cases', 'mortality', 'recovered']: url_path = url_template % kind # Create",
"as fnf_error: print(fnf_error, 'Could not add OWID data.') pass for i in df_timeseries.columns:",
"good def get_population_count(data_path:str, roi): \"\"\" Check if we have population count for roi",
"state-level data for Brazil. https://github.com/wcota/covid19br (<NAME>) \"\"\" url = \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try: df_raw =",
"df_pop = pd.read_csv(data_path / 'population_estimates.csv') except: print(\"Missing population_estimates.csv in data-path\") try: population =",
"roi_codes_dict[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add",
"new src_roi = src_trim[src_trim['Alpha-3 code'] == roi] # filter rows that match roi",
"'new_cases', 'cum_deaths', 'new_deaths', 'cum_recover', 'new_recover', 'new_uninfected', 'cum_tests', 'new_tests', 'population']].copy() df_result_trim = df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')]",
"convert dates to string df = df.set_index('dates2') # Convert to int df['new_recover'] =",
"HTTPError import urllib.request, json import os from datetime import timedelta, date import pandas",
"= 'cum_%s' % c new = 'new_%s' % c before = df[cum].copy() non_zeros",
"can add new src_roi = src_trim[src_trim['region'] == roi] # filter rows that match",
"in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) # Exclude final day",
"dfs[country] = df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path / ('covidtimeseries_%s.csv' % country)) else: print(\"No data for %s\"",
"plot=plot) df.to_csv(data_path / (csv.name.split('.')[0]+'.csv')) def fix_neg(df: pd.DataFrame, roi: str, columns: list = ['cases',",
"plt.legend() else: after = before # Make sure the first differences are now",
"= df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values df_combined = df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID",
"name as index df1 = df[has_no_province].set_index('Country/Region') more_dfs = [] for country in ['China',",
"in columns: cum_col = 'cum_' + col dummy_cum_col = 'dummy_' + cum_col new_col",
"data across all states'): url = url_template % i try: df = pd.read_csv(url)",
"Then run a loop which: while True: # Interpolates the cumulative column nulls",
"'US_CA', 'Colorado': 'US_CO', 'Connecticut': 'US_CT', 'Delaware': 'US_DE', 'District of Columbia': 'US_DC', 'Florida': 'US_FL',",
"'North Dakota': 'US_ND', 'Northern Mariana Islands':'US_MP', 'Ohio': 'US_OH', 'Oklahoma': 'US_OK', 'Oregon': 'US_OR', 'Pennsylvania':",
"df['population'] = population except: pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before",
"# Fill NaN with 0 and convert to int df.to_csv(data_path / ('covidtimeseries_BR_%s.csv' %",
"df = df.fillna(-1).astype(int) df = df.drop(['tmp_new_recover'], axis=1) try: population = get_population_count(data_path, state) df['population']",
"try: df = pd.read_csv(url) df_trim = pd.DataFrame(columns=['Province_State', 'cum_tests', 'dates2']) df_trim['Province_State'] = df['Province_State'].values df_trim['dates2']",
"# cumulative tests are named 'People_Tested' for first 200 ish days # then",
"inplace=True) df_combined['Date'] = pd.to_datetime(df_combined['dates2']) rois = df_combined['Province_State'].unique() sorted_dfs = [] for roi in",
"= df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global test results missing",
"reporting end_dt = date.today() dates = [] delta = end_dt - start_dt delta",
"or 21 signifies 2020 or 2021 dfs[region][kind] = df # Add to dictionary",
"c new = 'new_%s' % c before = df[cum].copy() non_zeros = df[df[new] >",
"= df.fillna(-1).astype(int) df = df.drop(['tmp_new_recover'], axis=1) try: population = get_population_count(data_path, state) df['population'] =",
"['cases', 'deaths', 'recover'], plot: bool = False) -> pd.DataFrame: \"\"\"Used by `fix_negatives` to",
"code'] == roi] # filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations',",
"Brazil\") state_code = {'AC':'Acre', 'AL':'Alagoas', 'AM':'Amazonas', 'AP':'Amapa', 'BA':'Bahia','CE':'Ceara', 'DF':'Distrito Federal', 'ES':'Espirito Santo', 'GO':'Goias',",
"a large negative number df_combined[['cum_tests', 'new_tests']] = df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values df_combined = df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')]",
"to global csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src = pd.read_csv(url) src_trim",
"to string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df = df.set_index('dates2').fillna(0).astype(int)",
"drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data = [] # for appending rois that",
"get_canada(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False) -> None:",
"dfs[1] recovered = dfs[2] # combine dfs df_rawtemp = cases.merge(recovered, on=['date', 'province'], how='outer')",
"df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[country].values df['cum_deaths'] = source['deaths'].loc[country].values df['cum_recover'] = source['recovered'].loc[country].values df[['new_cases', 'new_deaths', 'new_recover']]",
"zero after previously # being non-zero bad = df.loc[first_non_zero:, cum] == 0 df.loc[bad[bad].index,",
"global csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src = pd.read_csv(url) src_trim =",
"/ ('covidtimeseries_%s.csv' % state)) else: print(\"No data for %s\" % state) def fix_jhu_dates(x):",
"(str): Full path to data directory. plot (bool): Whether to plot the changes.",
"= ['cases', 'deaths', 'recover'], plot: bool = False) -> pd.DataFrame: \"\"\"Used by `fix_negatives`",
"[] for i in tqdm(dates, desc=f'Scraping {delta} days of data across all states'):",
"= df_result[['dates2', 'cum_cases', 'new_cases', 'cum_deaths', 'new_deaths', 'cum_recover', 'new_recover', 'new_uninfected', 'cum_tests', 'new_tests', 'population']].copy() df_result_trim",
"the values df[new] = df[cum].diff().fillna(0).astype(int).values return df def negify_missing(data_path: str) -> None: \"\"\"Fix",
"None # default='warn' JHU_FILTER_DEFAULTS = {'confirmed': 5, 'recovered': 1, 'deaths': 0} COVIDTRACKER_FILTER_DEFAULTS =",
"= data_path / f'covidtimeseries_{roi}.csv' try: df_timeseries = pd.read_csv(csv_path) except: print(f\"{csv_path} not found in",
"Gerais', 'MS':'Mato Grosso do Sul', 'MT':'Mato Grosso', 'PA':'Para', 'PB':'Paraiba', 'PE':'Pernambuco', 'PI':'Piaui', 'PR':'Parana', 'RJ':'Rio",
"pd.DataFrame, filter_: Union[dict, bool] = True): \"\"\"Get a list of countries from a",
"dfs.append(df_trim) if 'Total_Test_Results' in df.columns: df_trim['cum_tests'] = df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim) except HTTPError: print(\"Could not",
"'covidtimeseries' in str(x)] for csv in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df =",
"-> None: \"\"\"Gets data from Johns Hopkins CSSEGIS (countries only). https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19 Args:",
"else: print(\"No data for %s\" % state) def fix_jhu_dates(x): y = datetime.strptime(x, '%m/%d/%y')",
"-1 out = data_path / (csv.name.split('.')[0]+'.csv') df.to_csv(out) def remove_old_rois(data_path: str): \"\"\"Delete time-series files",
"in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we can add new df_roi_tests",
"acceptable for %s\" % ','.join(bad)) return good def get_population_count(data_path:str, roi): \"\"\" Check if",
"date format # first check if roi reports recovery data as recovered if",
"# Then repeat if plot: plt.figure() plt.plot(df.index, before, label='raw') plt.plot(df.index, after, label='fixed') r",
"get Province-level timeseries data starting on Jan 22 2020 url_template = 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for",
"df1 = df[has_no_province].set_index('Country/Region') more_dfs = [] for country in ['China', 'Canada', 'Australia']: if",
"+ col dummy_cum_col = 'dummy_' + cum_col new_col = 'new_' + col try:",
"for {roi}.') df_ffill[new_col] = -1 df_ffill = df_ffill[~df_ffill.index.duplicated()] # fix duplication issue dfs.append(df_ffill[new_col])",
"{} for region in ['global', 'US']: dfs[region] = {} for kind in ['confirmed',",
"if any(year in x for year in ['20', '21'])]] # Use only data",
"src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values src_trim.set_index('dates2', inplace=True, drop=True) src_trim.replace(\"New York State\", \"New York\", inplace=True)",
"obj before converting to string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string",
"# we will append dfs for cases, deaths, recovered here # URL for",
"because it is often a partial count. df = df.iloc[:-1] df = fix_neg(df,",
"'US_CO', 'Connecticut': 'US_CT', 'Delaware': 'US_DE', 'District of Columbia': 'US_DC', 'Florida': 'US_FL', 'Georgia': 'US_GA',",
"API call to get Province-level timeseries data starting on Jan 22 2020 url_template",
"increase the corresponding cumulative values by 1. neg_index = diff[diff < 0].index df.loc[neg_index,",
"Samoa': 'US_AS', 'Arizona': 'US_AZ', 'Arkansas': 'US_AR', 'California': 'US_CA', 'Colorado': 'US_CO', 'Connecticut': 'US_CT', 'Delaware':",
"\"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because no data continue try: timeseries_path = data_path",
"global test results missing for: \") for roi in roi_codes_dict: if roi in",
"to data directory. Returns: None \"\"\" url_template = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" # generate a list",
"df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, 'CA_' + province) df['population']",
"to make sure that cumulative counts are non-decreasing. Args: df (pd.DataFrame): DataFrame containing",
"to string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df = df.set_index('dates2')",
"date2): for n in range(int ((date2 - date1).days)+1): yield date1 + timedelta(n) def",
"'US_UT', 'Vermont': 'US_VT', 'Virgin Islands': 'US_VI', 'Virginia': 'US_VA', 'Washington': 'US_WA', 'West Virginia': 'US_WV',",
"count and -1. We don't want it to spike, and we don't want",
"that data in its own .csv file. source = dfs['global'] for country in",
"df_ffill[~df_ffill.index.duplicated()] # fix duplication issue dfs.append(df_ffill[new_col]) df_new = pd.concat(dfs, axis=1) df_new = df_new.fillna(-1).astype(int)",
"of CSVs per date containing states and test results. Args: data_path (str): Full",
"# fix dates src_trim['Alpha-3 code'] = src['iso_code'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values",
"decrease and new_tests becomes a large negative number df_result['new_tests'] = df_result['new_tests'].astype(int) df_result[['cum_tests', 'new_tests']]",
"df['new_cases'] = source['newCases'].values df['new_deaths'] = source['newDeaths'].values df['new_recover'] = df['cum_recover'].diff() df['new_uninfected'] = df['new_recover'] +",
"add recovery data df.set_index('dates2', inplace=True) df = df.merge(us_recovery_data[state], on='dates2', how='left') df['tmp_new_recover'] = df['new_recover'].fillna(0).astype(int)",
"as index # for k, v in US_STATE_ABBREV.items(): # get US state abbrev",
"False: df['cum_recover'] = source['hospitalizedDischarged'].values else: df['cum_recover'] = np.NaN df.sort_values(by=['dates2'], inplace=True) # sort by",
"= 'US_' + v # Add 'US_' to abbrev df.replace(US_STATE_ABBREV, inplace=True) df =",
"('covidtimeseries_%s.csv' % country)) else: print(\"No data for %s\" % country) source = dfs['US']",
"example, the cumulative total of cases should not go from N to a",
"src['location'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values",
"def fix_canada_dates(x): return datetime.strptime(x, '%d-%m-%Y') def get_brazil(data_path: str, filter_: Union[dict, bool] = True,",
"missing and new counts get missed. New counts spike when cumulative counts go",
"code'].values src_trim['cum_tests'] = src['Cumulative total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data =",
"'people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['region'] = src['location'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values",
"loop which: while True: # Interpolates the cumulative column nulls to have #",
"signifies 2020 or 2021 dfs[region][kind] = df # Add to dictionary of dataframes",
"\"\"\" for c in columns: cum = 'cum_%s' % c new = 'new_%s'",
"count for roi and add to timeseries df if we do. Args: data_path",
"# Check if OWID testing data already included if 'vaccin' in i: df_timeseries.drop([i],",
"bad = df.loc[first_non_zero:, cum] == 0 df.loc[bad[bad].index, cum] = None # 2) New",
"Union[dict, bool] = True): \"\"\"Get a list of countries from a global dataframe",
"get_owid_tests(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False) -> None:",
"/ 'population_estimates.csv') except: print(\"Missing population_estimates.csv in data-path\") try: population = df_pop.query('roi == \"{}\"'.format(roi))['population'].values",
"continue # If we have data in the downloaded JHU files for that",
"= set(d['confirmed'].index).difference(good) # print(\"JHU data acceptable for %s\" % ','.join(good)) # print(\"JHU data",
"code']).to_dict() # trim down source dataframe src_trim = pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests']) src_trim['dates2'] = src['Date'].apply(fix_owid_dates).values",
"with 0 and convert to int dfs[country] = df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path / ('covidtimeseries_%s.csv' %",
"'MS Zaandam', 'US_AS']: print(\"Skipping {}\".format(state)) continue # If we have data in the",
"df['new_deaths'] try: roi = 'BR_' + state_code[state] population = get_population_count(data_path, roi) df['population'] =",
"df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[state].values df['cum_deaths'] = source['deaths'].loc[state].values df[['new_cases',",
"return good def get_population_count(data_path:str, roi): \"\"\" Check if we have population count for",
"uses monotonic spline interpolation to make sure that cumulative counts are non-decreasing. Args:",
"df_combined = df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated']",
"# Download the data into a dataframe except HTTPError: print(\"Could not download data",
"source['deaths'].loc[country].values df['cum_recover'] = source['recovered'].loc[country].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected']",
"by quality criteria. \"\"\" good = set(d['confirmed'].index) if filter_ and not isinstance(filter_, dict):",
"1] plt.title(\"%s %s Raw vs Fixed R=%.5g\" % (roi, c, r)) plt.legend() else:",
"url_template % (kind, region) # Create the full data URL try: df =",
"%s\" % state) def fix_jhu_dates(x): y = datetime.strptime(x, '%m/%d/%y') return datetime.strftime(y, '%m/%d/%y') def",
"plot (bool): Whether to plot the changes. Returns: None \"\"\" csvs = [x",
"= pd.read_csv(url) df_trim = pd.DataFrame(columns=['Province_State', 'cum_tests', 'dates2']) df_trim['Province_State'] = df['Province_State'].values df_trim['dates2'] = fix_jhu_testing_dates(i)",
"region) # Create the full data URL try: df = pd.read_csv(url) # Download",
"vaccines data already included if 'vaccin' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop",
"The purpose of this script is to fix spurious negative values in new",
"= df dfs[state].to_csv(data_path / ('covidtimeseries_%s.csv' % state)) else: print(\"No data for %s\" %",
"/ 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim.set_index('dates2',inplace=True, drop=True)",
"merge those new cases back into dataframe. Args: roi (str): Region we are",
"Catarina', 'SE':'Sergipe', 'SP':'Sao Paulo', 'TO':'Tocantins'} for state in tqdm(state_code, desc='Brazilian States'): source =",
"['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path)",
"= df['new_recover'].fillna(0).astype(int) # create temp new recover for df['new_uninfected'] = df['tmp_new_recover'] + df['new_deaths']",
"url_template = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" # generate a list of dates for scraping start_dt =",
"in data path.\") try: for i in df_timeseries.columns: # Check if testng data",
"df.loc[bad[bad].index, cum] = None # Protect against 0 null final value which screws",
"= df_ffill[~df_ffill.index.duplicated()] # fix duplication issue dfs.append(df_ffill[new_col]) df_new = pd.concat(dfs, axis=1) df_new =",
"# Add to dictionary of dataframes # Generate a list of countries that",
"Full path to data directory. plot (bool): Whether to plot the changes. Returns:",
"/ f'covidtimeseries_{roi}.csv' try: df_timeseries = pd.read_csv(csv_path) except: print(f\"{csv_path} not found in data path.\")",
"src_trim['cum_tests'] = src['Cumulative total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data = []",
"New counts spike when cumulative counts go to -1 for missing data and",
"data = json.loads(url.read().decode()) source = pd.json_normalize(data[kind]) if kind == 'cases': source.drop('cases', axis=1, inplace=True)",
"date import pandas as pd pd.options.mode.chained_assignment = None # default='warn' JHU_FILTER_DEFAULTS = {'confirmed':",
"save that data in its own .csv file. source = dfs['global'] for country",
"print(\"Could not download data for %s, %s\" % (kind, region)) else: if region",
"= date.today() dates = [] delta = end_dt - start_dt delta = delta.days",
"= df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df",
"containing states and test results. Args: data_path (str): Full path to data directory.",
"df['new_%s' % kind] = -1 out = data_path / (csv.name.split('.')[0]+'.csv') df.to_csv(out) def remove_old_rois(data_path:",
"The COVID Tracking Project. https://covidtracking.com Args: data_path (str): Full path to data directory.",
"first 200 ish days # then cumulative tests are named 'Total_Test_Results' after 200",
"ROI (if exists). \"\"\" try: # open population file df_pop = pd.read_csv(data_path /",
"statements. df (pd.DataFrame): DataFrame containing counts but not new counts. columns (list): List",
"= df.loc[first_non_zero, cum].max() # Find the bad entries and null the corresponding #",
"'cum_deaths']].diff() # add recovery data df.set_index('dates2', inplace=True) df = df.merge(us_recovery_data[state], on='dates2', how='left') df['tmp_new_recover']",
"directory. Returns: None \"\"\" # Where JHU stores their data url_template = (\"https://raw.githubusercontent.com/CSSEGISandData/\"",
"column, which are: # 1) Cumulative columns which are zero after previously #",
"negative values in new daily numbers. For example, the cumulative total of cases",
"regions no longer tracked, such as: Diamond Princess, MS Zaandam, Samoa, Vanuatu, Marshall",
"state abbrev # if not US_STATE_ABBREV[k].startswith('US_'): # US_STATE_ABBREV[k] = 'US_' + v #",
"= { 'Alabama': 'US_AL', 'Alaska': 'US_AK', 'American Samoa': 'US_AS', 'Arizona': 'US_AZ', 'Arkansas': 'US_AR',",
"df_raw[df_raw['province'] == province] # Only the given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover',",
"negative number df_combined[['cum_tests', 'new_tests']] = df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values df_combined = df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path) #",
"cum_col = 'cum_' + col dummy_cum_col = 'dummy_' + cum_col new_col = 'new_'",
"df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV def fix_owid_dates(x): y =",
"and convert to int dfs[country] = df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path / ('covidtimeseries_%s.csv' % country)) else:",
"return ctp_dfs def get_canada(data_path: str, filter_: Union[dict, bool] = True, fixes: bool =",
"Returns: None \"\"\" csvs = [x for x in data_path.iterdir() if 'covidtimeseries' in",
"spurious negative values in new daily numbers. For example, the cumulative total of",
"/ 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim =",
"== province] # Only the given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases',",
"Zaandam', 'US_AS']: print(\"Skipping {}\".format(state)) continue # If we have data in the downloaded",
"in tqdm(good_countries, desc='Countries'): # For each country if country in ['Diamond Princess', 'Grand",
"US_STATE_ABBREV[k] = 'US_' + v # Add 'US_' to abbrev df.replace(US_STATE_ABBREV, inplace=True) df",
"Cumulative columns which are zero after previously # being non-zero bad = df.loc[first_non_zero:,",
"for country in ['China', 'Canada', 'Australia']: if country == 'Canada' and kind in",
"data-path\") try: population = df_pop.query('roi == \"{}\"'.format(roi))['population'].values except: print(\"{} population estimate not found",
"roi] # filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']],",
"= pd.read_csv(url) roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim",
"state] # Only the given state df = pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2'] = source['date'].apply(fix_ct_dates) #",
"json import os from datetime import timedelta, date import pandas as pd pd.options.mode.chained_assignment",
"good_countries = get_countries(dfs['global'], filter_=filter_) # For each \"good\" country, # reformat and save",
"def negify_missing(data_path: str) -> None: \"\"\"Fix negative values in daily data. The purpose",
"named 'Total_Test_Results' after 200 ish days dfs = [] for i in tqdm(dates,",
"str): \"\"\"Gets archived US recovery data from The COVID Tracking Project. https://covidtracking.com Args:",
"df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[country].values df['cum_deaths'] = source['deaths'].loc[country].values df['cum_recover']",
"path.\") try: for i in df_timeseries.columns: # Check if testng data already included",
"by 1. neg_index = diff[diff < 0].index df.loc[neg_index, cum] += 1 else: break",
"vaccine results missing for: \") for roi in roi_codes_dict: if roi in unavailable_testing_data:",
"minimum].tolist() good = good.intersection(enough) bad = set(d['confirmed'].index).difference(good) # print(\"JHU data acceptable for %s\"",
"df_combined['new_tests'] = df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests'] < 0, 'new_tests'] = -1 # Handle cases where",
"enough = d[key].index[d[key].max(axis=1) >= minimum].tolist() good = good.intersection(enough) bad = set(d['confirmed'].index).difference(good) # print(\"JHU",
"continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2')",
"% US_STATE_ABBREV[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not",
"df = df.set_index('dates2') # Convert to int df['new_recover'] = df['cum_recover'].diff() ctp_dfs['US_'+state] = df",
"'RJ':'Rio de Janeiro', 'RN':'Rio Grande do Norte', 'RO':'Rondonia', 'RR':'Roraima', 'RS':'Rio Grande do Sul',",
"df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim) except HTTPError: print(\"Could not download tests data for %s\" % i)",
"So create a dummy dataframe with forward filled cumulative counts and perform new",
"of data. This only affects a small number of regions. It overwrites the",
"before converting to string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df",
"in columns: cum = 'cum_%s' % c new = 'new_%s' % c before",
"kind # Create the full data URL with urllib.request.urlopen(url_path) as url: data =",
"json.loads(url.read().decode()) source = pd.json_normalize(data[kind]) if kind == 'cases': source.drop('cases', axis=1, inplace=True) # removing",
"according to these criteria: good_countries = get_countries(dfs['global'], filter_=filter_) # For each \"good\" country,",
"province) df['population'] = population except: pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj",
"(str): Region we are working with; used for print statements. df (pd.DataFrame): DataFrame",
"in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src = pd.read_csv(url) roi_codes = pd.read_csv(data_path /",
"= source['date'] df['cum_cases'] = source['totalCases'].values df['cum_deaths'] = source['deaths'].values df['cum_recover'] = source['recovered'].values df['new_cases'] =",
"== \"{}\"'.format(roi))['population'].values except: print(\"{} population estimate not found in population_estimates.csv\".format(args.roi)) return int(population) def",
"Generate a list of countries that have \"good\" data, # according to these",
"minimum in filter_.items(): enough = d[key].index[d[key].max(axis=1) >= minimum].tolist() good = good.intersection(enough) bad =",
"of cases should not go from N to a value less than N",
"before = df[cum].copy() non_zeros = df[df[new] > 0].index has_negs = before.diff().min() < 0",
"in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2']",
"for col in columns: cum_col = 'cum_' + col dummy_cum_col = 'dummy_' +",
"source dataframe src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data = [] # for",
"cumulative column, which are: # 1) Cumulative columns which are zero after previously",
"for scraping start_dt = date(2020, 4, 12) # When JHU starts reporting end_dt",
"ish days # then cumulative tests are named 'Total_Test_Results' after 200 ish days",
"= False) -> None: \"\"\" Get US vaccines data from Our World In",
"filter_: Union[dict, bool] = True): \"\"\"Get a list of countries from a global",
"print(\"OWID global test results missing for: \") for roi in roi_codes_dict: if roi",
"roi_codes_dict[roi] in [\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because no data",
"cum] = maxx # Then run a loop which: while True: # Interpolates",
"data for {}\".format(state)) pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting",
"how='outer') df_raw.fillna(0, inplace=True) provinces = ['Alberta', 'BC', 'Manitoba', 'New Brunswick', 'NL', 'Nova Scotia',",
"testing data from Our World In Data https://github.com/owid/covid-19-data Add columns cum_tests and new_tests",
"new counts before the gap. So create a dummy dataframe with forward filled",
"on='dates2', how='left') df['tmp_new_recover'] = df['new_recover'].fillna(0).astype(int) # create temp new recover for df['new_uninfected'] =",
"(kind, region)) else: if region == 'global': has_no_province = df['Province/State'].isnull() # Whole countries",
"Janeiro', 'RN':'Rio Grande do Norte', 'RO':'Rondonia', 'RR':'Roraima', 'RS':'Rio Grande do Sul', 'SC':'Santa Catarina',",
"('covidtimeseries_BR_%s.csv' % state_code[state])) def get_owid_tests(data_path: str, filter_: Union[dict, bool] = True, fixes: bool",
"try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % US_STATE_ABBREV[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except",
"cumulative values by 1. neg_index = diff[diff < 0].index df.loc[neg_index, cum] += 1",
"'covidtimeseries' in str(x)] rois_to_remove = ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu',",
"if OWID testing data already included if 'vaccin' in i: df_timeseries.drop([i], axis=1, inplace=True)",
"dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID",
"region. roi (str): One region, e.g 'US_MI' or 'Greece'. columns (list, optional): Columns",
"for {}\".format(state)) pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting to",
"have # monotonic growth after = df[cum].interpolate('pchip') diff = after.diff() if diff.min() <",
"population except: pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting to",
"= 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'Alpha-3 code', 'cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated',",
"roi in roi_codes_dict: if roi not in src_rois: unavailable_testing_data.append(roi) continue if roi_codes_dict[roi] in",
"'US_TN', 'Texas': 'US_TX', 'Utah': 'US_UT', 'Vermont': 'US_VT', 'Virgin Islands': 'US_VI', 'Virginia': 'US_VA', 'Washington':",
"entries and null the corresponding # cumulative column, which are: # 1) Cumulative",
"'deaths', 'recover']: if df['cum_%s' % kind].sum() == 0: print(\"Negifying 'new_%s' for %s\" %",
"Check if OWID testing data already included if 'vaccin' in i: df_timeseries.drop([i], axis=1,",
"pass # Fill NaN with 0 and convert to int dfs[country] = df.set_index('dates2').fillna(0).astype(int)",
"in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def get_owid_global_vaccines(data_path: str, filter_: Union[dict, bool] =",
"# Fill NaN with 0 and convert to int df.to_csv(data_path / ('covidtimeseries_CA_%s.csv' %",
"= df.copy() df_tmp.reset_index(inplace=True) for col in columns: cum_col = 'cum_' + col dummy_cum_col",
"source = df_raw[df_raw['province'] == province] # Only the given province df = pd.DataFrame(columns=['dates2','cum_cases',",
"inplace=True) # drop so we can add new src_roi = src_trim[src_trim['region'] == roi]",
"# then cumulative tests are named 'Total_Test_Results' after 200 ish days dfs =",
"filter_ = JHU_FILTER_DEFAULTS if filter_: for key, minimum in filter_.items(): enough = d[key].index[d[key].max(axis=1)",
"dfs at same position source.rename(columns={source.columns[1]: \"date\" }, inplace=True) dfs.append(source) cases = dfs[0] deaths",
"'AP':'Amapa', 'BA':'Bahia','CE':'Ceara', 'DF':'Distrito Federal', 'ES':'Espirito Santo', 'GO':'Goias', 'MA':'Maranhao', 'MG':'Minas Gerais', 'MS':'Mato Grosso do",
"import datetime import matplotlib.pyplot as plt import numpy as np import pandas as",
"overwrites the original .csv files produced by the functions above. Args: data_path (str):",
"are: # 1) Cumulative columns which are zero after previously # being non-zero",
"tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) for kind in ['cases', 'deaths',",
"US State level test results. Data is stored as a collection of CSVs",
"collection of CSVs per date containing states and test results. Args: data_path (str):",
"negatives values for a single region. This function uses monotonic spline interpolation to",
"# combine counties to create state level data df = df[[x for x",
"= source['recovered'].values # check if roi reports recovery data as hospitalizedDischarged elif source['hospitalizedDischarged'].isnull().all()",
"# drop so we can add new df_roi_tests = df_tests[df_tests['roi'] == roi] #",
"source = df_raw[df_raw['state'] == state] # Only the given state df = pd.DataFrame(columns=['dates2','cum_recover','new_recover'])",
"as a collection of CSVs per date containing states and test results. Args:",
"a subsequent day. This script fixes this by nulling such data and applying",
"columns to US csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src = pd.read_csv(url)",
"'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined, cum_vacc_columns)",
"df2['Country/Region'] = country df2 = df2.set_index('Country/Region') more_dfs.append(df2) df = pd.concat([df1] + more_dfs) elif",
"from urllib.error import HTTPError import urllib.request, json import os from datetime import timedelta,",
"desc='Countries'): # For each country if country in ['Diamond Princess', 'Grand Princess', 'MS",
"a global dataframe optionally passing a quality check Args: d (pd.DataFrame): Data from",
"/ ('covidtimeseries_CA_%s.csv' % province)) def fix_canada_dates(x): return datetime.strptime(x, '%d-%m-%Y') def get_brazil(data_path: str, filter_:",
"df = df.groupby('Province_State').sum() # combine counties to create state level data df =",
"/ ('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error,",
"and null the corresponding # cumulative column, which are: # 1) Cumulative columns",
"against 0 null final value which screws up interpolator if np.isnan(df.loc[df.index[-1], cum]): df.loc[df.index[-1],",
"we will append dfs for cases, deaths, recovered here # URL for API",
"with 0 and convert to int df.to_csv(data_path / ('covidtimeseries_CA_%s.csv' % province)) def fix_canada_dates(x):",
"sorted_dfs.append(df_roi) df_tests = pd.concat(sorted_dfs) df_tests.reset_index(inplace=True, drop=True) df_tests.replace(US_STATE_ABBREV, inplace=True) df_tests.rename(columns={'Province_State': 'roi'}, inplace=True) # now",
"src_trim[src_trim['Alpha-3 code'] == roi] # filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations',",
"'Could not add OWID data.') pass for i in df_timeseries.columns: # Check if",
"fixes: bool = False) -> None: \"\"\" Get state-level data for Brazil. https://github.com/wcota/covid19br",
"\\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path,",
"dates src_trim['Alpha-3 code'] = src['iso_code'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] =",
"for state in states: # For each country source = df_raw[df_raw['state'] == state]",
"data_path.iterdir() if 'covidtimeseries' in str(x)] for csv in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1]",
"def dummy_cumulative_new_counts(roi, df, columns: list): \"\"\" There are cases where cum counts go",
"# convert dates to string df = df.set_index('dates2') # Convert to int df['new_recover']",
"Provinces. https://opencovid.ca/ \"\"\" dfs = [] # we will append dfs for cases,",
"prefix) so create new counts for. Returns: df_fixed (pd.DataFrame): DataFrame containing cumulative and",
"df_result[['cum_tests', 'new_tests']].astype(int) df_result_trim = df_result[['dates2', 'cum_cases', 'new_cases', 'cum_deaths', 'new_deaths', 'cum_recover', 'new_recover', 'new_uninfected', 'cum_tests',",
"= df_tests[df_tests['roi'] == roi] # filter down to roi df_result = df_timeseries.merge(df_roi_tests, on='dates2',",
"print(\"Could not add population data for {}\".format(state)) pass df.sort_values(by=['dates2'], inplace=True) # sort by",
"of columns (without cum_ prefix) so create new counts for. Returns: df_fixed (pd.DataFrame):",
"\"\"\" # Where JHU stores their data url_template = (\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") #",
"as: Diamond Princess, MS Zaandam, Samoa, Vanuatu, Marshall Islands, US, US_AS (American Somoa)\"\"\"",
"return int(population) def covid_tracking_recovery(data_path: str): \"\"\"Gets archived US recovery data from The COVID",
"recovery data from The COVID Tracking Project. https://covidtracking.com Args: data_path (str): Full path",
"merge on csv to add cum_test and new_tests rois = df_tests.roi.unique().tolist() to_remove =",
"dfs[0] deaths = dfs[1] recovered = dfs[2] # combine dfs df_rawtemp = cases.merge(recovered,",
"level data df = df[[x for x in df if any(year in x",
"= df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi) df_tests = pd.concat(sorted_dfs) df_tests.reset_index(inplace=True, drop=True) df_tests.replace(US_STATE_ABBREV, inplace=True) df_tests.rename(columns={'Province_State': 'roi'}, inplace=True)",
"= pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'Alpha-3 code', 'cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']) src_trim['dates2'] =",
"'US_VI', 'Virginia': 'US_VA', 'Washington': 'US_WA', 'West Virginia': 'US_WV', 'Wisconsin': 'US_WI', 'Wyoming': 'US_WY' }",
"null the corresponding # cumulative column, which are: # 1) Cumulative columns which",
"'cum_tests', 'new_tests', 'population']].copy() df_result_trim = df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path) # overwrite timeseries CSV except:",
"# If there are still negative first-differences at this # point, increase the",
"vaccines data from Our World In Data https://github.com/owid/covid-19-data Add columns to US csvs",
"from typing import Union from urllib.error import HTTPError import urllib.request, json import os",
"'US_NM', 'New York': 'US_NY', 'North Carolina': 'US_NC', 'North Dakota': 'US_ND', 'Northern Mariana Islands':'US_MP',",
"In Data https://github.com/owid/covid-19-data Add columns to global csvs in data_path. \"\"\" url =",
"def get_brazil(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False) ->",
"any(year in x for year in ['20', '21'])]] # Use only data columns",
"try: # open population file df_pop = pd.read_csv(data_path / 'population_estimates.csv') except: print(\"Missing population_estimates.csv",
"have data in the downloaded JHU files for that country if state in",
"final day because it is often a partial count. df = df.iloc[:-1] df",
"on='dates2', how='left') df_result.fillna(-1, inplace=True) df_result.loc[df_result['new_tests'] < 0, 'new_tests'] = -1 # Handle cases",
"a list of countries that have \"good\" data, # according to these criteria:",
"src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values src_trim.set_index('dates2',",
"create state level data df = df[[x for x in df if any(year",
"Get state-level data for Brazil. https://github.com/wcota/covid19br (<NAME>) \"\"\" url = \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try: df_raw",
"= src['iso_code'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] =",
"cum] = None # 2) New daily columns which are negative bad =",
"= get_population_count(data_path, country) df['population'] = population except: pass # Fill NaN with 0",
"how='left', on='dates2') df_combined['new_tests'] = df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests'] < 0, 'new_tests'] = -1 # Handle",
"Norte', 'RO':'Rondonia', 'RR':'Roraima', 'RS':'Rio Grande do Sul', 'SC':'Santa Catarina', 'SE':'Sergipe', 'SP':'Sao Paulo', 'TO':'Tocantins'}",
"a new cumulative count and -1. We don't want it to spike, and",
"df_timeseries.columns: # Check if OWID testing data already included if 'vaccin' in i:",
"pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'region', 'cum_vaccinations', 'daily_vaccinations', 'people_vaccinated', 'people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values #",
"a single region. This function uses monotonic spline interpolation to make sure that",
"cum = 'cum_%s' % c new = 'new_%s' % c before = df[cum].copy()",
"tqdm(provinces, desc='Canadian Provinces'): source = df_raw[df_raw['province'] == province] # Only the given province",
"full data URL try: df = pd.read_csv(url) # Download the data into a",
"df = df.drop(['tmp_new_recover'], axis=1) try: population = get_population_count(data_path, state) df['population'] = population except:",
"states = df_raw['state'].unique() ctp_dfs = {} for state in states: # For each",
"= df[is_c].sum(axis=0, skipna=False).to_frame().T df2['Country/Region'] = country df2 = df2.set_index('Country/Region') more_dfs.append(df2) df = pd.concat([df1]",
"= pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3",
"'US_DE', 'District of Columbia': 'US_DC', 'Florida': 'US_FL', 'Georgia': 'US_GA', 'Guam': 'US_GU', 'Hawaii': 'US_HI',",
"x for year in ['20', '21'])]] # Use only data columns # 20",
"'Marshall Islands', 'US', 'Micronesia','Kiribati']: print(\"Skipping {}\".format(country)) continue # If we have data in",
"country) df['population'] = population except: pass # Fill NaN with 0 and convert",
"assert after.diff().min() >= 0 # Replace the values df[new] = df[cum].diff().fillna(0).astype(int).values return df",
"vaccines data from Our World In Data https://github.com/owid/covid-19-data Add columns to global csvs",
"df_fixed = df.join(df_new) df_fixed = df_fixed.fillna(-1).astype(int) return df_fixed def get_owid_us_vaccines(data_path: str, filter_: Union[dict,",
"data as hospitalizedDischarged elif source['hospitalizedDischarged'].isnull().all() == False: df['cum_recover'] = source['hospitalizedDischarged'].values else: df['cum_recover'] =",
"None: \"\"\"Gets data from Johns Hopkins CSSEGIS (countries only). https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19 Args: data_path",
"url: data = json.loads(url.read().decode()) source = pd.json_normalize(data[kind]) if kind == 'cases': source.drop('cases', axis=1,",
"[] delta = end_dt - start_dt delta = delta.days for dt in daterange(start_dt,",
"= cases.merge(recovered, on=['date', 'province'], how='outer') df_raw = df_rawtemp.merge(deaths, on=['date', 'province'], how='outer') df_raw.fillna(0, inplace=True)",
"print(\"\") def dummy_cumulative_new_counts(roi, df, columns: list): \"\"\" There are cases where cum counts",
"df_trim['Province_State'] = df['Province_State'].values df_trim['dates2'] = fix_jhu_testing_dates(i) # handle cases where column is people_tested",
"except: pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting to string",
"is taken between a new cumulative count and -1. We don't want it",
"= source['confirmed'].loc[state].values df['cum_deaths'] = source['deaths'].loc[state].values df[['new_cases', 'new_deaths']] = df[['cum_cases', 'cum_deaths']].diff() # add recovery",
"Somoa)\"\"\" csvs = [x for x in data_path.iterdir() if 'covidtimeseries' in str(x)] rois_to_remove",
"pd pd.options.mode.chained_assignment = None # default='warn' JHU_FILTER_DEFAULTS = {'confirmed': 5, 'recovered': 1, 'deaths':",
"df['new_deaths'] # new uninfected calculation df = df.fillna(-1).astype(int) df = df.drop(['tmp_new_recover'], axis=1) try:",
"roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated',",
"now open csvs in data_path that match rois and merge on csv to",
"+ df['new_deaths'] try: roi = 'BR_' + state_code[state] population = get_population_count(data_path, roi) df['population']",
"\"good\" country, # reformat and save that data in its own .csv file.",
"https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19 Args: data_path (str): Full path to data directory. Returns: None \"\"\"",
"datetime.strptime(x, '%m/%d/%y') return datetime.strftime(y, '%m/%d/%y') def fix_ct_dates(x): return datetime.strptime(str(x), '%Y%m%d') def get_countries(d: pd.DataFrame,",
"\"\"\" url = \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try: df_raw = pd.read_csv(url) except HTTPError: print(\"Could not download",
"% ','.join(bad)) return good def get_population_count(data_path:str, roi): \"\"\" Check if we have population",
"# new uninfected calculation df = df.fillna(-1).astype(int) df = df.drop(['tmp_new_recover'], axis=1) try: population",
"'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'US_AS', 'Micronesia', 'Kiribati', 'Palau']",
"not found in data path.\") try: for i in df_timeseries.columns: # Check if",
"the downloaded JHU files for that country if country in source['confirmed'].index: df =",
"in rois: df_roi = df_combined[df_combined['Province_State'] == roi] df_roi = df_roi.sort_values(by=\"Date\") df_roi['new_tests'] = df_roi['cum_tests'].diff().fillna(-1).astype(int)",
"not acceptable for %s\" % ','.join(bad)) return good def get_population_count(data_path:str, roi): \"\"\" Check",
"df_tmp = df.copy() df_tmp.reset_index(inplace=True) for col in columns: cum_col = 'cum_' + col",
"if 'Total_Test_Results' in df.columns: df_trim['cum_tests'] = df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim) except HTTPError: print(\"Could not download",
"df_rawtemp.merge(deaths, on=['date', 'province'], how='outer') df_raw.fillna(0, inplace=True) provinces = ['Alberta', 'BC', 'Manitoba', 'New Brunswick',",
"values in new daily numbers. For example, the cumulative total of cases should",
"add OWID global vaccines data.') pass for i in df_timeseries.columns: # Check if",
"Create the full data URL try: df = pd.read_csv(url) # Download the data",
"tqdm(state_code, desc='Brazilian States'): source = df_raw[df_raw['state'] == state] # Only the given province",
"dummy_cum_col = 'dummy_' + cum_col new_col = 'new_' + col try: start =",
"'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined,",
"check if roi reports recovery data as recovered if source['recovered'].isnull().all() == False: df['cum_recover']",
"df = df.set_index('Province_State') df = df.groupby('Province_State').sum() # combine counties to create state level",
"Returns: population (int): Population count for ROI (if exists). \"\"\" try: # open",
"new uninfected calculation df = df.fillna(-1).astype(int) df = df.drop(['tmp_new_recover'], axis=1) try: population =",
"def covid_tracking_recovery(data_path: str): \"\"\"Gets archived US recovery data from The COVID Tracking Project.",
"scraping start_dt = date(2020, 4, 12) # When JHU starts reporting end_dt =",
"% state) def fix_jhu_dates(x): y = datetime.strptime(x, '%m/%d/%y') return datetime.strftime(y, '%m/%d/%y') def fix_ct_dates(x):",
"'Arizona': 'US_AZ', 'Arkansas': 'US_AR', 'California': 'US_CA', 'Colorado': 'US_CO', 'Connecticut': 'US_CT', 'Delaware': 'US_DE', 'District",
"have data in the downloaded JHU files for that country if country in",
"into a dataframe except HTTPError: print(\"Could not download data for %s, %s\" %",
"200 ish days # then cumulative tests are named 'Total_Test_Results' after 200 ish",
"plot: bool = False) -> pd.DataFrame: \"\"\"Used by `fix_negatives` to fix negatives values",
"pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique()",
"'Connecticut': 'US_CT', 'Delaware': 'US_DE', 'District of Columbia': 'US_DC', 'Florida': 'US_FL', 'Georgia': 'US_GA', 'Guam':",
"data directory. plot (bool): Whether to plot the changes. Returns: None \"\"\" csvs",
"+ v # Add 'US_' to abbrev df.replace(US_STATE_ABBREV, inplace=True) df = df.set_index('Province_State') df",
"are cases where cum counts go missing and new counts get missed. New",
"df2 = df2.set_index('Country/Region') more_dfs.append(df2) df = pd.concat([df1] + more_dfs) elif region == 'US':",
"df if any(year in x for year in ['20', '21'])]] # Use only",
"= False) -> None: \"\"\"Fix negative values in daily data. The purpose of",
"Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because bad data continue try: timeseries_path =",
"in its own .csv file. source = dfs['global'] for country in tqdm(good_countries, desc='Countries'):",
"(pd.DataFrame): Data from JHU tracker (e.g. df['global]). filter (bool, optional): Whether to filter",
"population count for roi and add to timeseries df if we do. Args:",
"counts decrease and new_tests becomes a large negative number df_result['new_tests'] = df_result['new_tests'].astype(int) df_result[['cum_tests',",
"df.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global vaccine results missing for: \") for",
"= ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')]",
"df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV def fix_owid_dates(x): y = datetime.strptime(x, '%Y-%m-%d')",
"do Sul', 'SC':'Santa Catarina', 'SE':'Sergipe', 'SP':'Sao Paulo', 'TO':'Tocantins'} for state in tqdm(state_code, desc='Brazilian",
"code','cum_tests']) src_trim['dates2'] = src['Date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['ISO code'].values src_trim['cum_tests']",
"interpolation in between valid days of data. This only affects a small number",
"for first 200 ish days # then cumulative tests are named 'Total_Test_Results' after",
"'US_' + v # Add 'US_' to abbrev df.replace(US_STATE_ABBREV, inplace=True) df = df.set_index('Province_State')",
"Args: data_path (str): Full path to data directory. Returns: None \"\"\" # Where",
"'Grand Princess', 'Recovered'] for i in to_remove: if i in rois: rois.remove(i) for",
"pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df = df.set_index('dates2').fillna(0).astype(int) # Fill NaN with",
"['China', 'Canada', 'Australia']: if country == 'Canada' and kind in 'recovered': continue is_c",
"\"\"\" dfs = [] df_tmp = df.copy() df_tmp.reset_index(inplace=True) for col in columns: cum_col",
"as pd import requests from tqdm import tqdm from typing import Union from",
"src['iso_code'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values",
"in df_timeseries.columns: # Check if OWID testing data already included if 'vaccin' in",
"source['cumulative_cases'].values df['cum_deaths'] = source['cumulative_deaths'].values df['cum_recover'] = source['cumulative_recovered'].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases',",
"= df.join(df_new) df_fixed = df_fixed.fillna(-1).astype(int) return df_fixed def get_owid_us_vaccines(data_path: str, filter_: Union[dict, bool]",
"= dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV",
"for dt in daterange(start_dt, end_dt): dates.append(dt.strftime(\"%m-%d-%Y\")) # cumulative tests are named 'People_Tested' for",
"[\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because no data continue try:",
"# reformat and save that data in its own .csv file. source =",
"0. df_ffill.loc[df_ffill[cum_col].isnull(), new_col] = -1 except: print(f'No {cum_col} data to add for {roi}.')",
"datetime import matplotlib.pyplot as plt import numpy as np import pandas as pd",
"}, inplace=True) dfs.append(source) cases = dfs[0] deaths = dfs[1] recovered = dfs[2] #",
"= datetime.strptime(x, '%m-%d-%Y') return datetime.strftime(y, '%m/%d/%y') def fix_negatives(data_path: str, plot: bool = False)",
"'Kiribati', 'Palau'] for csv in csvs: roi = str(csv).split('.')[0].split('_', 1)[-1] if roi in",
"to -1 so they don't become 0. df_ffill.loc[df_ffill[cum_col].isnull(), new_col] = -1 except: print(f'No",
"matplotlib.pyplot as plt import numpy as np import pandas as pd import requests",
"try: df_raw = pd.read_csv(url) except HTTPError: print(\"Could not download state-level data for Brazil\")",
"src_trim['Alpha-3 code'] = src['iso_code'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values",
"roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source",
"Jan 22 2020 url_template = 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for kind in ['cases', 'mortality', 'recovered']: url_path",
"roi): \"\"\" Check if we have population count for roi and add to",
"final value which screws up interpolator if np.isnan(df.loc[df.index[-1], cum]): df.loc[df.index[-1], cum] = maxx",
"Convert date format # first check if roi reports recovery data as recovered",
"in tqdm(dates, desc=f'Scraping {delta} days of data across all states'): url = url_template",
"provinces = ['Alberta', 'BC', 'Manitoba', 'New Brunswick', 'NL', 'Nova Scotia', 'Nunavut', 'NWT', 'Ontario',",
"'Australia']: if country == 'Canada' and kind in 'recovered': continue is_c = df['Country/Region']",
"test results. Args: data_path (str): Full path to data directory. Returns: None \"\"\"",
"= str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) # Exclude final day because it is often",
"'new_recover', 'new_uninfected']) df['dates2'] = source['date'] df['cum_cases'] = source['totalCases'].values df['cum_deaths'] = source['deaths'].values df['cum_recover'] =",
"os.path.exists(csv): print(\"Removing {} from data_path\".format(roi)) os.remove(csv) except: print(\"could not remove {}. Check that",
"call to get Province-level timeseries data starting on Jan 22 2020 url_template =",
"\"New York\", inplace=True) # fix NY name src_rois = src_trim['region'].unique() for roi in",
"in rois: rois.remove(i) for roi in rois: csv_path = data_path / f'covidtimeseries_{roi}.csv' try:",
"+ province) df['population'] = population except: pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime",
"columns to global csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src = pd.read_csv(url)",
"population = get_population_count(data_path, 'CA_' + province) df['population'] = population except: pass df.sort_values(by=['dates2'], inplace=True)",
"int df.to_csv(data_path / ('covidtimeseries_CA_%s.csv' % province)) def fix_canada_dates(x): return datetime.strptime(x, '%d-%m-%Y') def get_brazil(data_path:",
"','.join(good)) # print(\"JHU data not acceptable for %s\" % ','.join(bad)) return good def",
"in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'Alpha-3",
"already included if 'vaccin' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we",
"to data directory. Returns: ctp_dfs (dict): Dictionary containing US States (keys) and dataframes",
"# add recovery data df.set_index('dates2', inplace=True) df = df.merge(us_recovery_data[state], on='dates2', how='left') df['tmp_new_recover'] =",
"= src['date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['iso_code'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations']",
"URL with urllib.request.urlopen(url_path) as url: data = json.loads(url.read().decode()) source = pd.json_normalize(data[kind]) if kind",
"\"\"\" Get state-level data for Brazil. https://github.com/wcota/covid19br (<NAME>) \"\"\" url = \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try:",
"= pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df = df.set_index('dates2') # Convert to",
"= source['newDeaths'].values df['new_recover'] = df['cum_recover'].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: roi =",
"df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim) if 'Total_Test_Results' in df.columns: df_trim['cum_tests'] = df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim) except HTTPError: print(\"Could",
"y = datetime.strptime(x, '%m-%d-%Y') return datetime.strftime(y, '%m/%d/%y') def fix_negatives(data_path: str, plot: bool =",
"not go from N to a value less than N on a subsequent",
"str): \"\"\"Delete time-series files for regions no longer tracked, such as: Diamond Princess,",
"we can index into date on all 3 dfs at same position source.rename(columns={source.columns[1]:",
"add to timeseries df if we do. Args: data_path (str): Full path to",
"print(\"JHU data acceptable for %s\" % ','.join(good)) # print(\"JHU data not acceptable for",
"are non-decreasing. Args: df (pd.DataFrame): DataFrame containing data for one region. roi (str):",
"JHU files for that country if country in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases',",
"a dataframe except HTTPError: print(\"Could not download data for %s, %s\" % (kind,",
"np.NaN df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting to string df['dates2']",
"src_trim.set_index('dates2', inplace=True, drop=True) src_trim.replace(\"New York State\", \"New York\", inplace=True) # fix NY name",
"Args: data_path (str): Full path to data directory. plot (bool): Whether to plot",
"'covid-tracking-project-recovery.csv' df_raw = pd.read_csv(archived_data) states = df_raw['state'].unique() ctp_dfs = {} for state in",
"np.corrcoef(before, after)[0, 1] plt.title(\"%s %s Raw vs Fixed R=%.5g\" % (roi, c, r))",
"url = url_template % (kind, region) # Create the full data URL try:",
"https://covidtracking.com Args: data_path (str): Full path to data directory. Returns: ctp_dfs (dict): Dictionary",
"0 null final value which screws up interpolator if np.isnan(df.loc[df.index[-1], cum]): df.loc[df.index[-1], cum]",
"dates src_trim['region'] = src['location'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values",
"values for a single region. This function uses monotonic spline interpolation to make",
"data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'Alpha-3 code',",
"default='warn' JHU_FILTER_DEFAULTS = {'confirmed': 5, 'recovered': 1, 'deaths': 0} COVIDTRACKER_FILTER_DEFAULTS = {'cum_cases': 5,",
"and new_tests to csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src = pd.read_csv(url)",
"dummy dataframe with forward filled cumulative counts and perform new cases calculation, then",
"negative first-differences at this # point, increase the corresponding cumulative values by 1.",
"'%Y%m%d') def get_countries(d: pd.DataFrame, filter_: Union[dict, bool] = True): \"\"\"Get a list of",
"~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV def fix_owid_dates(x): y = datetime.strptime(x, '%Y-%m-%d') return",
"columns: cum = 'cum_%s' % c new = 'new_%s' % c before =",
"'US_OR', 'Pennsylvania': 'US_PA', 'Puerto Rico': 'US_PR', 'Rhode Island': 'US_RI', 'South Carolina': 'US_SC', 'South",
"df.loc[bad[bad].index, cum] = None # 2) New daily columns which are negative bad",
"drop=True) df_tests.replace(US_STATE_ABBREV, inplace=True) df_tests.rename(columns={'Province_State': 'roi'}, inplace=True) # now open csvs in data_path that",
"data path.\") try: for i in df_timeseries.columns: # Check if testng data already",
"get_jhu(data_path: str, filter_: Union[dict, bool] = True) -> None: \"\"\"Gets data from Johns",
"# Convert date format df['cum_cases'] = source['cumulative_cases'].values df['cum_deaths'] = source['cumulative_deaths'].values df['cum_recover'] = source['cumulative_recovered'].values",
"pass for i in df_timeseries.columns: # Check if OWID testing data already included",
"York State\", \"New York\", inplace=True) # fix NY name src_rois = src_trim['region'].unique() for",
"plot: plt.figure() plt.plot(df.index, before, label='raw') plt.plot(df.index, after, label='fixed') r = np.corrcoef(before, after)[0, 1]",
"archived_data = data_path / 'covid-tracking-project-recovery.csv' df_raw = pd.read_csv(archived_data) states = df_raw['state'].unique() ctp_dfs =",
"print(\"OWID global vaccine results missing for: \") for roi in roi_codes_dict: if roi",
"v in US_STATE_ABBREV.items(): # get US state abbrev # if not US_STATE_ABBREV[k].startswith('US_'): #",
"Whether to filter by quality criteria. \"\"\" good = set(d['confirmed'].index) if filter_ and",
"pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] =",
"on a subsequent day. This script fixes this by nulling such data and",
"1, 'cum_deaths': 0} US_STATE_ABBREV = { 'Alabama': 'US_AL', 'Alaska': 'US_AK', 'American Samoa': 'US_AS',",
"source = dfs['US'] states = source['confirmed'].index.tolist() us_recovery_data = covid_tracking_recovery(data_path) for state in tqdm(states,",
"df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'new_cases','new_deaths','new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases']",
"cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined, cum_vacc_columns) df = df.loc[:,",
"for state in tqdm(state_code, desc='Brazilian States'): source = df_raw[df_raw['state'] == state] # Only",
"= fix_jhu_testing_dates(i) # handle cases where column is people_tested and then switches to",
"start = df_tmp[df_tmp[cum_col] > 0].index.values[0] df_ffill = df_tmp.iloc[start:] df_ffill.set_index('dates2', drop=True, inplace=True) df_ffill[dummy_cum_col] =",
"r = np.corrcoef(before, after)[0, 1] plt.title(\"%s %s Raw vs Fixed R=%.5g\" % (roi,",
"= pd.concat(sorted_dfs) df_tests.reset_index(inplace=True, drop=True) df_tests.replace(US_STATE_ABBREV, inplace=True) df_tests.rename(columns={'Province_State': 'roi'}, inplace=True) # now open csvs",
"# If we have data in the downloaded JHU files for that country",
"containing cumulative and now new counts. \"\"\" dfs = [] df_tmp = df.copy()",
"print(fnf_error, 'Could not add OWID vaccinations data.') pass for i in df_timeseries.columns: #",
"becomes a large negative number df_combined[['cum_tests', 'new_tests']] = df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values df_combined = df_combined.loc[:,",
"'Alabama': 'US_AL', 'Alaska': 'US_AK', 'American Samoa': 'US_AS', 'Arizona': 'US_AZ', 'Arkansas': 'US_AR', 'California': 'US_CA',",
"in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'Micronesia','Kiribati']:",
"to miss new counts before the gap. So create a dummy dataframe with",
"FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID global vaccines data.') pass for",
"df_trim = pd.DataFrame(columns=['Province_State', 'cum_tests', 'dates2']) df_trim['Province_State'] = df['Province_State'].values df_trim['dates2'] = fix_jhu_testing_dates(i) # handle",
"df.loc[first_non_zero:, cum] == 0 df.loc[bad[bad].index, cum] = None # 2) New daily columns",
"new = 'new_%s' % c before = df[cum].copy() non_zeros = df[df[new] > 0].index",
"= ['Diamond Princess', 'Grand Princess', 'Recovered'] for i in to_remove: if i in",
"filter (bool, optional): Whether to filter by quality criteria. \"\"\" good = set(d['confirmed'].index)",
"df['cum_%s' % kind].sum() == 0: print(\"Negifying 'new_%s' for %s\" % (kind, roi)) df['new_%s'",
"fix_negatives(data_path: str, plot: bool = False) -> None: \"\"\"Fix negative values in daily",
"= source['cumulative_recovered'].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover']",
".csv files produced by the functions above. Args: data_path (str): Full path to",
"OWID testing data already included if 'vaccin' in i: df_timeseries.drop([i], axis=1, inplace=True) #",
"urllib.request, json import os from datetime import timedelta, date import pandas as pd",
"'AL':'Alagoas', 'AM':'Amazonas', 'AP':'Amapa', 'BA':'Bahia','CE':'Ceara', 'DF':'Distrito Federal', 'ES':'Espirito Santo', 'GO':'Goias', 'MA':'Maranhao', 'MG':'Minas Gerais', 'MS':'Mato",
"url_path = url_template % kind # Create the full data URL with urllib.request.urlopen(url_path)",
"the full data URL with urllib.request.urlopen(url_path) as url: data = json.loads(url.read().decode()) source =",
"'Alaska': 'US_AK', 'American Samoa': 'US_AS', 'Arizona': 'US_AZ', 'Arkansas': 'US_AR', 'California': 'US_CA', 'Colorado': 'US_CO',",
"and save that data in its own .csv file. source = dfs['global'] for",
"roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def get_owid_global_vaccines(data_path: str, filter_: Union[dict, bool]",
"not add OWID data.') pass for i in df_timeseries.columns: # Check if OWID",
"0} US_STATE_ABBREV = { 'Alabama': 'US_AL', 'Alaska': 'US_AK', 'American Samoa': 'US_AS', 'Arizona': 'US_AZ',",
"trim down source dataframe src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data = []",
"'daily_vaccinations', 'people_vaccinated', 'people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['region'] = src['location'].values src_trim['cum_vaccinations']",
"5, 'cum_recover': 1, 'cum_deaths': 0} US_STATE_ABBREV = { 'Alabama': 'US_AL', 'Alaska': 'US_AK', 'American",
"# overwrite timeseries CSV def fix_owid_dates(x): y = datetime.strptime(x, '%Y-%m-%d') return datetime.strftime(y, '%m/%d/%y')",
"dates, recovery data (values). \"\"\" archived_data = data_path / 'covid-tracking-project-recovery.csv' df_raw = pd.read_csv(archived_data)",
"counts get missed. New counts spike when cumulative counts go to -1 for",
"= True, fixes: bool = False) -> None: \"\"\" Get US vaccines data",
"% (kind, roi)) df['new_%s' % kind] = -1 out = data_path / (csv.name.split('.')[0]+'.csv')",
"= df['tmp_new_recover'] + df['new_deaths'] # new uninfected calculation df = df.fillna(-1).astype(int) df =",
"= [] for i in tqdm(dates, desc=f'Scraping {delta} days of data across all",
"forward filled cumulative counts and perform new cases calculation, then merge those new",
"def fix_ct_dates(x): return datetime.strptime(str(x), '%Y%m%d') def get_countries(d: pd.DataFrame, filter_: Union[dict, bool] = True):",
"a list of dates for scraping start_dt = date(2020, 4, 12) # When",
"df[new] = df[cum].diff().fillna(0).astype(int).values return df def negify_missing(data_path: str) -> None: \"\"\"Fix negative values",
"} def get_jhu(data_path: str, filter_: Union[dict, bool] = True) -> None: \"\"\"Gets data",
"spike when cumulative counts go to -1 for missing data and the difference",
"'Maryland': 'US_MD', 'Massachusetts': 'US_MA', 'Michigan': 'US_MI', 'Minnesota': 'US_MN', 'Mississippi': 'US_MS', 'Missouri': 'US_MO', 'Montana':",
"to Total_Test_Results if 'People_Tested' in df.columns: df_trim['cum_tests'] = df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim) if 'Total_Test_Results' in",
"dfs[region][kind] = df # Add to dictionary of dataframes # Generate a list",
"= pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'new_cases','new_deaths','new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] =",
"bool = False) -> None: \"\"\" Get testing data from Our World In",
"'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'].apply(fix_canada_dates) # Convert date format",
"df_combined['Date'] = pd.to_datetime(df_combined['dates2']) rois = df_combined['Province_State'].unique() sorted_dfs = [] for roi in rois:",
"pd.read_csv(csv_path) except: print(f\"{csv_path} not found in data path.\") try: for i in df_timeseries.columns:",
"if np.isnan(df.loc[df.index[-1], cum]): df.loc[df.index[-1], cum] = maxx # Then run a loop which:",
"# fix dates src_trim['Alpha-3 code'] = src['ISO code'].values src_trim['cum_tests'] = src['Cumulative total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True,",
"df (pd.DataFrame): DataFrame containing data for one region. roi (str): One region, e.g",
"name src_rois = src_trim['region'].unique() for roi in src_rois: if roi in US_STATE_ABBREV: try:",
"source['date'].apply(fix_canada_dates) # Convert date format df['cum_cases'] = source['cumulative_cases'].values df['cum_deaths'] = source['cumulative_deaths'].values df['cum_recover'] =",
"source['newDeaths'].values df['new_recover'] = df['cum_recover'].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: roi = 'BR_'",
"dataframe. Args: roi (str): Region we are working with; used for print statements.",
"[description] \"\"\" for c in columns: cum = 'cum_%s' % c new =",
"N to a value less than N on a subsequent day. This script",
"Union[dict, bool] = True, fixes: bool = False) -> None: \"\"\" Get testing",
"df.to_csv(out) def remove_old_rois(data_path: str): \"\"\"Delete time-series files for regions no longer tracked, such",
"less than N on a subsequent day. This script fixes this by nulling",
"counts spike when cumulative counts go to -1 for missing data and the",
"= source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[state].values df['cum_deaths'] = source['deaths'].loc[state].values df[['new_cases', 'new_deaths']]",
"ctp_dfs def get_canada(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False)",
"new counts. columns (list): List of columns (without cum_ prefix) so create new",
"only affects a small number of regions. It overwrites the original .csv files",
"df['cum_cases'] = source['totalCases'].values df['cum_deaths'] = source['deaths'].values df['cum_recover'] = source['recovered'].values df['new_cases'] = source['newCases'].values df['new_deaths']",
"US States (keys) and dataframes containing dates, recovery data (values). \"\"\" archived_data =",
"for a single region. This function uses monotonic spline interpolation to make sure",
"check Args: d (pd.DataFrame): Data from JHU tracker (e.g. df['global]). filter (bool, optional):",
"which are: # 1) Cumulative columns which are zero after previously # being",
"'Hawaii': 'US_HI', 'Idaho': 'US_ID', 'Illinois': 'US_IL', 'Indiana': 'US_IN', 'Iowa': 'US_IA', 'Kansas': 'US_KS', 'Kentucky':",
"Check if OWID testng data already included if 'tests' in i: df_timeseries.drop([i], axis=1,",
"< 0: # If there are still negative first-differences at this # point,",
"'South Dakota': 'US_SD', 'Tennessee': 'US_TN', 'Texas': 'US_TX', 'Utah': 'US_UT', 'Vermont': 'US_VT', 'Virgin Islands':",
"dates.append(dt.strftime(\"%m-%d-%Y\")) # cumulative tests are named 'People_Tested' for first 200 ish days #",
"'Micronesia','Kiribati']: print(\"Skipping {}\".format(country)) continue # If we have data in the downloaded JHU",
"dummy_cumulative_new_counts(roi, df, columns: list): \"\"\" There are cases where cum counts go missing",
"tqdm(states, desc='US States'): # For each country if state in ['Diamond Princess', 'Grand",
"timedelta, date import pandas as pd pd.options.mode.chained_assignment = None # default='warn' JHU_FILTER_DEFAULTS =",
"-> None: \"\"\" Get US vaccines data from Our World In Data https://github.com/owid/covid-19-data",
"# get US state abbrev # if not US_STATE_ABBREV[k].startswith('US_'): # US_STATE_ABBREV[k] = 'US_'",
"df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'].apply(fix_canada_dates) #",
"(kind, region) # Create the full data URL try: df = pd.read_csv(url) #",
"tests are named 'Total_Test_Results' after 200 ish days dfs = [] for i",
"because no data continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries",
"src_roi = src_trim[src_trim['region'] == roi] # filter rows that match roi df_combined =",
"York\", inplace=True) # fix NY name src_rois = src_trim['region'].unique() for roi in src_rois:",
"Data https://github.com/owid/covid-19-data Add columns cum_tests and new_tests to csvs in data_path. \"\"\" url",
"'Rhode Island': 'US_RI', 'South Carolina': 'US_SC', 'South Dakota': 'US_SD', 'Tennessee': 'US_TN', 'Texas': 'US_TX',",
"True) -> None: \"\"\"Gets data from Johns Hopkins CSSEGIS (countries only). https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19",
"desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) # Exclude final day because it",
"df_combined = pd.concat(dfs) df_combined.sort_values(by='Province_State', inplace=True) df_combined['Date'] = pd.to_datetime(df_combined['dates2']) rois = df_combined['Province_State'].unique() sorted_dfs =",
"OWID data.') pass for i in df_timeseries.columns: # Check if OWID testng data",
"= df['cum_recover'].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: roi = 'BR_' + state_code[state]",
"df_combined.sort_values(by='Province_State', inplace=True) df_combined['Date'] = pd.to_datetime(df_combined['dates2']) rois = df_combined['Province_State'].unique() sorted_dfs = [] for roi",
"'deaths': 0} COVIDTRACKER_FILTER_DEFAULTS = {'cum_cases': 5, 'cum_recover': 1, 'cum_deaths': 0} US_STATE_ABBREV = {",
"df = pd.concat([df1] + more_dfs) elif region == 'US': # Use state name",
"# print(\"JHU data not acceptable for %s\" % ','.join(bad)) return good def get_population_count(data_path:str,",
"a list of countries from a global dataframe optionally passing a quality check",
"Gets data from Canada's Open Covid group for Canadian Provinces. https://opencovid.ca/ \"\"\" dfs",
"(bool, optional): Whether to filter by quality criteria. \"\"\" good = set(d['confirmed'].index) if",
"fix_ct_dates(x): return datetime.strptime(str(x), '%Y%m%d') def get_countries(d: pd.DataFrame, filter_: Union[dict, bool] = True): \"\"\"Get",
"if testng data already included if 'tests' in i: df_timeseries.drop([i], axis=1, inplace=True) #",
"df['cum_cases'] = source['confirmed'].loc[country].values df['cum_deaths'] = source['deaths'].loc[country].values df['cum_recover'] = source['recovered'].loc[country].values df[['new_cases', 'new_deaths', 'new_recover']] =",
"dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV def",
"of dates for scraping start_dt = date(2020, 4, 12) # When JHU starts",
"None # 2) New daily columns which are negative bad = df.loc[first_non_zero:, new]",
"\"\"\"Fix negative values in daily data. The purpose of this script is to",
"'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'US_AS', 'Micronesia', 'Kiribati', 'Palau'] for csv",
"'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[country].values",
"the changes. Returns: None \"\"\" csvs = [x for x in data_path.iterdir() if",
"date1).days)+1): yield date1 + timedelta(n) def fix_jhu_testing_dates(x): y = datetime.strptime(x, '%m-%d-%Y') return datetime.strftime(y,",
"df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global",
"US state abbrev # if not US_STATE_ABBREV[k].startswith('US_'): # US_STATE_ABBREV[k] = 'US_' + v",
"'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try:",
"'new_cases','new_deaths','new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[state].values df['cum_deaths'] = source['deaths'].loc[state].values",
"from a global dataframe optionally passing a quality check Args: d (pd.DataFrame): Data",
"src_roi = src_trim[src_trim['Alpha-3 code'] == roi] # filter rows that match roi df_combined",
"end_dt - start_dt delta = delta.days for dt in daterange(start_dt, end_dt): dates.append(dt.strftime(\"%m-%d-%Y\")) #",
"{}\".format(state)) pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting to string",
"open csvs in data_path that match rois and merge on csv to add",
"= df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col] = df_ffill[dummy_cum_col].diff().astype('Int64') # If cumulative counts are missing, set new",
"csv in csvs: roi = str(csv).split('.')[0].split('_', 1)[-1] if roi in rois_to_remove: try: if",
"columns: cum_col = 'cum_' + col dummy_cum_col = 'dummy_' + cum_col new_col =",
"population except: pass dfs[state] = df dfs[state].to_csv(data_path / ('covidtimeseries_%s.csv' % state)) else: print(\"No",
"roi reports recovery data as recovered if source['recovered'].isnull().all() == False: df['cum_recover'] = source['recovered'].values",
"US_STATE_ABBREV: try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % US_STATE_ABBREV[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2')",
"df.to_csv(data_path / (csv.name.split('.')[0]+'.csv')) def fix_neg(df: pd.DataFrame, roi: str, columns: list = ['cases', 'deaths',",
"src_rois: if roi in US_STATE_ABBREV: try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % US_STATE_ABBREV[roi])",
"= df_tmp[df_tmp[cum_col] > 0].index.values[0] df_ffill = df_tmp.iloc[start:] df_ffill.set_index('dates2', drop=True, inplace=True) df_ffill[dummy_cum_col] = df_ffill[cum_col].ffill().astype(int).values",
"'NWT', 'Ontario', 'PEI', 'Quebec', 'Saskatchewan', 'Yukon'] # Export timeseries data for each province",
"in tqdm(state_code, desc='Brazilian States'): source = df_raw[df_raw['state'] == state] # Only the given",
"= {'AC':'Acre', 'AL':'Alagoas', 'AM':'Amazonas', 'AP':'Amapa', 'BA':'Bahia','CE':'Ceara', 'DF':'Distrito Federal', 'ES':'Espirito Santo', 'GO':'Goias', 'MA':'Maranhao', 'MG':'Minas",
"'new_tests']].fillna(-1).astype(int).values df_combined = df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global test",
"'Could not add OWID vaccinations data.') pass for i in df_timeseries.columns: # Check",
"% kind # Create the full data URL with urllib.request.urlopen(url_path) as url: data",
"df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[country].values df['cum_deaths'] = source['deaths'].loc[country].values df['cum_recover'] = source['recovered'].loc[country].values df[['new_cases',",
"in str(x)] for csv in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv)",
"'California': 'US_CA', 'Colorado': 'US_CO', 'Connecticut': 'US_CT', 'Delaware': 'US_DE', 'District of Columbia': 'US_DC', 'Florida':",
"source['date'] df['cum_cases'] = source['totalCases'].values df['cum_deaths'] = source['deaths'].values df['cum_recover'] = source['recovered'].values df['new_cases'] = source['newCases'].values",
"roi in src_rois: if roi in US_STATE_ABBREV: try: timeseries_path = data_path / ('covidtimeseries_%s.csv'",
"\"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'region', 'cum_vaccinations', 'daily_vaccinations',",
"'US_ND', 'Northern Mariana Islands':'US_MP', 'Ohio': 'US_OH', 'Oklahoma': 'US_OK', 'Oregon': 'US_OR', 'Pennsylvania': 'US_PA', 'Puerto",
"i in df_timeseries.columns: # Check if OWID testng data already included if 'tests'",
"fix_owid_dates(x): y = datetime.strptime(x, '%Y-%m-%d') return datetime.strftime(y, '%m/%d/%y') def get_jhu_us_states_tests(data_path: str, filter_: Union[dict,",
"{delta} days of data across all states'): url = url_template % i try:",
"print(fnf_error, 'Could not add OWID data.') pass for i in df_timeseries.columns: # Check",
"cum] += 1 else: break # Then repeat if plot: plt.figure() plt.plot(df.index, before,",
"= end_dt - start_dt delta = delta.days for dt in daterange(start_dt, end_dt): dates.append(dt.strftime(\"%m-%d-%Y\"))",
"data from The COVID Tracking Project. https://covidtracking.com Args: data_path (str): Full path to",
"from Our World In Data https://github.com/owid/covid-19-data Add columns to global csvs in data_path.",
"use country name as index df1 = df[has_no_province].set_index('Country/Region') more_dfs = [] for country",
"affects a small number of regions. It overwrites the original .csv files produced",
"timeseries data starting on Jan 22 2020 url_template = 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for kind in",
"'US_DC', 'Florida': 'US_FL', 'Georgia': 'US_GA', 'Guam': 'US_GU', 'Hawaii': 'US_HI', 'Idaho': 'US_ID', 'Illinois': 'US_IL',",
"21 signifies 2020 or 2021 dfs[region][kind] = df # Add to dictionary of",
"'AM':'Amazonas', 'AP':'Amapa', 'BA':'Bahia','CE':'Ceara', 'DF':'Distrito Federal', 'ES':'Espirito Santo', 'GO':'Goias', 'MA':'Maranhao', 'MG':'Minas Gerais', 'MS':'Mato Grosso",
"df_tmp.iloc[start:] df_ffill.set_index('dates2', drop=True, inplace=True) df_ffill[dummy_cum_col] = df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col] = df_ffill[dummy_cum_col].diff().astype('Int64') # If cumulative",
"unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def dummy_cumulative_new_counts(roi, df, columns: list): \"\"\" There are",
"recovery data as recovered if source['recovered'].isnull().all() == False: df['cum_recover'] = source['recovered'].values # check",
"cases should not go from N to a value less than N on",
"new daily numbers. For example, the cumulative total of cases should not go",
"in [\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because no data continue",
"drop=True) src_trim.replace(\"New York State\", \"New York\", inplace=True) # fix NY name src_rois =",
"'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim.set_index('dates2',inplace=True, drop=True) src_rois",
"'Marshall Islands', 'US', 'US_AS', 'Micronesia', 'Kiribati', 'Palau'] for csv in csvs: roi =",
"make sure that cumulative counts are non-decreasing. Args: df (pd.DataFrame): DataFrame containing data",
"so we can add new src_roi = src_trim[src_trim['region'] == roi] # filter rows",
"new df_roi_tests = df_tests[df_tests['roi'] == roi] # filter down to roi df_result =",
"'Massachusetts': 'US_MA', 'Michigan': 'US_MI', 'Minnesota': 'US_MN', 'Mississippi': 'US_MS', 'Missouri': 'US_MO', 'Montana': 'US_MT', 'Nebraska':",
"daily columns which are negative bad = df.loc[first_non_zero:, new] < 0 df.loc[bad[bad].index, cum]",
"is often a partial count. df = df.iloc[:-1] df = fix_neg(df, roi, plot=plot)",
"(e.g. df['global]). filter (bool, optional): Whether to filter by quality criteria. \"\"\" good",
"for k, v in US_STATE_ABBREV.items(): # get US state abbrev # if not",
"for roi in rois: csv_path = data_path / f'covidtimeseries_{roi}.csv' try: df_timeseries = pd.read_csv(csv_path)",
"to data directory. Returns: None \"\"\" # Where JHU stores their data url_template",
"State\", \"New York\", inplace=True) # fix NY name src_rois = src_trim['region'].unique() for roi",
"missed. New counts spike when cumulative counts go to -1 for missing data",
"data_path (str): Full path to data directory. Returns: None \"\"\" url_template = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\"",
"Samoa, Vanuatu, Marshall Islands, US, US_AS (American Somoa)\"\"\" csvs = [x for x",
"Get testing data from Our World In Data https://github.com/owid/covid-19-data Add columns cum_tests and",
"timeseries CSV def fix_owid_dates(x): y = datetime.strptime(x, '%Y-%m-%d') return datetime.strftime(y, '%m/%d/%y') def get_jhu_us_states_tests(data_path:",
"df_tests.rename(columns={'Province_State': 'roi'}, inplace=True) # now open csvs in data_path that match rois and",
"\"Samoa\", \"Vanuatu\"]: # skipping because bad data continue try: timeseries_path = data_path /",
"= src['location'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] =",
"= None # Protect against 0 null final value which screws up interpolator",
"continue is_c = df['Country/Region'] == country df2 = df[is_c].sum(axis=0, skipna=False).to_frame().T df2['Country/Region'] = country",
"that country if country in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'cum_recover', 'new_cases',",
"and convert to int df.to_csv(data_path / ('covidtimeseries_CA_%s.csv' % province)) def fix_canada_dates(x): return datetime.strptime(x,",
"if roi reports recovery data as hospitalizedDischarged elif source['hospitalizedDischarged'].isnull().all() == False: df['cum_recover'] =",
"the original .csv files produced by the functions above. Args: data_path (str): Full",
"roi in rois: csv_path = data_path / f'covidtimeseries_{roi}.csv' try: df_timeseries = pd.read_csv(csv_path) except:",
"axis=1, inplace=True) # drop so we can add new df_roi_tests = df_tests[df_tests['roi'] ==",
"Check if testng data already included if 'tests' in i: df_timeseries.drop([i], axis=1, inplace=True)",
"can add new src_roi = src_trim[src_trim['Alpha-3 code'] == roi] # filter rows that",
"HTTPError: print(\"Could not download state-level data for Brazil\") state_code = {'AC':'Acre', 'AL':'Alagoas', 'AM':'Amazonas',",
"# US_STATE_ABBREV[k] = 'US_' + v # Add 'US_' to abbrev df.replace(US_STATE_ABBREV, inplace=True)",
"remove_old_rois(data_path: str): \"\"\"Delete time-series files for regions no longer tracked, such as: Diamond",
"data not acceptable for %s\" % ','.join(bad)) return good def get_population_count(data_path:str, roi): \"\"\"",
"not download data for %s, %s\" % (kind, region)) else: if region ==",
"total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data = [] # for appending",
"time-series files for regions no longer tracked, such as: Diamond Princess, MS Zaandam,",
"the data into a dataframe except HTTPError: print(\"Could not download data for %s,",
"for regions no longer tracked, such as: Diamond Princess, MS Zaandam, Samoa, Vanuatu,",
"= dfs['global'] for country in tqdm(good_countries, desc='Countries'): # For each country if country",
"we have population count for roi and add to timeseries df if we",
"\"\"\" try: # open population file df_pop = pd.read_csv(data_path / 'population_estimates.csv') except: print(\"Missing",
"os from datetime import timedelta, date import pandas as pd pd.options.mode.chained_assignment = None",
"good = set(d['confirmed'].index) if filter_ and not isinstance(filter_, dict): filter_ = JHU_FILTER_DEFAULTS if",
"Open Covid group for Canadian Provinces. https://opencovid.ca/ \"\"\" dfs = [] # we",
"Brunswick', 'NL', 'Nova Scotia', 'Nunavut', 'NWT', 'Ontario', 'PEI', 'Quebec', 'Saskatchewan', 'Yukon'] # Export",
"int df.to_csv(data_path / ('covidtimeseries_BR_%s.csv' % state_code[state])) def get_owid_tests(data_path: str, filter_: Union[dict, bool] =",
"which are zero after previously # being non-zero bad = df.loc[first_non_zero:, cum] ==",
"= source['deaths'].values df['cum_recover'] = source['recovered'].values df['new_cases'] = source['newCases'].values df['new_deaths'] = source['newDeaths'].values df['new_recover'] =",
"each \"good\" country, # reformat and save that data in its own .csv",
"'US_NC', 'North Dakota': 'US_ND', 'Northern Mariana Islands':'US_MP', 'Ohio': 'US_OH', 'Oklahoma': 'US_OK', 'Oregon': 'US_OR',",
"inplace=True) df = df.merge(us_recovery_data[state], on='dates2', how='left') df['tmp_new_recover'] = df['new_recover'].fillna(0).astype(int) # create temp new",
"= {} for kind in ['confirmed', 'deaths', 'recovered']: url = url_template % (kind,",
"# Use only data columns # 20 or 21 signifies 2020 or 2021",
"Princess', 'MS Zaandam', 'US_AS']: print(\"Skipping {}\".format(state)) continue # If we have data in",
"source['cumulative_deaths'].values df['cum_recover'] = source['cumulative_recovered'].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected']",
"= df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV def fix_owid_dates(x): y = datetime.strptime(x,",
"and -1. We don't want it to spike, and we don't want to",
"days # then cumulative tests are named 'Total_Test_Results' after 200 ish days dfs",
"inplace=True) # fix NY name src_rois = src_trim['region'].unique() for roi in src_rois: if",
"counts to -1 so they don't become 0. df_ffill.loc[df_ffill[cum_col].isnull(), new_col] = -1 except:",
"inplace=True) df = df.set_index('Province_State') df = df.groupby('Province_State').sum() # combine counties to create state",
"-1. We don't want it to spike, and we don't want to miss",
"data_path / (csv.name.split('.')[0]+'.csv') df.to_csv(out) def remove_old_rois(data_path: str): \"\"\"Delete time-series files for regions no",
"source['confirmed'].loc[country].values df['cum_deaths'] = source['deaths'].loc[country].values df['cum_recover'] = source['recovered'].loc[country].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases',",
"df_raw.fillna(0, inplace=True) provinces = ['Alberta', 'BC', 'Manitoba', 'New Brunswick', 'NL', 'Nova Scotia', 'Nunavut',",
"filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2')",
"-> None: \"\"\" Get testing data from Our World In Data https://github.com/owid/covid-19-data Add",
"inplace=True) # drop so we can add new src_roi = src_trim[src_trim['Alpha-3 code'] ==",
"Johns Hopkins CSSEGIS (countries only). https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19 Args: data_path (str): Full path to",
"JHU tracker (e.g. df['global]). filter (bool, optional): Whether to filter by quality criteria.",
"= df_raw[df_raw['state'] == state] # Only the given state df = pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2']",
"# fix dates src_trim['region'] = src['location'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated']",
"str, columns: list = ['cases', 'deaths', 'recover'], plot: bool = False) -> pd.DataFrame:",
"print(\"\") def get_owid_global_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False)",
"import Union from urllib.error import HTTPError import urllib.request, json import os from datetime",
"dfs[region] = {} for kind in ['confirmed', 'deaths', 'recovered']: url = url_template %",
"tests are named 'People_Tested' for first 200 ish days # then cumulative tests",
"# we can index into date on all 3 dfs at same position",
"in population_estimates.csv\".format(args.roi)) return int(population) def covid_tracking_recovery(data_path: str): \"\"\"Gets archived US recovery data from",
"used for print statements. df (pd.DataFrame): DataFrame containing counts but not new counts.",
"spline interpolation in between valid days of data. This only affects a small",
"Returns: pd.DataFrame: [description] \"\"\" for c in columns: cum = 'cum_%s' % c",
"['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path)",
"v # Add 'US_' to abbrev df.replace(US_STATE_ABBREV, inplace=True) df = df.set_index('Province_State') df =",
"date1 + timedelta(n) def fix_jhu_testing_dates(x): y = datetime.strptime(x, '%m-%d-%Y') return datetime.strftime(y, '%m/%d/%y') def",
"get missed. New counts spike when cumulative counts go to -1 for missing",
"+ cum_col new_col = 'new_' + col try: start = df_tmp[df_tmp[cum_col] > 0].index.values[0]",
"CSV print(\"OWID global vaccine results missing for: \") for roi in roi_codes_dict: if",
"csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2',",
"that country if state in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'new_cases','new_deaths','new_uninfected']) df['dates2']",
"that don't have testing data for roi in roi_codes_dict: if roi not in",
"= src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3",
"= src['date'].apply(fix_owid_dates).values # fix dates src_trim['region'] = src['location'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] =",
"for roi in src_rois: if roi in US_STATE_ABBREV: try: timeseries_path = data_path /",
"the gap. So create a dummy dataframe with forward filled cumulative counts and",
"which are negative bad = df.loc[first_non_zero:, new] < 0 df.loc[bad[bad].index, cum] = None",
"i try: df = pd.read_csv(url) df_trim = pd.DataFrame(columns=['Province_State', 'cum_tests', 'dates2']) df_trim['Province_State'] = df['Province_State'].values",
"before the gap. So create a dummy dataframe with forward filled cumulative counts",
"'Saskatchewan', 'Yukon'] # Export timeseries data for each province for province in tqdm(provinces,",
"cases, deaths, recovered here # URL for API call to get Province-level timeseries",
"3 dfs at same position source.rename(columns={source.columns[1]: \"date\" }, inplace=True) dfs.append(source) cases = dfs[0]",
"# 20 or 21 signifies 2020 or 2021 dfs[region][kind] = df # Add",
"c in columns: cum = 'cum_%s' % c new = 'new_%s' % c",
"df['dates2'] = source['date'].apply(fix_ct_dates) # Convert date format # first check if roi reports",
"'People_Tested' in df.columns: df_trim['cum_tests'] = df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim) if 'Total_Test_Results' in df.columns: df_trim['cum_tests'] =",
"# Then run a loop which: while True: # Interpolates the cumulative column",
"-> None: \"\"\" Scrape JHU for US State level test results. Data is",
"after 200 ish days dfs = [] for i in tqdm(dates, desc=f'Scraping {delta}",
"= str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) for kind in ['cases', 'deaths', 'recover']: if df['cum_%s'",
"US, US_AS (American Somoa)\"\"\" csvs = [x for x in data_path.iterdir() if 'covidtimeseries'",
"country in ['China', 'Canada', 'Australia']: if country == 'Canada' and kind in 'recovered':",
"df_trim['cum_tests'] = df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim) except HTTPError: print(\"Could not download tests data for %s\"",
"covid_tracking_recovery(data_path) for state in tqdm(states, desc='US States'): # For each country if state",
"if country in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands',",
"= source['totalCases'].values df['cum_deaths'] = source['deaths'].values df['cum_recover'] = source['recovered'].values df['new_cases'] = source['newCases'].values df['new_deaths'] =",
"overwrite timeseries CSV print(\"OWID global test results missing for: \") for roi in",
"Returns: None \"\"\" url_template = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" # generate a list of dates for",
"'US_ID', 'Illinois': 'US_IL', 'Indiana': 'US_IN', 'Iowa': 'US_IA', 'Kansas': 'US_KS', 'Kentucky': 'US_KY', 'Louisiana': 'US_LA',",
"can add new df_roi_tests = df_tests[df_tests['roi'] == roi] # filter down to roi",
"except HTTPError: print(\"Could not download data for %s, %s\" % (kind, region)) else:",
"df_fixed.fillna(-1).astype(int) return df_fixed def get_owid_us_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes: bool",
"dfs.append(source) cases = dfs[0] deaths = dfs[1] recovered = dfs[2] # combine dfs",
"data in the downloaded JHU files for that country if country in source['confirmed'].index:",
"'BC', 'Manitoba', 'New Brunswick', 'NL', 'Nova Scotia', 'Nunavut', 'NWT', 'Ontario', 'PEI', 'Quebec', 'Saskatchewan',",
"or 'Greece'. columns (list, optional): Columns to make non-decreasing. Defaults to ['cases', 'deaths',",
"\"\"\" Get testing data from Our World In Data https://github.com/owid/covid-19-data Add columns cum_tests",
"# Exclude final day because it is often a partial count. df =",
"df_tests.replace(US_STATE_ABBREV, inplace=True) df_tests.rename(columns={'Province_State': 'roi'}, inplace=True) # now open csvs in data_path that match",
"dfs['global'] for country in tqdm(good_countries, desc='Countries'): # For each country if country in",
"['global', 'US']: dfs[region] = {} for kind in ['confirmed', 'deaths', 'recovered']: url =",
"large negative number df_result['new_tests'] = df_result['new_tests'].astype(int) df_result[['cum_tests', 'new_tests']] = df_result[['cum_tests', 'new_tests']].astype(int) df_result_trim =",
"'people_vaccinated', 'people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['region'] = src['location'].values src_trim['cum_vaccinations'] =",
"cum]): df.loc[df.index[-1], cum] = maxx # Then run a loop which: while True:",
"'US_AS', 'Arizona': 'US_AZ', 'Arkansas': 'US_AR', 'California': 'US_CA', 'Colorado': 'US_CO', 'Connecticut': 'US_CT', 'Delaware': 'US_DE',",
"bool = False) -> pd.DataFrame: \"\"\"Used by `fix_negatives` to fix negatives values for",
"NY name src_rois = src_trim['region'].unique() for roi in src_rois: if roi in US_STATE_ABBREV:",
"'population']].copy() df_result_trim = df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path) # overwrite timeseries CSV except: print(f'Could not",
"= df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim) if 'Total_Test_Results' in df.columns: df_trim['cum_tests'] = df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim) except HTTPError:",
"interpolation to make sure that cumulative counts are non-decreasing. Args: df (pd.DataFrame): DataFrame",
"None: \"\"\"Fix negative values in daily data. The purpose of this script is",
"For each country source = df_raw[df_raw['state'] == state] # Only the given state",
"convert to int df.to_csv(data_path / ('covidtimeseries_CA_%s.csv' % province)) def fix_canada_dates(x): return datetime.strptime(x, '%d-%m-%Y')",
"'Alpha-3 code', 'cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3",
"if roi in rois_to_remove: try: if os.path.exists(csv): print(\"Removing {} from data_path\".format(roi)) os.remove(csv) except:",
"# trim down source dataframe src_trim = pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests']) src_trim['dates2'] = src['Date'].apply(fix_owid_dates).values #",
"not new counts. columns (list): List of columns (without cum_ prefix) so create",
"value less than N on a subsequent day. This script fixes this by",
"'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'region', 'cum_vaccinations', 'daily_vaccinations', 'people_vaccinated', 'people_fully_vaccinated']) src_trim['dates2']",
"skipna=False).to_frame().T df2['Country/Region'] = country df2 = df2.set_index('Country/Region') more_dfs.append(df2) df = pd.concat([df1] + more_dfs)",
"else: after = before # Make sure the first differences are now all",
"Region we are working with; used for print statements. df (pd.DataFrame): DataFrame containing",
"for csv in csvs: roi = str(csv).split('.')[0].split('_', 1)[-1] if roi in rois_to_remove: try:",
"df_timeseries.columns: # Check if OWID testng data already included if 'tests' in i:",
"d[key].index[d[key].max(axis=1) >= minimum].tolist() good = good.intersection(enough) bad = set(d['confirmed'].index).difference(good) # print(\"JHU data acceptable",
"np.isnan(df.loc[df.index[-1], cum]): df.loc[df.index[-1], cum] = maxx # Then run a loop which: while",
"set(d['confirmed'].index) if filter_ and not isinstance(filter_, dict): filter_ = JHU_FILTER_DEFAULTS if filter_: for",
"for: \") for roi in roi_codes_dict: if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \")",
"dataframe except HTTPError: print(\"Could not download data for %s, %s\" % (kind, region))",
"for %s, %s\" % (kind, region)) else: if region == 'global': has_no_province =",
"source['date'].apply(fix_ct_dates) # Convert date format # first check if roi reports recovery data",
"state_code = {'AC':'Acre', 'AL':'Alagoas', 'AM':'Amazonas', 'AP':'Amapa', 'BA':'Bahia','CE':'Ceara', 'DF':'Distrito Federal', 'ES':'Espirito Santo', 'GO':'Goias', 'MA':'Maranhao',",
"spike, and we don't want to miss new counts before the gap. So",
"not get tests data for {roi}.') def daterange(date1, date2): for n in range(int",
"in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) for kind in ['cases',",
"= source['recovered'].values df['new_cases'] = source['newCases'].values df['new_deaths'] = source['newDeaths'].values df['new_recover'] = df['cum_recover'].diff() df['new_uninfected'] =",
"except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID data.') pass for i",
"'US_AS', 'Micronesia', 'Kiribati', 'Palau'] for csv in csvs: roi = str(csv).split('.')[0].split('_', 1)[-1] if",
"df_ffill = df_ffill[~df_ffill.index.duplicated()] # fix duplication issue dfs.append(df_ffill[new_col]) df_new = pd.concat(dfs, axis=1) df_new",
"abbrev df.replace(US_STATE_ABBREV, inplace=True) df = df.set_index('Province_State') df = df.groupby('Province_State').sum() # combine counties to",
"2020 or 2021 dfs[region][kind] = df # Add to dictionary of dataframes #",
"for n in range(int ((date2 - date1).days)+1): yield date1 + timedelta(n) def fix_jhu_testing_dates(x):",
"non-zero bad = df.loc[first_non_zero:, cum] == 0 df.loc[bad[bad].index, cum] = None # 2)",
"'US_TX', 'Utah': 'US_UT', 'Vermont': 'US_VT', 'Virgin Islands': 'US_VI', 'Virginia': 'US_VA', 'Washington': 'US_WA', 'West",
"data, # according to these criteria: good_countries = get_countries(dfs['global'], filter_=filter_) # For each",
"= df[['cum_cases', 'cum_deaths']].diff() # add recovery data df.set_index('dates2', inplace=True) df = df.merge(us_recovery_data[state], on='dates2',",
"if plot: plt.figure() plt.plot(df.index, before, label='raw') plt.plot(df.index, after, label='fixed') r = np.corrcoef(before, after)[0,",
"If we have data in the downloaded JHU files for that country if",
"'US_MA', 'Michigan': 'US_MI', 'Minnesota': 'US_MN', 'Mississippi': 'US_MS', 'Missouri': 'US_MO', 'Montana': 'US_MT', 'Nebraska': 'US_NE',",
"df_trim['cum_tests'] = df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim) if 'Total_Test_Results' in df.columns: df_trim['cum_tests'] = df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim) except",
"region, e.g 'US_MI' or 'Greece'. columns (list, optional): Columns to make non-decreasing. Defaults",
"data for %s, %s\" % (kind, region)) else: if region == 'global': has_no_province",
"is stored as a collection of CSVs per date containing states and test",
"results. Args: data_path (str): Full path to data directory. Returns: None \"\"\" url_template",
"in tqdm(provinces, desc='Canadian Provinces'): source = df_raw[df_raw['province'] == province] # Only the given",
"small number of regions. It overwrites the original .csv files produced by the",
"NaN with 0 and convert to int df.to_csv(data_path / ('covidtimeseries_BR_%s.csv' % state_code[state])) def",
"the bad entries and null the corresponding # cumulative column, which are: #",
"['cases', 'deaths', 'recover']: if df['cum_%s' % kind].sum() == 0: print(\"Negifying 'new_%s' for %s\"",
"Whole countries only; use country name as index df1 = df[has_no_province].set_index('Country/Region') more_dfs =",
"0].index has_negs = before.diff().min() < 0 if len(non_zeros) and has_negs: first_non_zero = non_zeros[0]",
"# point, increase the corresponding cumulative values by 1. neg_index = diff[diff <",
"data needed to fit the models.\"\"\" import bs4 from datetime import datetime import",
"src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values src_trim.set_index('dates2', inplace=True, drop=True) src_trim.replace(\"New York State\",",
"URL for API call to get Province-level timeseries data starting on Jan 22",
"Union[dict, bool] = True, fixes: bool = False) -> None: \"\"\" Get US",
"we don't want to miss new counts before the gap. So create a",
"'PEI', 'Quebec', 'Saskatchewan', 'Yukon'] # Export timeseries data for each province for province",
"n in range(int ((date2 - date1).days)+1): yield date1 + timedelta(n) def fix_jhu_testing_dates(x): y",
"df = df.set_index('dates2').fillna(0).astype(int) # Fill NaN with 0 and convert to int df.to_csv(data_path",
"negify_missing(data_path: str) -> None: \"\"\"Fix negative values in daily data. The purpose of",
"to int df.to_csv(data_path / ('covidtimeseries_BR_%s.csv' % state_code[state])) def get_owid_tests(data_path: str, filter_: Union[dict, bool]",
"combine counties to create state level data df = df[[x for x in",
"in states: # For each country source = df_raw[df_raw['state'] == state] # Only",
"df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID",
"for %s\" % ','.join(good)) # print(\"JHU data not acceptable for %s\" % ','.join(bad))",
"print(\"No data for %s\" % country) source = dfs['US'] states = source['confirmed'].index.tolist() us_recovery_data",
"for %s\" % i) df_combined = pd.concat(dfs) df_combined.sort_values(by='Province_State', inplace=True) df_combined['Date'] = pd.to_datetime(df_combined['dates2']) rois",
"needed to fit the models.\"\"\" import bs4 from datetime import datetime import matplotlib.pyplot",
"country == 'Canada' and kind in 'recovered': continue is_c = df['Country/Region'] == country",
"OWID testng data already included if 'tests' in i: df_timeseries.drop([i], axis=1, inplace=True) #",
"issue dfs.append(df_ffill[new_col]) df_new = pd.concat(dfs, axis=1) df_new = df_new.fillna(-1).astype(int) df_fixed = df.join(df_new) df_fixed",
"try: df = pd.read_csv(url) # Download the data into a dataframe except HTTPError:",
"= \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" # generate a list of dates for scraping start_dt = date(2020,",
"datetime.strptime(x, '%d-%m-%Y') def get_brazil(data_path: str, filter_: Union[dict, bool] = True, fixes: bool =",
"drop so we can add new df_roi_tests = df_tests[df_tests['roi'] == roi] # filter",
"Diamond Princess, MS Zaandam, Samoa, Vanuatu, Marshall Islands, US, US_AS (American Somoa)\"\"\" csvs",
"'NL', 'Nova Scotia', 'Nunavut', 'NWT', 'Ontario', 'PEI', 'Quebec', 'Saskatchewan', 'Yukon'] # Export timeseries",
"src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data = [] # for appending rois",
"previously # being non-zero bad = df.loc[first_non_zero:, cum] == 0 df.loc[bad[bad].index, cum] =",
"'People_Tested' for first 200 ish days # then cumulative tests are named 'Total_Test_Results'",
"string df = df.set_index('dates2') # Convert to int df['new_recover'] = df['cum_recover'].diff() ctp_dfs['US_'+state] =",
"are now all non-negative assert after.diff().min() >= 0 # Replace the values df[new]",
"filter by quality criteria. \"\"\" good = set(d['confirmed'].index) if filter_ and not isinstance(filter_,",
"datetime import datetime import matplotlib.pyplot as plt import numpy as np import pandas",
"(int): Population count for ROI (if exists). \"\"\" try: # open population file",
"Union[dict, bool] = True, fixes: bool = False) -> None: \"\"\" Get global",
"that match rois and merge on csv to add cum_test and new_tests rois",
"csv in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) for kind in",
"'recover']. Returns: pd.DataFrame: [description] \"\"\" for c in columns: cum = 'cum_%s' %",
"day because it is often a partial count. df = df.iloc[:-1] df =",
"df.set_index('dates2') # Convert to int df['new_recover'] = df['cum_recover'].diff() ctp_dfs['US_'+state] = df return ctp_dfs",
"+ df['new_deaths'] # new uninfected calculation df = df.fillna(-1).astype(int) df = df.drop(['tmp_new_recover'], axis=1)",
"\"\"\" There are cases where cum counts go missing and new counts get",
"in data_path.iterdir() if 'covidtimeseries' in str(x)] for csv in tqdm(csvs, desc=\"Regions\"): roi =",
"Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'Micronesia','Kiribati']: print(\"Skipping {}\".format(country)) continue #",
"len(non_zeros) and has_negs: first_non_zero = non_zeros[0] maxx = df.loc[first_non_zero, cum].max() # Find the",
"province] # Only the given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths',",
"timeseries data for each province for province in tqdm(provinces, desc='Canadian Provinces'): source =",
"url_template = (\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") # Scrape the data dfs = {} for",
"deaths, recovered here # URL for API call to get Province-level timeseries data",
"print(roi_codes_dict[roi], end=\" \") print(\"\") def get_owid_global_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes:",
"where cum counts go missing and new counts get missed. New counts spike",
"to -1 for missing data and the difference is taken between a new",
"= None # 2) New daily columns which are negative bad = df.loc[first_non_zero:,",
"(dict): Dictionary containing US States (keys) and dataframes containing dates, recovery data (values).",
"# Export timeseries data for each province for province in tqdm(provinces, desc='Canadian Provinces'):",
">= 0 # Replace the values df[new] = df[cum].diff().fillna(0).astype(int).values return df def negify_missing(data_path:",
"cases.merge(recovered, on=['date', 'province'], how='outer') df_raw = df_rawtemp.merge(deaths, on=['date', 'province'], how='outer') df_raw.fillna(0, inplace=True) provinces",
"# overwrite timeseries CSV except: print(f'Could not get tests data for {roi}.') def",
"1) Cumulative columns which are zero after previously # being non-zero bad =",
"for %s\" % country) source = dfs['US'] states = source['confirmed'].index.tolist() us_recovery_data = covid_tracking_recovery(data_path)",
"directory. plot (bool): Whether to plot the changes. Returns: None \"\"\" csvs =",
"data from Our World In Data https://github.com/owid/covid-19-data Add columns to US csvs in",
"drop so we can add new src_roi = src_trim[src_trim['region'] == roi] # filter",
"'Palau'] for csv in csvs: roi = str(csv).split('.')[0].split('_', 1)[-1] if roi in rois_to_remove:",
"non-decreasing. Args: df (pd.DataFrame): DataFrame containing data for one region. roi (str): One",
"['Diamond Princess', 'Grand Princess', 'Recovered'] for i in to_remove: if i in rois:",
"= src['people_fully_vaccinated'].values src_trim.set_index('dates2', inplace=True, drop=True) src_trim.replace(\"New York State\", \"New York\", inplace=True) # fix",
"to timeseries df if we do. Args: data_path (str): Full path to data",
"','.join(bad)) return good def get_population_count(data_path:str, roi): \"\"\" Check if we have population count",
"= pd.read_csv(archived_data) states = df_raw['state'].unique() ctp_dfs = {} for state in states: #",
"'US_IL', 'Indiana': 'US_IN', 'Iowa': 'US_IA', 'Kansas': 'US_KS', 'Kentucky': 'US_KY', 'Louisiana': 'US_LA', 'Maine': 'US_ME',",
"There are cases where cum counts go missing and new counts get missed.",
"path to data directory. plot (bool): Whether to plot the changes. Returns: None",
"bad entries and null the corresponding # cumulative column, which are: # 1)",
"= False) -> None: \"\"\" Get testing data from Our World In Data",
"Data from JHU tracker (e.g. df['global]). filter (bool, optional): Whether to filter by",
"this column so # we can index into date on all 3 dfs",
"df.replace(US_STATE_ABBREV, inplace=True) df = df.set_index('Province_State') df = df.groupby('Province_State').sum() # combine counties to create",
"plot: bool = False) -> None: \"\"\"Fix negative values in daily data. The",
"directory. Returns: ctp_dfs (dict): Dictionary containing US States (keys) and dataframes containing dates,",
"it to spike, and we don't want to miss new counts before the",
"as hospitalizedDischarged elif source['hospitalizedDischarged'].isnull().all() == False: df['cum_recover'] = source['hospitalizedDischarged'].values else: df['cum_recover'] = np.NaN",
"'Utah': 'US_UT', 'Vermont': 'US_VT', 'Virgin Islands': 'US_VI', 'Virginia': 'US_VA', 'Washington': 'US_WA', 'West Virginia':",
"US_STATE_ABBREV[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add",
"= src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values src_trim.set_index('dates2', inplace=True, drop=True) src_trim.replace(\"New York",
"for x in df if any(year in x for year in ['20', '21'])]]",
"('covidtimeseries_%s.csv' % state)) else: print(\"No data for %s\" % state) def fix_jhu_dates(x): y",
"https://github.com/owid/covid-19-data Add columns to global csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src",
"New daily columns which are negative bad = df.loc[first_non_zero:, new] < 0 df.loc[bad[bad].index,",
"fixes: bool = False) -> None: \"\"\" Get testing data from Our World",
"'new_%s' % c before = df[cum].copy() non_zeros = df[df[new] > 0].index has_negs =",
"cumulative counts decrease and new_tests becomes a large negative number df_result['new_tests'] = df_result['new_tests'].astype(int)",
"'US_AR', 'California': 'US_CA', 'Colorado': 'US_CO', 'Connecticut': 'US_CT', 'Delaware': 'US_DE', 'District of Columbia': 'US_DC',",
"'PE':'Pernambuco', 'PI':'Piaui', 'PR':'Parana', 'RJ':'Rio de Janeiro', 'RN':'Rio Grande do Norte', 'RO':'Rondonia', 'RR':'Roraima', 'RS':'Rio",
"fix NY name src_rois = src_trim['region'].unique() for roi in src_rois: if roi in",
"fixes: bool = False) -> None: \"\"\" Gets data from Canada's Open Covid",
"None \"\"\" # Where JHU stores their data url_template = (\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\")",
"Protect against 0 null final value which screws up interpolator if np.isnan(df.loc[df.index[-1], cum]):",
"up interpolator if np.isnan(df.loc[df.index[-1], cum]): df.loc[df.index[-1], cum] = maxx # Then run a",
"def get_owid_us_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False) ->",
"df['new_recover'] + df['new_deaths'] try: roi = 'BR_' + state_code[state] population = get_population_count(data_path, roi)",
"'deaths', 'recovered']: url = url_template % (kind, region) # Create the full data",
"% province)) def fix_canada_dates(x): return datetime.strptime(x, '%d-%m-%Y') def get_brazil(data_path: str, filter_: Union[dict, bool]",
"state_code[state] population = get_population_count(data_path, roi) df['population'] = population except: print(\"Could not add population",
"such data and applying a monotonic spline interpolation in between valid days of",
"but not new counts. columns (list): List of columns (without cum_ prefix) so",
"get tests data for {roi}.') def daterange(date1, date2): for n in range(int ((date2",
"(bool): Whether to plot the changes. Returns: None \"\"\" csvs = [x for",
"now all non-negative assert after.diff().min() >= 0 # Replace the values df[new] =",
"Dictionary containing US States (keys) and dataframes containing dates, recovery data (values). \"\"\"",
"ish days dfs = [] for i in tqdm(dates, desc=f'Scraping {delta} days of",
"go missing and new counts get missed. New counts spike when cumulative counts",
"0} COVIDTRACKER_FILTER_DEFAULTS = {'cum_cases': 5, 'cum_recover': 1, 'cum_deaths': 0} US_STATE_ABBREV = { 'Alabama':",
"filter_ and not isinstance(filter_, dict): filter_ = JHU_FILTER_DEFAULTS if filter_: for key, minimum",
"roi_codes_dict: if roi not in src_rois: unavailable_testing_data.append(roi) continue if roi_codes_dict[roi] in [\"US\", \"Marshall",
"df_combined[['cum_tests', 'new_tests']] = df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values df_combined = df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path) # overwrite timeseries",
"# Fill NaN with 0 and convert to int dfs[country] = df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path",
"try: population = get_population_count(data_path, 'CA_' + province) df['population'] = population except: pass df.sort_values(by=['dates2'],",
"cum_col new_col = 'new_' + col try: start = df_tmp[df_tmp[cum_col] > 0].index.values[0] df_ffill",
"= get_countries(dfs['global'], filter_=filter_) # For each \"good\" country, # reformat and save that",
"= get_population_count(data_path, state) df['population'] = population except: pass dfs[state] = df dfs[state].to_csv(data_path /",
"df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df = df.set_index('dates2').fillna(0).astype(int) # Fill",
"recovered here # URL for API call to get Province-level timeseries data starting",
"{'confirmed': 5, 'recovered': 1, 'deaths': 0} COVIDTRACKER_FILTER_DEFAULTS = {'cum_cases': 5, 'cum_recover': 1, 'cum_deaths':",
"new_tests to csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src = pd.read_csv(url) roi_codes",
"print(\"Missing population_estimates.csv in data-path\") try: population = df_pop.query('roi == \"{}\"'.format(roi))['population'].values except: print(\"{} population",
"# skipping because bad data continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv' %",
"first-differences at this # point, increase the corresponding cumulative values by 1. neg_index",
"csvs: roi = str(csv).split('.')[0].split('_', 1)[-1] if roi in rois_to_remove: try: if os.path.exists(csv): print(\"Removing",
"data acceptable for %s\" % ','.join(good)) # print(\"JHU data not acceptable for %s\"",
"columns (list): List of columns (without cum_ prefix) so create new counts for.",
"don't want it to spike, and we don't want to miss new counts",
"(kind, roi)) df['new_%s' % kind] = -1 out = data_path / (csv.name.split('.')[0]+'.csv') df.to_csv(out)",
"== 0: print(\"Negifying 'new_%s' for %s\" % (kind, roi)) df['new_%s' % kind] =",
"/ 'covid-tracking-project-recovery.csv' df_raw = pd.read_csv(archived_data) states = df_raw['state'].unique() ctp_dfs = {} for state",
"-> None: \"\"\"Fix negative values in daily data. The purpose of this script",
"State level test results. Data is stored as a collection of CSVs per",
"rois = df_combined['Province_State'].unique() sorted_dfs = [] for roi in rois: df_roi = df_combined[df_combined['Province_State']",
"df (pd.DataFrame): DataFrame containing counts but not new counts. columns (list): List of",
"src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values roi_codes",
"'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'] df['cum_cases'] = source['totalCases'].values df['cum_deaths']",
"fix dates src_trim['Alpha-3 code'] = src['ISO code'].values src_trim['cum_tests'] = src['Cumulative total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True, drop=True)",
"axis=1) try: population = get_population_count(data_path, state) df['population'] = population except: pass dfs[state] =",
"applying a monotonic spline interpolation in between valid days of data. This only",
"url_template = 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for kind in ['cases', 'mortality', 'recovered']: url_path = url_template %",
"# For each country if country in ['Diamond Princess', 'Grand Princess', 'MS Zaandam',",
"Grosso', 'PA':'Para', 'PB':'Paraiba', 'PE':'Pernambuco', 'PI':'Piaui', 'PR':'Parana', 'RJ':'Rio de Janeiro', 'RN':'Rio Grande do Norte',",
"in rois_to_remove: try: if os.path.exists(csv): print(\"Removing {} from data_path\".format(roi)) os.remove(csv) except: print(\"could not",
"/ (csv.name.split('.')[0]+'.csv')) def fix_neg(df: pd.DataFrame, roi: str, columns: list = ['cases', 'deaths', 'recover'],",
"str(x)] for csv in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) #",
"df_ffill.set_index('dates2', drop=True, inplace=True) df_ffill[dummy_cum_col] = df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col] = df_ffill[dummy_cum_col].diff().astype('Int64') # If cumulative counts",
"def get_owid_global_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False) ->",
"# Check if OWID testng data already included if 'tests' in i: df_timeseries.drop([i],",
"Marshall Islands, US, US_AS (American Somoa)\"\"\" csvs = [x for x in data_path.iterdir()",
"'new_recover', 'new_uninfected', 'cum_tests', 'new_tests', 'population']].copy() df_result_trim = df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path) # overwrite timeseries",
"= pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'].apply(fix_canada_dates) # Convert",
"src_trim['Alpha-3 code'] = src['ISO code'].values src_trim['cum_tests'] = src['Cumulative total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True, drop=True) src_rois =",
"the corresponding cumulative values by 1. neg_index = diff[diff < 0].index df.loc[neg_index, cum]",
"df['cum_deaths'] = source['cumulative_deaths'].values df['cum_recover'] = source['cumulative_recovered'].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths',",
"for i in df_timeseries.columns: # Check if OWID testng data already included if",
"True, fixes: bool = False) -> None: \"\"\" Get state-level data for Brazil.",
"R=%.5g\" % (roi, c, r)) plt.legend() else: after = before # Make sure",
"'Greece'. columns (list, optional): Columns to make non-decreasing. Defaults to ['cases', 'deaths', 'recover'].",
"a partial count. df = df.iloc[:-1] df = fix_neg(df, roi, plot=plot) df.to_csv(data_path /",
"pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting to string df['dates2']",
"to string df = df.set_index('dates2') # Convert to int df['new_recover'] = df['cum_recover'].diff() ctp_dfs['US_'+state]",
"to these criteria: good_countries = get_countries(dfs['global'], filter_=filter_) # For each \"good\" country, #",
"OWID vaccinations data.') pass for i in df_timeseries.columns: # Check if OWID vaccines",
"None: \"\"\" Scrape JHU for US State level test results. Data is stored",
"Mariana Islands':'US_MP', 'Ohio': 'US_OH', 'Oklahoma': 'US_OK', 'Oregon': 'US_OR', 'Pennsylvania': 'US_PA', 'Puerto Rico': 'US_PR',",
"roi (str): One region, e.g 'US_MI' or 'Greece'. columns (list, optional): Columns to",
"plt.plot(df.index, before, label='raw') plt.plot(df.index, after, label='fixed') r = np.corrcoef(before, after)[0, 1] plt.title(\"%s %s",
"'South Carolina': 'US_SC', 'South Dakota': 'US_SD', 'Tennessee': 'US_TN', 'Texas': 'US_TX', 'Utah': 'US_UT', 'Vermont':",
"'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'].apply(fix_canada_dates) # Convert date format df['cum_cases'] =",
"still negative first-differences at this # point, increase the corresponding cumulative values by",
"Add columns to global csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src =",
"# overwrite timeseries CSV print(\"OWID global test results missing for: \") for roi",
"we are working with; used for print statements. df (pd.DataFrame): DataFrame containing counts",
"data for each province for province in tqdm(provinces, desc='Canadian Provinces'): source = df_raw[df_raw['province']",
"\"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try: df_raw = pd.read_csv(url) except HTTPError: print(\"Could not download state-level data for",
"-1 # Handle cases where # cumulative counts decrease and new_tests becomes a",
"'%m/%d/%y') def get_jhu_us_states_tests(data_path: str, filter_: Union[dict, bool] = False) -> None: \"\"\" Scrape",
"= [] for country in ['China', 'Canada', 'Australia']: if country == 'Canada' and",
"fix_neg(df, roi, plot=plot) df.to_csv(data_path / (csv.name.split('.')[0]+'.csv')) def fix_neg(df: pd.DataFrame, roi: str, columns: list",
"rois: rois.remove(i) for roi in rois: csv_path = data_path / f'covidtimeseries_{roi}.csv' try: df_timeseries",
"import pandas as pd pd.options.mode.chained_assignment = None # default='warn' JHU_FILTER_DEFAULTS = {'confirmed': 5,",
"create a dummy dataframe with forward filled cumulative counts and perform new cases",
"so we can add new src_roi = src_trim[src_trim['Alpha-3 code'] == roi] # filter",
"== roi] # filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated',",
"for appending rois that don't have testing data for roi in roi_codes_dict: if",
"counts for. Returns: df_fixed (pd.DataFrame): DataFrame containing cumulative and now new counts. \"\"\"",
"(list): List of columns (without cum_ prefix) so create new counts for. Returns:",
"file df_pop = pd.read_csv(data_path / 'population_estimates.csv') except: print(\"Missing population_estimates.csv in data-path\") try: population",
"df.loc[first_non_zero:, new] < 0 df.loc[bad[bad].index, cum] = None # Protect against 0 null",
"key, minimum in filter_.items(): enough = d[key].index[d[key].max(axis=1) >= minimum].tolist() good = good.intersection(enough) bad",
"'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, 'CA_' + province)",
"for getting data needed to fit the models.\"\"\" import bs4 from datetime import",
"for roi in rois: df_roi = df_combined[df_combined['Province_State'] == roi] df_roi = df_roi.sort_values(by=\"Date\") df_roi['new_tests']",
"'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, country) df['population']",
"get US state abbrev # if not US_STATE_ABBREV[k].startswith('US_'): # US_STATE_ABBREV[k] = 'US_' +",
"'New Mexico': 'US_NM', 'New York': 'US_NY', 'North Carolina': 'US_NC', 'North Dakota': 'US_ND', 'Northern",
"except: print(f\"{csv_path} not found in data path.\") try: for i in df_timeseries.columns: #",
"World In Data https://github.com/owid/covid-19-data Add columns to US csvs in data_path. \"\"\" url",
"= df_raw[df_raw['province'] == province] # Only the given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths',",
"desc='US States'): # For each country if state in ['Diamond Princess', 'Grand Princess',",
"False) -> None: \"\"\" Get state-level data for Brazil. https://github.com/wcota/covid19br (<NAME>) \"\"\" url",
"directory. roi (str): Region. Returns: population (int): Population count for ROI (if exists).",
"'Manitoba', 'New Brunswick', 'NL', 'Nova Scotia', 'Nunavut', 'NWT', 'Ontario', 'PEI', 'Quebec', 'Saskatchewan', 'Yukon']",
"'RR':'Roraima', 'RS':'Rio Grande do Sul', 'SC':'Santa Catarina', 'SE':'Sergipe', 'SP':'Sao Paulo', 'TO':'Tocantins'} for state",
"in the downloaded JHU files for that country if country in source['confirmed'].index: df",
"cumulative count and -1. We don't want it to spike, and we don't",
"'new_' + col try: start = df_tmp[df_tmp[cum_col] > 0].index.values[0] df_ffill = df_tmp.iloc[start:] df_ffill.set_index('dates2',",
"df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path) # overwrite timeseries CSV except: print(f'Could not get tests data",
"tqdm from typing import Union from urllib.error import HTTPError import urllib.request, json import",
"not isinstance(filter_, dict): filter_ = JHU_FILTER_DEFAULTS if filter_: for key, minimum in filter_.items():",
"York': 'US_NY', 'North Carolina': 'US_NC', 'North Dakota': 'US_ND', 'Northern Mariana Islands':'US_MP', 'Ohio': 'US_OH',",
"src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'Alpha-3 code', 'cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']) src_trim['dates2']",
"Hopkins CSSEGIS (countries only). https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19 Args: data_path (str): Full path to data",
"= [] # we will append dfs for cases, deaths, recovered here #",
"'PA':'Para', 'PB':'Paraiba', 'PE':'Pernambuco', 'PI':'Piaui', 'PR':'Parana', 'RJ':'Rio de Janeiro', 'RN':'Rio Grande do Norte', 'RO':'Rondonia',",
"testng data already included if 'tests' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop",
"# Convert to int df['new_recover'] = df['cum_recover'].diff() ctp_dfs['US_'+state] = df return ctp_dfs def",
"src_trim = pd.DataFrame(columns=['dates2', 'Alpha-3 code', 'cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values #",
"Brazil. https://github.com/wcota/covid19br (<NAME>) \"\"\" url = \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try: df_raw = pd.read_csv(url) except HTTPError:",
"and new_tests becomes a large negative number df_result['new_tests'] = df_result['new_tests'].astype(int) df_result[['cum_tests', 'new_tests']] =",
"# cumulative column, which are: # 1) Cumulative columns which are zero after",
"df def negify_missing(data_path: str) -> None: \"\"\"Fix negative values in daily data. The",
"Then repeat if plot: plt.figure() plt.plot(df.index, before, label='raw') plt.plot(df.index, after, label='fixed') r =",
"add cum_test and new_tests rois = df_tests.roi.unique().tolist() to_remove = ['Diamond Princess', 'Grand Princess',",
"state level data df = df[[x for x in df if any(year in",
"source['deaths'].values df['cum_recover'] = source['recovered'].values df['new_cases'] = source['newCases'].values df['new_deaths'] = source['newDeaths'].values df['new_recover'] = df['cum_recover'].diff()",
"end_dt): dates.append(dt.strftime(\"%m-%d-%Y\")) # cumulative tests are named 'People_Tested' for first 200 ish days",
"200 ish days dfs = [] for i in tqdm(dates, desc=f'Scraping {delta} days",
"= [] for roi in rois: df_roi = df_combined[df_combined['Province_State'] == roi] df_roi =",
"\"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because no data continue try: timeseries_path",
"pd.concat([df1] + more_dfs) elif region == 'US': # Use state name as index",
"for {roi}.') def daterange(date1, date2): for n in range(int ((date2 - date1).days)+1): yield",
"population = get_population_count(data_path, roi) df['population'] = population except: print(\"Could not add population data",
"pd.read_csv(csv) for kind in ['cases', 'deaths', 'recover']: if df['cum_%s' % kind].sum() == 0:",
"of Columbia': 'US_DC', 'Florida': 'US_FL', 'Georgia': 'US_GA', 'Guam': 'US_GU', 'Hawaii': 'US_HI', 'Idaho': 'US_ID',",
"= dfs[0] deaths = dfs[1] recovered = dfs[2] # combine dfs df_rawtemp =",
"= get_population_count(data_path, 'CA_' + province) df['population'] = population except: pass df.sort_values(by=['dates2'], inplace=True) #",
"download data for %s, %s\" % (kind, region)) else: if region == 'global':",
"= url_template % kind # Create the full data URL with urllib.request.urlopen(url_path) as",
"population_estimates.csv\".format(args.roi)) return int(population) def covid_tracking_recovery(data_path: str): \"\"\"Gets archived US recovery data from The",
"data directory. roi (str): Region. Returns: population (int): Population count for ROI (if",
"0].index.values[0] df_ffill = df_tmp.iloc[start:] df_ffill.set_index('dates2', drop=True, inplace=True) df_ffill[dummy_cum_col] = df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col] = df_ffill[dummy_cum_col].diff().astype('Int64')",
"df['cum_cases'] = source['confirmed'].loc[state].values df['cum_deaths'] = source['deaths'].loc[state].values df[['new_cases', 'new_deaths']] = df[['cum_cases', 'cum_deaths']].diff() # add",
"to string df = df.set_index('dates2').fillna(0).astype(int) # Fill NaN with 0 and convert to",
"States (keys) and dataframes containing dates, recovery data (values). \"\"\" archived_data = data_path",
"source['recovered'].values df['new_cases'] = source['newCases'].values df['new_deaths'] = source['newDeaths'].values df['new_recover'] = df['cum_recover'].diff() df['new_uninfected'] = df['new_recover']",
"'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'] df['cum_cases'] = source['totalCases'].values df['cum_deaths'] =",
"df_timeseries.columns: # Check if testng data already included if 'tests' in i: df_timeseries.drop([i],",
"df[cum].interpolate('pchip') diff = after.diff() if diff.min() < 0: # If there are still",
"= pd.DataFrame(columns=['dates2', 'region', 'cum_vaccinations', 'daily_vaccinations', 'people_vaccinated', 'people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates",
"cumulative counts are non-decreasing. Args: df (pd.DataFrame): DataFrame containing data for one region.",
"code'] == roi] # filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_tests']], how='left',",
"data_path (str): Full path to data directory. roi (str): Region. Returns: population (int):",
"src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values roi_codes = pd.read_csv(data_path /",
"src['Date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['ISO code'].values src_trim['cum_tests'] = src['Cumulative total'].fillna(-1).astype(int).values",
"if 'covidtimeseries' in str(x)] for csv in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df",
"df_result.loc[df_result['new_tests'] < 0, 'new_tests'] = -1 # Handle cases where # cumulative counts",
"pandas as pd import requests from tqdm import tqdm from typing import Union",
"desc='Canadian Provinces'): source = df_raw[df_raw['province'] == province] # Only the given province df",
"\"\"\" Get global vaccines data from Our World In Data https://github.com/owid/covid-19-data Add columns",
"'Nova Scotia', 'Nunavut', 'NWT', 'Ontario', 'PEI', 'Quebec', 'Saskatchewan', 'Yukon'] # Export timeseries data",
"['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'Micronesia','Kiribati']: print(\"Skipping",
"'MT':'Mato Grosso', 'PA':'Para', 'PB':'Paraiba', 'PE':'Pernambuco', 'PI':'Piaui', 'PR':'Parana', 'RJ':'Rio de Janeiro', 'RN':'Rio Grande do",
"= df_fixed.fillna(-1).astype(int) return df_fixed def get_owid_us_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes:",
"in ['confirmed', 'deaths', 'recovered']: url = url_template % (kind, region) # Create the",
"'West Virginia': 'US_WV', 'Wisconsin': 'US_WI', 'Wyoming': 'US_WY' } def get_jhu(data_path: str, filter_: Union[dict,",
"df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi) df_tests = pd.concat(sorted_dfs) df_tests.reset_index(inplace=True, drop=True) df_tests.replace(US_STATE_ABBREV, inplace=True) df_tests.rename(columns={'Province_State': 'roi'}, inplace=True) #",
"df return ctp_dfs def get_canada(data_path: str, filter_: Union[dict, bool] = True, fixes: bool",
"str, filter_: Union[dict, bool] = True, fixes: bool = False) -> None: \"\"\"",
"src_trim.replace(\"New York State\", \"New York\", inplace=True) # fix NY name src_rois = src_trim['region'].unique()",
"inplace=True) df_result.loc[df_result['new_tests'] < 0, 'new_tests'] = -1 # Handle cases where # cumulative",
"and the difference is taken between a new cumulative count and -1. We",
"from Canada's Open Covid group for Canadian Provinces. https://opencovid.ca/ \"\"\" dfs = []",
"-> pd.DataFrame: \"\"\"Used by `fix_negatives` to fix negatives values for a single region.",
"df.groupby('Province_State').sum() # combine counties to create state level data df = df[[x for",
"df.copy() df_tmp.reset_index(inplace=True) for col in columns: cum_col = 'cum_' + col dummy_cum_col =",
"switches to Total_Test_Results if 'People_Tested' in df.columns: df_trim['cum_tests'] = df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim) if 'Total_Test_Results'",
"inplace=True) # drop so we can add new df_roi_tests = df_tests[df_tests['roi'] == roi]",
"counts are non-decreasing. Args: df (pd.DataFrame): DataFrame containing data for one region. roi",
"regions. It overwrites the original .csv files produced by the functions above. Args:",
"for country in tqdm(good_countries, desc='Countries'): # For each country if country in ['Diamond",
"state df = pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2'] = source['date'].apply(fix_ct_dates) # Convert date format # first",
"# Create the full data URL with urllib.request.urlopen(url_path) as url: data = json.loads(url.read().decode())",
"if filter_ and not isinstance(filter_, dict): filter_ = JHU_FILTER_DEFAULTS if filter_: for key,",
"DataFrame containing cumulative and now new counts. \"\"\" dfs = [] df_tmp =",
"new] < 0 df.loc[bad[bad].index, cum] = None # Protect against 0 null final",
"so create new counts for. Returns: df_fixed (pd.DataFrame): DataFrame containing cumulative and now",
"= src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values roi_codes =",
"Get global vaccines data from Our World In Data https://github.com/owid/covid-19-data Add columns to",
"# Find the bad entries and null the corresponding # cumulative column, which",
"= dfs['US'] states = source['confirmed'].index.tolist() us_recovery_data = covid_tracking_recovery(data_path) for state in tqdm(states, desc='US",
"JHU for US State level test results. Data is stored as a collection",
"df_trim['dates2'] = fix_jhu_testing_dates(i) # handle cases where column is people_tested and then switches",
"if 'vaccin' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we can add",
"= df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests'] < 0, 'new_tests'] = -1 # Handle cases where #",
"print(f'Could not get tests data for {roi}.') def daterange(date1, date2): for n in",
"this by nulling such data and applying a monotonic spline interpolation in between",
"csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src = pd.read_csv(url) roi_codes = pd.read_csv(data_path",
"src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict =",
"\"\"\" csvs = [x for x in data_path.iterdir() if 'covidtimeseries' in str(x)] for",
"Islands':'US_MP', 'Ohio': 'US_OH', 'Oklahoma': 'US_OK', 'Oregon': 'US_OR', 'Pennsylvania': 'US_PA', 'Puerto Rico': 'US_PR', 'Rhode",
"column so # we can index into date on all 3 dfs at",
"col try: start = df_tmp[df_tmp[cum_col] > 0].index.values[0] df_ffill = df_tmp.iloc[start:] df_ffill.set_index('dates2', drop=True, inplace=True)",
"pd.concat(sorted_dfs) df_tests.reset_index(inplace=True, drop=True) df_tests.replace(US_STATE_ABBREV, inplace=True) df_tests.rename(columns={'Province_State': 'roi'}, inplace=True) # now open csvs in",
"are zero after previously # being non-zero bad = df.loc[first_non_zero:, cum] == 0",
"if we do. Args: data_path (str): Full path to data directory. roi (str):",
"we do. Args: data_path (str): Full path to data directory. roi (str): Region.",
"0 df.loc[bad[bad].index, cum] = None # 2) New daily columns which are negative",
"= ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')]",
"roi, plot=plot) df.to_csv(data_path / (csv.name.split('.')[0]+'.csv')) def fix_neg(df: pd.DataFrame, roi: str, columns: list =",
"= country df2 = df2.set_index('Country/Region') more_dfs.append(df2) df = pd.concat([df1] + more_dfs) elif region",
"Rico': 'US_PR', 'Rhode Island': 'US_RI', 'South Carolina': 'US_SC', 'South Dakota': 'US_SD', 'Tennessee': 'US_TN',",
"'US_WA', 'West Virginia': 'US_WV', 'Wisconsin': 'US_WI', 'Wyoming': 'US_WY' } def get_jhu(data_path: str, filter_:",
"0 df.loc[bad[bad].index, cum] = None # Protect against 0 null final value which",
"pd.DataFrame(columns=['Province_State', 'cum_tests', 'dates2']) df_trim['Province_State'] = df['Province_State'].values df_trim['dates2'] = fix_jhu_testing_dates(i) # handle cases where",
"'US_LA', 'Maine': 'US_ME', 'Maryland': 'US_MD', 'Massachusetts': 'US_MA', 'Michigan': 'US_MI', 'Minnesota': 'US_MN', 'Mississippi': 'US_MS',",
"are working with; used for print statements. df (pd.DataFrame): DataFrame containing counts but",
"each country source = df_raw[df_raw['state'] == state] # Only the given state df",
"down to roi df_result = df_timeseries.merge(df_roi_tests, on='dates2', how='left') df_result.fillna(-1, inplace=True) df_result.loc[df_result['new_tests'] < 0,",
"df_raw['state'].unique() ctp_dfs = {} for state in states: # For each country source",
"= df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim) except HTTPError: print(\"Could not download tests data for %s\" %",
"/ ('covidtimeseries_%s.csv' % US_STATE_ABBREV[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error,",
"'US_VT', 'Virgin Islands': 'US_VI', 'Virginia': 'US_VA', 'Washington': 'US_WA', 'West Virginia': 'US_WV', 'Wisconsin': 'US_WI',",
"(keys) and dataframes containing dates, recovery data (values). \"\"\" archived_data = data_path /",
"timeseries CSV except: print(f'Could not get tests data for {roi}.') def daterange(date1, date2):",
"files for that country if state in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths',",
"\"\"\" archived_data = data_path / 'covid-tracking-project-recovery.csv' df_raw = pd.read_csv(archived_data) states = df_raw['state'].unique() ctp_dfs",
"dfs = [] for i in tqdm(dates, desc=f'Scraping {delta} days of data across",
"Provinces'): source = df_raw[df_raw['province'] == province] # Only the given province df =",
"# handle cases where column is people_tested and then switches to Total_Test_Results if",
"Population count for ROI (if exists). \"\"\" try: # open population file df_pop",
"daterange(date1, date2): for n in range(int ((date2 - date1).days)+1): yield date1 + timedelta(n)",
"'Pennsylvania': 'US_PA', 'Puerto Rico': 'US_PR', 'Rhode Island': 'US_RI', 'South Carolina': 'US_SC', 'South Dakota':",
"def fix_jhu_dates(x): y = datetime.strptime(x, '%m/%d/%y') return datetime.strftime(y, '%m/%d/%y') def fix_ct_dates(x): return datetime.strptime(str(x),",
"# filter down to roi df_result = df_timeseries.merge(df_roi_tests, on='dates2', how='left') df_result.fillna(-1, inplace=True) df_result.loc[df_result['new_tests']",
"all 3 dfs at same position source.rename(columns={source.columns[1]: \"date\" }, inplace=True) dfs.append(source) cases =",
"['Alberta', 'BC', 'Manitoba', 'New Brunswick', 'NL', 'Nova Scotia', 'Nunavut', 'NWT', 'Ontario', 'PEI', 'Quebec',",
"'District of Columbia': 'US_DC', 'Florida': 'US_FL', 'Georgia': 'US_GA', 'Guam': 'US_GU', 'Hawaii': 'US_HI', 'Idaho':",
"df = fix_neg(df, roi, plot=plot) df.to_csv(data_path / (csv.name.split('.')[0]+'.csv')) def fix_neg(df: pd.DataFrame, roi: str,",
"unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def get_owid_global_vaccines(data_path: str, filter_: Union[dict, bool] = True,",
"getting data needed to fit the models.\"\"\" import bs4 from datetime import datetime",
"= pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim = pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests']) src_trim['dates2']",
"containing dates, recovery data (values). \"\"\" archived_data = data_path / 'covid-tracking-project-recovery.csv' df_raw =",
"Union[dict, bool] = True, fixes: bool = False) -> None: \"\"\" Get state-level",
"range(int ((date2 - date1).days)+1): yield date1 + timedelta(n) def fix_jhu_testing_dates(x): y = datetime.strptime(x,",
"population (int): Population count for ROI (if exists). \"\"\" try: # open population",
"dates for scraping start_dt = date(2020, 4, 12) # When JHU starts reporting",
"Args: data_path (str): Full path to data directory. roi (str): Region. Returns: population",
"= d[key].index[d[key].max(axis=1) >= minimum].tolist() good = good.intersection(enough) bad = set(d['confirmed'].index).difference(good) # print(\"JHU data",
"region)) else: if region == 'global': has_no_province = df['Province/State'].isnull() # Whole countries only;",
"col dummy_cum_col = 'dummy_' + cum_col new_col = 'new_' + col try: start",
"['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'US_AS']: print(\"Skipping {}\".format(state)) continue # If we",
"df.to_csv(timeseries_path) # overwrite timeseries CSV def fix_owid_dates(x): y = datetime.strptime(x, '%Y-%m-%d') return datetime.strftime(y,",
"# filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left',",
"'ES':'Espirito Santo', 'GO':'Goias', 'MA':'Maranhao', 'MG':'Minas Gerais', 'MS':'Mato Grosso do Sul', 'MT':'Mato Grosso', 'PA':'Para',",
"results missing for: \") for roi in roi_codes_dict: if roi in unavailable_testing_data: print(roi_codes_dict[roi],",
"= dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV",
"code']).to_dict() # trim down source dataframe src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data",
"number df_combined[['cum_tests', 'new_tests']] = df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values df_combined = df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path) # overwrite",
"included if 'vaccin' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we can",
"monotonic spline interpolation to make sure that cumulative counts are non-decreasing. Args: df",
"'Minnesota': 'US_MN', 'Mississippi': 'US_MS', 'Missouri': 'US_MO', 'Montana': 'US_MT', 'Nebraska': 'US_NE', 'Nevada': 'US_NV', 'New",
"import os from datetime import timedelta, date import pandas as pd pd.options.mode.chained_assignment =",
"this # point, increase the corresponding cumulative values by 1. neg_index = diff[diff",
"https://opencovid.ca/ \"\"\" dfs = [] # we will append dfs for cases, deaths,",
"cases where # cumulative counts decrease and new_tests becomes a large negative number",
"format # first check if roi reports recovery data as recovered if source['recovered'].isnull().all()",
"data for %s\" % state) def fix_jhu_dates(x): y = datetime.strptime(x, '%m/%d/%y') return datetime.strftime(y,",
"add new src_roi = src_trim[src_trim['region'] == roi] # filter rows that match roi",
"create temp new recover for df['new_uninfected'] = df['tmp_new_recover'] + df['new_deaths'] # new uninfected",
"for roi and add to timeseries df if we do. Args: data_path (str):",
"for %s\" % (kind, roi)) df['new_%s' % kind] = -1 out = data_path",
"each country if state in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'US_AS']: print(\"Skipping",
"'new_deaths', 'cum_recover', 'new_recover', 'new_uninfected', 'cum_tests', 'new_tests', 'population']].copy() df_result_trim = df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path) #",
"those new cases back into dataframe. Args: roi (str): Region we are working",
"== country df2 = df[is_c].sum(axis=0, skipna=False).to_frame().T df2['Country/Region'] = country df2 = df2.set_index('Country/Region') more_dfs.append(df2)",
"data df.set_index('dates2', inplace=True) df = df.merge(us_recovery_data[state], on='dates2', how='left') df['tmp_new_recover'] = df['new_recover'].fillna(0).astype(int) # create",
"= df.drop(['tmp_new_recover'], axis=1) try: population = get_population_count(data_path, state) df['population'] = population except: pass",
"df_tests.reset_index(inplace=True, drop=True) df_tests.replace(US_STATE_ABBREV, inplace=True) df_tests.rename(columns={'Province_State': 'roi'}, inplace=True) # now open csvs in data_path",
"columns (list, optional): Columns to make non-decreasing. Defaults to ['cases', 'deaths', 'recover']. Returns:",
"vaccinations data.') pass for i in df_timeseries.columns: # Check if OWID vaccines data",
"original .csv files produced by the functions above. Args: data_path (str): Full path",
"df # Add to dictionary of dataframes # Generate a list of countries",
"'RN':'Rio Grande do Norte', 'RO':'Rondonia', 'RR':'Roraima', 'RS':'Rio Grande do Sul', 'SC':'Santa Catarina', 'SE':'Sergipe',",
"ctp_dfs = {} for state in states: # For each country source =",
"'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'].apply(fix_canada_dates) # Convert date format df['cum_cases'] = source['cumulative_cases'].values",
"str, filter_: Union[dict, bool] = True) -> None: \"\"\"Gets data from Johns Hopkins",
"for each province for province in tqdm(provinces, desc='Canadian Provinces'): source = df_raw[df_raw['province'] ==",
"Virginia': 'US_WV', 'Wisconsin': 'US_WI', 'Wyoming': 'US_WY' } def get_jhu(data_path: str, filter_: Union[dict, bool]",
"csv in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) # Exclude final",
"no data continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries =",
"df.fillna(-1).astype(int) df = df.drop(['tmp_new_recover'], axis=1) try: population = get_population_count(data_path, state) df['population'] = population",
"requests from tqdm import tqdm from typing import Union from urllib.error import HTTPError",
"country, # reformat and save that data in its own .csv file. source",
"= datetime.strptime(x, '%Y-%m-%d') return datetime.strftime(y, '%m/%d/%y') def get_jhu_us_states_tests(data_path: str, filter_: Union[dict, bool] =",
"= data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as",
"to int dfs[country] = df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path / ('covidtimeseries_%s.csv' % country)) else: print(\"No data",
"'vaccin' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we can add new",
"'new_uninfected']) df['dates2'] = source['date'].apply(fix_canada_dates) # Convert date format df['cum_cases'] = source['cumulative_cases'].values df['cum_deaths'] =",
"growth after = df[cum].interpolate('pchip') diff = after.diff() if diff.min() < 0: # If",
"'Louisiana': 'US_LA', 'Maine': 'US_ME', 'Maryland': 'US_MD', 'Massachusetts': 'US_MA', 'Michigan': 'US_MI', 'Minnesota': 'US_MN', 'Mississippi':",
"source['confirmed'].index.tolist() us_recovery_data = covid_tracking_recovery(data_path) for state in tqdm(states, desc='US States'): # For each",
"source['recovered'].values # check if roi reports recovery data as hospitalizedDischarged elif source['hospitalizedDischarged'].isnull().all() ==",
"= pd.json_normalize(data[kind]) if kind == 'cases': source.drop('cases', axis=1, inplace=True) # removing this column",
"'people_fully_vaccinated'] df = dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite",
"DataFrame containing counts but not new counts. columns (list): List of columns (without",
"Where JHU stores their data url_template = (\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") # Scrape the",
"to have # monotonic growth after = df[cum].interpolate('pchip') diff = after.diff() if diff.min()",
"to int df.to_csv(data_path / ('covidtimeseries_CA_%s.csv' % province)) def fix_canada_dates(x): return datetime.strptime(x, '%d-%m-%Y') def",
"df['cum_recover'] = source['hospitalizedDischarged'].values else: df['cum_recover'] = np.NaN df.sort_values(by=['dates2'], inplace=True) # sort by datetime",
"dataframe optionally passing a quality check Args: d (pd.DataFrame): Data from JHU tracker",
"d (pd.DataFrame): Data from JHU tracker (e.g. df['global]). filter (bool, optional): Whether to",
"Princess', 'Recovered'] for i in to_remove: if i in rois: rois.remove(i) for roi",
"Total_Test_Results if 'People_Tested' in df.columns: df_trim['cum_tests'] = df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim) if 'Total_Test_Results' in df.columns:",
"df[cum].diff().fillna(0).astype(int).values return df def negify_missing(data_path: str) -> None: \"\"\"Fix negative values in daily",
"to add for {roi}.') df_ffill[new_col] = -1 df_ffill = df_ffill[~df_ffill.index.duplicated()] # fix duplication",
"pd.concat(dfs) df_combined.sort_values(by='Province_State', inplace=True) df_combined['Date'] = pd.to_datetime(df_combined['dates2']) rois = df_combined['Province_State'].unique() sorted_dfs = [] for",
"df_timeseries.drop([i], axis=1, inplace=True) # drop so we can add new df_roi_tests = df_tests[df_tests['roi']",
"= np.corrcoef(before, after)[0, 1] plt.title(\"%s %s Raw vs Fixed R=%.5g\" % (roi, c,",
"'cases': source.drop('cases', axis=1, inplace=True) # removing this column so # we can index",
"'Indiana': 'US_IN', 'Iowa': 'US_IA', 'Kansas': 'US_KS', 'Kentucky': 'US_KY', 'Louisiana': 'US_LA', 'Maine': 'US_ME', 'Maryland':",
"'US_MS', 'Missouri': 'US_MO', 'Montana': 'US_MT', 'Nebraska': 'US_NE', 'Nevada': 'US_NV', 'New Hampshire': 'US_NH', 'New",
"pd.read_csv(data_path / 'population_estimates.csv') except: print(\"Missing population_estimates.csv in data-path\") try: population = df_pop.query('roi ==",
"want to miss new counts before the gap. So create a dummy dataframe",
"in roi_codes_dict: if roi not in src_rois: unavailable_testing_data.append(roi) continue if roi_codes_dict[roi] in [\"US\",",
"= -1 df_ffill = df_ffill[~df_ffill.index.duplicated()] # fix duplication issue dfs.append(df_ffill[new_col]) df_new = pd.concat(dfs,",
"numbers. For example, the cumulative total of cases should not go from N",
"['confirmed', 'deaths', 'recovered']: url = url_template % (kind, region) # Create the full",
"if country in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover',",
"index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID vaccinations data.') pass",
"# When JHU starts reporting end_dt = date.today() dates = [] delta =",
"df['new_recover'].fillna(0).astype(int) # create temp new recover for df['new_uninfected'] = df['tmp_new_recover'] + df['new_deaths'] #",
"df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df =",
"'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined, cum_vacc_columns)",
"we can add new df_roi_tests = df_tests[df_tests['roi'] == roi] # filter down to",
"source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[country].values df['cum_deaths'] = source['deaths'].loc[country].values df['cum_recover'] = source['recovered'].loc[country].values",
"a quality check Args: d (pd.DataFrame): Data from JHU tracker (e.g. df['global]). filter",
"'MS':'Mato Grosso do Sul', 'MT':'Mato Grosso', 'PA':'Para', 'PB':'Paraiba', 'PE':'Pernambuco', 'PI':'Piaui', 'PR':'Parana', 'RJ':'Rio de",
"axis=1, inplace=True) # removing this column so # we can index into date",
"countries only; use country name as index df1 = df[has_no_province].set_index('Country/Region') more_dfs = []",
"directory. Returns: None \"\"\" url_template = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" # generate a list of dates",
"US_STATE_ABBREV[k].startswith('US_'): # US_STATE_ABBREV[k] = 'US_' + v # Add 'US_' to abbrev df.replace(US_STATE_ABBREV,",
"pass for i in df_timeseries.columns: # Check if OWID vaccines data already included",
"their data url_template = (\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") # Scrape the data dfs =",
"df_timeseries.merge(src_roi[['cum_tests']], how='left', on='dates2') df_combined['new_tests'] = df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests'] < 0, 'new_tests'] = -1 #",
"df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[state].values df['cum_deaths'] = source['deaths'].loc[state].values df[['new_cases', 'new_deaths']] = df[['cum_cases',",
"if roi in US_STATE_ABBREV: try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % US_STATE_ABBREV[roi]) df_timeseries",
"in US_STATE_ABBREV.items(): # get US state abbrev # if not US_STATE_ABBREV[k].startswith('US_'): # US_STATE_ABBREV[k]",
"from JHU tracker (e.g. df['global]). filter (bool, optional): Whether to filter by quality",
"bool = False) -> None: \"\"\" Gets data from Canada's Open Covid group",
"there are still negative first-differences at this # point, increase the corresponding cumulative",
"more_dfs = [] for country in ['China', 'Canada', 'Australia']: if country == 'Canada'",
"data_path / ('covidtimeseries_%s.csv' % US_STATE_ABBREV[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error:",
"False) -> None: \"\"\" Scrape JHU for US State level test results. Data",
"= {} for state in states: # For each country source = df_raw[df_raw['state']",
"\"\"\"Used by `fix_negatives` to fix negatives values for a single region. This function",
"ctp_dfs (dict): Dictionary containing US States (keys) and dataframes containing dates, recovery data",
"%s\" % ','.join(good)) # print(\"JHU data not acceptable for %s\" % ','.join(bad)) return",
"miss new counts before the gap. So create a dummy dataframe with forward",
"For each \"good\" country, # reformat and save that data in its own",
"Carolina': 'US_SC', 'South Dakota': 'US_SD', 'Tennessee': 'US_TN', 'Texas': 'US_TX', 'Utah': 'US_UT', 'Vermont': 'US_VT',",
"'RS':'Rio Grande do Sul', 'SC':'Santa Catarina', 'SE':'Sergipe', 'SP':'Sao Paulo', 'TO':'Tocantins'} for state in",
"code', 'cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code']",
"to_remove = ['Diamond Princess', 'Grand Princess', 'Recovered'] for i in to_remove: if i",
"src_trim['Alpha-3 code'].unique() unavailable_testing_data = [] # for appending rois that don't have testing",
"df_roi_tests = df_tests[df_tests['roi'] == roi] # filter down to roi df_result = df_timeseries.merge(df_roi_tests,",
"get_owid_global_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False) -> None:",
"= dfs[1] recovered = dfs[2] # combine dfs df_rawtemp = cases.merge(recovered, on=['date', 'province'],",
"if state in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'US_AS']: print(\"Skipping {}\".format(state)) continue",
"to add cum_test and new_tests rois = df_tests.roi.unique().tolist() to_remove = ['Diamond Princess', 'Grand",
"kind].sum() == 0: print(\"Negifying 'new_%s' for %s\" % (kind, roi)) df['new_%s' % kind]",
"True, fixes: bool = False) -> None: \"\"\" Get US vaccines data from",
"for one region. roi (str): One region, e.g 'US_MI' or 'Greece'. columns (list,",
"if roi reports recovery data as recovered if source['recovered'].isnull().all() == False: df['cum_recover'] =",
"are missing, set new counts to -1 so they don't become 0. df_ffill.loc[df_ffill[cum_col].isnull(),",
"files produced by the functions above. Args: data_path (str): Full path to data",
"'%d-%m-%Y') def get_brazil(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False)",
"= data_path / ('covidtimeseries_%s.csv' % US_STATE_ABBREV[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as",
"'Washington': 'US_WA', 'West Virginia': 'US_WV', 'Wisconsin': 'US_WI', 'Wyoming': 'US_WY' } def get_jhu(data_path: str,",
"perform new cases calculation, then merge those new cases back into dataframe. Args:",
"list of dates for scraping start_dt = date(2020, 4, 12) # When JHU",
"Dakota': 'US_SD', 'Tennessee': 'US_TN', 'Texas': 'US_TX', 'Utah': 'US_UT', 'Vermont': 'US_VT', 'Virgin Islands': 'US_VI',",
"it is often a partial count. df = df.iloc[:-1] df = fix_neg(df, roi,",
"source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'new_cases','new_deaths','new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates)",
"unavailable_testing_data = [] # for appending rois that don't have testing data for",
"df_ffill[dummy_cum_col].diff().astype('Int64') # If cumulative counts are missing, set new counts to -1 so",
"df_fixed = df_fixed.fillna(-1).astype(int) return df_fixed def get_owid_us_vaccines(data_path: str, filter_: Union[dict, bool] = True,",
"rois.remove(i) for roi in rois: csv_path = data_path / f'covidtimeseries_{roi}.csv' try: df_timeseries =",
"False) -> None: \"\"\"Fix negative values in daily data. The purpose of this",
"https://github.com/owid/covid-19-data Add columns cum_tests and new_tests to csvs in data_path. \"\"\" url =",
"own .csv file. source = dfs['global'] for country in tqdm(good_countries, desc='Countries'): # For",
"changes. Returns: None \"\"\" csvs = [x for x in data_path.iterdir() if 'covidtimeseries'",
"(pd.DataFrame): DataFrame containing data for one region. roi (str): One region, e.g 'US_MI'",
"if diff.min() < 0: # If there are still negative first-differences at this",
"counts decrease and new_tests becomes a large negative number df_combined[['cum_tests', 'new_tests']] = df_combined[['cum_tests',",
"df['cum_deaths'] = source['deaths'].loc[state].values df[['new_cases', 'new_deaths']] = df[['cum_cases', 'cum_deaths']].diff() # add recovery data df.set_index('dates2',",
"= pd.DataFrame(columns=['dates2', 'Alpha-3 code', 'cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix",
"handle cases where column is people_tested and then switches to Total_Test_Results if 'People_Tested'",
"def fix_jhu_testing_dates(x): y = datetime.strptime(x, '%m-%d-%Y') return datetime.strftime(y, '%m/%d/%y') def fix_negatives(data_path: str, plot:",
"pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim.set_index('dates2',inplace=True,",
"roi in roi_codes_dict: if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def get_owid_global_vaccines(data_path:",
"'region', 'cum_vaccinations', 'daily_vaccinations', 'people_vaccinated', 'people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['region'] =",
"in df_timeseries.columns: # Check if OWID vaccines data already included if 'vaccin' in",
"df = pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2'] = source['date'].apply(fix_ct_dates) # Convert date format # first check",
"append dfs for cases, deaths, recovered here # URL for API call to",
"roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim = pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests'])",
"# 1) Cumulative columns which are zero after previously # being non-zero bad",
">= minimum].tolist() good = good.intersection(enough) bad = set(d['confirmed'].index).difference(good) # print(\"JHU data acceptable for",
"pd.DataFrame(columns=['dates2', 'region', 'cum_vaccinations', 'daily_vaccinations', 'people_vaccinated', 'people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['region']",
"are negative bad = df.loc[first_non_zero:, new] < 0 df.loc[bad[bad].index, cum] = None #",
"COVID Tracking Project. https://covidtracking.com Args: data_path (str): Full path to data directory. Returns:",
"null final value which screws up interpolator if np.isnan(df.loc[df.index[-1], cum]): df.loc[df.index[-1], cum] =",
"plt.plot(df.index, after, label='fixed') r = np.corrcoef(before, after)[0, 1] plt.title(\"%s %s Raw vs Fixed",
"Our World In Data https://github.com/owid/covid-19-data Add columns to global csvs in data_path. \"\"\"",
"'recovered']: url_path = url_template % kind # Create the full data URL with",
"across all states'): url = url_template % i try: df = pd.read_csv(url) df_trim",
"'DF':'Distrito Federal', 'ES':'Espirito Santo', 'GO':'Goias', 'MA':'Maranhao', 'MG':'Minas Gerais', 'MS':'Mato Grosso do Sul', 'MT':'Mato",
"Grande do Norte', 'RO':'Rondonia', 'RR':'Roraima', 'RS':'Rio Grande do Sul', 'SC':'Santa Catarina', 'SE':'Sergipe', 'SP':'Sao",
"df['cum_deaths'] = source['deaths'].values df['cum_recover'] = source['recovered'].values df['new_cases'] = source['newCases'].values df['new_deaths'] = source['newDeaths'].values df['new_recover']",
"being non-zero bad = df.loc[first_non_zero:, cum] == 0 df.loc[bad[bad].index, cum] = None #",
"= df_raw['state'].unique() ctp_dfs = {} for state in states: # For each country",
"r)) plt.legend() else: after = before # Make sure the first differences are",
"data for roi in roi_codes_dict: if roi not in src_rois: unavailable_testing_data.append(roi) continue if",
"drop so we can add new src_roi = src_trim[src_trim['Alpha-3 code'] == roi] #",
"- date1).days)+1): yield date1 + timedelta(n) def fix_jhu_testing_dates(x): y = datetime.strptime(x, '%m-%d-%Y') return",
"string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df = df.set_index('dates2').fillna(0).astype(int) #",
"= True, fixes: bool = False) -> None: \"\"\" Get state-level data for",
"'recover'], plot: bool = False) -> pd.DataFrame: \"\"\"Used by `fix_negatives` to fix negatives",
"# fix NY name src_rois = src_trim['region'].unique() for roi in src_rois: if roi",
"df_tmp[df_tmp[cum_col] > 0].index.values[0] df_ffill = df_tmp.iloc[start:] df_ffill.set_index('dates2', drop=True, inplace=True) df_ffill[dummy_cum_col] = df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col]",
"c before = df[cum].copy() non_zeros = df[df[new] > 0].index has_negs = before.diff().min() <",
"= -1 except: print(f'No {cum_col} data to add for {roi}.') df_ffill[new_col] = -1",
"files for regions no longer tracked, such as: Diamond Princess, MS Zaandam, Samoa,",
"% country)) else: print(\"No data for %s\" % country) source = dfs['US'] states",
"and applying a monotonic spline interpolation in between valid days of data. This",
"src = pd.read_csv(url) roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() #",
"('covidtimeseries_CA_%s.csv' % province)) def fix_canada_dates(x): return datetime.strptime(x, '%d-%m-%Y') def get_brazil(data_path: str, filter_: Union[dict,",
"= population except: pass # Fill NaN with 0 and convert to int",
"to fit the models.\"\"\" import bs4 from datetime import datetime import matplotlib.pyplot as",
"= df[cum].diff().fillna(0).astype(int).values return df def negify_missing(data_path: str) -> None: \"\"\"Fix negative values in",
"counts and perform new cases calculation, then merge those new cases back into",
"for API call to get Province-level timeseries data starting on Jan 22 2020",
"['cases', 'mortality', 'recovered']: url_path = url_template % kind # Create the full data",
"get_population_count(data_path, country) df['population'] = population except: pass # Fill NaN with 0 and",
"Union[dict, bool] = True, fixes: bool = False) -> None: \"\"\" Gets data",
"None: \"\"\" Get state-level data for Brazil. https://github.com/wcota/covid19br (<NAME>) \"\"\" url = \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\"",
"~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global vaccine results missing for: \")",
"country in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected'])",
"values in daily data. The purpose of this script is to fix spurious",
"df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col] = df_ffill[dummy_cum_col].diff().astype('Int64') # If cumulative counts are missing, set new counts",
"as fnf_error: print(fnf_error, 'Could not add OWID global vaccines data.') pass for i",
"df['cum_recover'] = source['cumulative_recovered'].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] =",
"add OWID data.') pass for i in df_timeseries.columns: # Check if OWID testng",
"country) source = dfs['US'] states = source['confirmed'].index.tolist() us_recovery_data = covid_tracking_recovery(data_path) for state in",
"'new_tests'] = -1 # Handle cases where # cumulative counts decrease and new_tests",
"Check if we have population count for roi and add to timeseries df",
"str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) for kind in ['cases', 'deaths', 'recover']: if df['cum_%s' %",
"This function uses monotonic spline interpolation to make sure that cumulative counts are",
"df[is_c].sum(axis=0, skipna=False).to_frame().T df2['Country/Region'] = country df2 = df2.set_index('Country/Region') more_dfs.append(df2) df = pd.concat([df1] +",
"'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'] df['cum_cases'] = source['totalCases'].values df['cum_deaths'] = source['deaths'].values",
"df[cum].copy() non_zeros = df[df[new] > 0].index has_negs = before.diff().min() < 0 if len(non_zeros)",
"print(\"JHU data not acceptable for %s\" % ','.join(bad)) return good def get_population_count(data_path:str, roi):",
"print(\"Could not download state-level data for Brazil\") state_code = {'AC':'Acre', 'AL':'Alagoas', 'AM':'Amazonas', 'AP':'Amapa',",
"csvs = [x for x in data_path.iterdir() if 'covidtimeseries' in str(x)] for csv",
"to data directory. roi (str): Region. Returns: population (int): Population count for ROI",
"'deaths', 'recover']. Returns: pd.DataFrame: [description] \"\"\" for c in columns: cum = 'cum_%s'",
"Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'US_AS', 'Micronesia', 'Kiribati', 'Palau'] for",
"df[['new_cases', 'new_deaths']] = df[['cum_cases', 'cum_deaths']].diff() # add recovery data df.set_index('dates2', inplace=True) df =",
"'US': # Use state name as index # for k, v in US_STATE_ABBREV.items():",
"= 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for kind in ['cases', 'mortality', 'recovered']: url_path = url_template % kind",
"we can add new src_roi = src_trim[src_trim['region'] == roi] # filter rows that",
"after previously # being non-zero bad = df.loc[first_non_zero:, cum] == 0 df.loc[bad[bad].index, cum]",
"Exclude final day because it is often a partial count. df = df.iloc[:-1]",
"# 2) New daily columns which are negative bad = df.loc[first_non_zero:, new] <",
"for cases, deaths, recovered here # URL for API call to get Province-level",
"'BR_' + state_code[state] population = get_population_count(data_path, roi) df['population'] = population except: print(\"Could not",
"elif region == 'US': # Use state name as index # for k,",
"source = pd.json_normalize(data[kind]) if kind == 'cases': source.drop('cases', axis=1, inplace=True) # removing this",
"df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, country)",
"# If cumulative counts are missing, set new counts to -1 so they",
"'cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates)",
"pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df = df.set_index('dates2') # Convert to int",
"for x in data_path.iterdir() if 'covidtimeseries' in str(x)] for csv in tqdm(csvs, desc=\"Regions\"):",
"print(\"Skipping {}\".format(state)) continue # If we have data in the downloaded JHU files",
"then cumulative tests are named 'Total_Test_Results' after 200 ish days dfs = []",
"Only the given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected'])",
"df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, 'CA_' + province) df['population'] = population",
"data from Our World In Data https://github.com/owid/covid-19-data Add columns to global csvs in",
"cum] == 0 df.loc[bad[bad].index, cum] = None # 2) New daily columns which",
"# URL for API call to get Province-level timeseries data starting on Jan",
"same position source.rename(columns={source.columns[1]: \"date\" }, inplace=True) dfs.append(source) cases = dfs[0] deaths = dfs[1]",
"and convert to int df.to_csv(data_path / ('covidtimeseries_BR_%s.csv' % state_code[state])) def get_owid_tests(data_path: str, filter_:",
"Region. Returns: population (int): Population count for ROI (if exists). \"\"\" try: #",
"'Could not add OWID global vaccines data.') pass for i in df_timeseries.columns: #",
"to fix spurious negative values in new daily numbers. For example, the cumulative",
"y = datetime.strptime(x, '%Y-%m-%d') return datetime.strftime(y, '%m/%d/%y') def get_jhu_us_states_tests(data_path: str, filter_: Union[dict, bool]",
"csv to add cum_test and new_tests rois = df_tests.roi.unique().tolist() to_remove = ['Diamond Princess',",
"recovery data (values). \"\"\" archived_data = data_path / 'covid-tracking-project-recovery.csv' df_raw = pd.read_csv(archived_data) states",
"pd.to_datetime(df_combined['dates2']) rois = df_combined['Province_State'].unique() sorted_dfs = [] for roi in rois: df_roi =",
"for Brazil. https://github.com/wcota/covid19br (<NAME>) \"\"\" url = \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try: df_raw = pd.read_csv(url) except",
"inplace=True) df_tests.rename(columns={'Province_State': 'roi'}, inplace=True) # now open csvs in data_path that match rois",
"its own .csv file. source = dfs['global'] for country in tqdm(good_countries, desc='Countries'): #",
"print(\"Skipping {}\".format(country)) continue # If we have data in the downloaded JHU files",
"source['recovered'].isnull().all() == False: df['cum_recover'] = source['recovered'].values # check if roi reports recovery data",
"to ['cases', 'deaths', 'recover']. Returns: pd.DataFrame: [description] \"\"\" for c in columns: cum",
"= pd.read_csv(csv) for kind in ['cases', 'deaths', 'recover']: if df['cum_%s' % kind].sum() ==",
"roi_codes_dict: if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def dummy_cumulative_new_counts(roi, df, columns:",
"bad = df.loc[first_non_zero:, new] < 0 df.loc[bad[bad].index, cum] = None # Protect against",
"end_dt = date.today() dates = [] delta = end_dt - start_dt delta =",
"dfs = [] df_tmp = df.copy() df_tmp.reset_index(inplace=True) for col in columns: cum_col =",
"print(\"Negifying 'new_%s' for %s\" % (kind, roi)) df['new_%s' % kind] = -1 out",
"longer tracked, such as: Diamond Princess, MS Zaandam, Samoa, Vanuatu, Marshall Islands, US,",
"state_code[state])) def get_owid_tests(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False)",
"for province in tqdm(provinces, desc='Canadian Provinces'): source = df_raw[df_raw['province'] == province] # Only",
"'Arkansas': 'US_AR', 'California': 'US_CA', 'Colorado': 'US_CO', 'Connecticut': 'US_CT', 'Delaware': 'US_DE', 'District of Columbia':",
"= source['date'].apply(fix_ct_dates) # Convert date format # first check if roi reports recovery",
"region == 'global': has_no_province = df['Province/State'].isnull() # Whole countries only; use country name",
"= [] # for appending rois that don't have testing data for roi",
"'New Brunswick', 'NL', 'Nova Scotia', 'Nunavut', 'NWT', 'Ontario', 'PEI', 'Quebec', 'Saskatchewan', 'Yukon'] #",
"data URL with urllib.request.urlopen(url_path) as url: data = json.loads(url.read().decode()) source = pd.json_normalize(data[kind]) if",
"else: break # Then repeat if plot: plt.figure() plt.plot(df.index, before, label='raw') plt.plot(df.index, after,",
"(list, optional): Columns to make non-decreasing. Defaults to ['cases', 'deaths', 'recover']. Returns: pd.DataFrame:",
"None: \"\"\" Get testing data from Our World In Data https://github.com/owid/covid-19-data Add columns",
"dfs.append(df_ffill[new_col]) df_new = pd.concat(dfs, axis=1) df_new = df_new.fillna(-1).astype(int) df_fixed = df.join(df_new) df_fixed =",
"= df_tests.roi.unique().tolist() to_remove = ['Diamond Princess', 'Grand Princess', 'Recovered'] for i in to_remove:",
"population = df_pop.query('roi == \"{}\"'.format(roi))['population'].values except: print(\"{} population estimate not found in population_estimates.csv\".format(args.roi))",
"in ['China', 'Canada', 'Australia']: if country == 'Canada' and kind in 'recovered': continue",
"df['Province_State'].values df_trim['dates2'] = fix_jhu_testing_dates(i) # handle cases where column is people_tested and then",
"[] for roi in rois: df_roi = df_combined[df_combined['Province_State'] == roi] df_roi = df_roi.sort_values(by=\"Date\")",
"bool] = True): \"\"\"Get a list of countries from a global dataframe optionally",
"dfs[country].to_csv(data_path / ('covidtimeseries_%s.csv' % country)) else: print(\"No data for %s\" % country) source",
"= df['new_recover'] + df['new_deaths'] try: roi = 'BR_' + state_code[state] population = get_population_count(data_path,",
"format df['cum_cases'] = source['cumulative_cases'].values df['cum_deaths'] = source['cumulative_deaths'].values df['cum_recover'] = source['cumulative_recovered'].values df[['new_cases', 'new_deaths', 'new_recover']]",
"+ state_code[state] population = get_population_count(data_path, roi) df['population'] = population except: print(\"Could not add",
"rois that don't have testing data for roi in roi_codes_dict: if roi not",
"roi in rois: df_roi = df_combined[df_combined['Province_State'] == roi] df_roi = df_roi.sort_values(by=\"Date\") df_roi['new_tests'] =",
"roi] df_roi = df_roi.sort_values(by=\"Date\") df_roi['new_tests'] = df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi) df_tests = pd.concat(sorted_dfs) df_tests.reset_index(inplace=True, drop=True)",
"= 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src = pd.read_csv(url) roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3",
"('covidtimeseries_%s.csv' % US_STATE_ABBREV[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could",
"on csv to add cum_test and new_tests rois = df_tests.roi.unique().tolist() to_remove = ['Diamond",
"label='raw') plt.plot(df.index, after, label='fixed') r = np.corrcoef(before, after)[0, 1] plt.title(\"%s %s Raw vs",
"\"{}\"'.format(roi))['population'].values except: print(\"{} population estimate not found in population_estimates.csv\".format(args.roi)) return int(population) def covid_tracking_recovery(data_path:",
"(pd.DataFrame): DataFrame containing cumulative and now new counts. \"\"\" dfs = [] df_tmp",
"source.rename(columns={source.columns[1]: \"date\" }, inplace=True) dfs.append(source) cases = dfs[0] deaths = dfs[1] recovered =",
"dfs.append(df_trim) except HTTPError: print(\"Could not download tests data for %s\" % i) df_combined",
"try: population = df_pop.query('roi == \"{}\"'.format(roi))['population'].values except: print(\"{} population estimate not found in",
"Scrape the data dfs = {} for region in ['global', 'US']: dfs[region] =",
"i in df_timeseries.columns: # Check if OWID testing data already included if 'vaccin'",
"pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID vaccinations data.')",
"and then switches to Total_Test_Results if 'People_Tested' in df.columns: df_trim['cum_tests'] = df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim)",
"df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV def fix_owid_dates(x):",
"df_result['new_tests'] = df_result['new_tests'].astype(int) df_result[['cum_tests', 'new_tests']] = df_result[['cum_tests', 'new_tests']].astype(int) df_result_trim = df_result[['dates2', 'cum_cases', 'new_cases',",
"on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined, cum_vacc_columns) df =",
"we can add new src_roi = src_trim[src_trim['Alpha-3 code'] == roi] # filter rows",
"'Grand Princess', 'MS Zaandam', 'US_AS']: print(\"Skipping {}\".format(state)) continue # If we have data",
"# For each country source = df_raw[df_raw['state'] == state] # Only the given",
"for US State level test results. Data is stored as a collection of",
"script is to fix spurious negative values in new daily numbers. For example,",
"df['cum_recover'] = source['recovered'].values df['new_cases'] = source['newCases'].values df['new_deaths'] = source['newDeaths'].values df['new_recover'] = df['cum_recover'].diff() df['new_uninfected']",
"cumulative counts go to -1 for missing data and the difference is taken",
"roi not in src_rois: unavailable_testing_data.append(roi) continue if roi_codes_dict[roi] in [\"US\", \"Marshall Islands\", \"Micronesia\",",
"< 0 if len(non_zeros) and has_negs: first_non_zero = non_zeros[0] maxx = df.loc[first_non_zero, cum].max()",
"'Wisconsin': 'US_WI', 'Wyoming': 'US_WY' } def get_jhu(data_path: str, filter_: Union[dict, bool] = True)",
"quality criteria. \"\"\" good = set(d['confirmed'].index) if filter_ and not isinstance(filter_, dict): filter_",
"= df_ffill[dummy_cum_col].diff().astype('Int64') # If cumulative counts are missing, set new counts to -1",
"< 0, 'new_tests'] = -1 # Handle cases where # cumulative counts decrease",
"countries from a global dataframe optionally passing a quality check Args: d (pd.DataFrame):",
"in ['20', '21'])]] # Use only data columns # 20 or 21 signifies",
"df2 = df[is_c].sum(axis=0, skipna=False).to_frame().T df2['Country/Region'] = country df2 = df2.set_index('Country/Region') more_dfs.append(df2) df =",
"df['tmp_new_recover'] + df['new_deaths'] # new uninfected calculation df = df.fillna(-1).astype(int) df = df.drop(['tmp_new_recover'],",
"and not isinstance(filter_, dict): filter_ = JHU_FILTER_DEFAULTS if filter_: for key, minimum in",
"src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values src_trim.set_index('dates2', inplace=True, drop=True)",
"files for that country if country in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths',",
"to fix negatives values for a single region. This function uses monotonic spline",
"World In Data https://github.com/owid/covid-19-data Add columns cum_tests and new_tests to csvs in data_path.",
"data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'region', 'cum_vaccinations',",
"where # cumulative counts decrease and new_tests becomes a large negative number df_result['new_tests']",
"= (\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") # Scrape the data dfs = {} for region",
"If there are still negative first-differences at this # point, increase the corresponding",
"\"\"\" dfs = [] # we will append dfs for cases, deaths, recovered",
"data_path / 'covid-tracking-project-recovery.csv' df_raw = pd.read_csv(archived_data) states = df_raw['state'].unique() ctp_dfs = {} for",
"df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global test results missing for:",
"data in its own .csv file. source = dfs['global'] for country in tqdm(good_countries,",
"df.set_index('dates2').fillna(0).astype(int) # Fill NaN with 0 and convert to int df.to_csv(data_path / ('covidtimeseries_CA_%s.csv'",
"df = pd.read_csv(url) df_trim = pd.DataFrame(columns=['Province_State', 'cum_tests', 'dates2']) df_trim['Province_State'] = df['Province_State'].values df_trim['dates2'] =",
"{} for state in states: # For each country source = df_raw[df_raw['state'] ==",
"delta = end_dt - start_dt delta = delta.days for dt in daterange(start_dt, end_dt):",
"fixes: bool = False) -> None: \"\"\" Get global vaccines data from Our",
"'US_MD', 'Massachusetts': 'US_MA', 'Michigan': 'US_MI', 'Minnesota': 'US_MN', 'Mississippi': 'US_MS', 'Missouri': 'US_MO', 'Montana': 'US_MT',",
"of regions. It overwrites the original .csv files produced by the functions above.",
"Replace the values df[new] = df[cum].diff().fillna(0).astype(int).values return df def negify_missing(data_path: str) -> None:",
"import requests from tqdm import tqdm from typing import Union from urllib.error import",
"% c before = df[cum].copy() non_zeros = df[df[new] > 0].index has_negs = before.diff().min()",
"Santo', 'GO':'Goias', 'MA':'Maranhao', 'MG':'Minas Gerais', 'MS':'Mato Grosso do Sul', 'MT':'Mato Grosso', 'PA':'Para', 'PB':'Paraiba',",
"dates src_trim['Alpha-3 code'] = src['ISO code'].values src_trim['cum_tests'] = src['Cumulative total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True, drop=True) src_rois",
"data for one region. roi (str): One region, e.g 'US_MI' or 'Greece'. columns",
"for year in ['20', '21'])]] # Use only data columns # 20 or",
"= False) -> None: \"\"\" Gets data from Canada's Open Covid group for",
"counts are missing, set new counts to -1 so they don't become 0.",
"axis=1) df_new = df_new.fillna(-1).astype(int) df_fixed = df.join(df_new) df_fixed = df_fixed.fillna(-1).astype(int) return df_fixed def",
"12) # When JHU starts reporting end_dt = date.today() dates = [] delta",
"= pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2'] = source['date'].apply(fix_ct_dates) # Convert date format # first check if",
"for print statements. df (pd.DataFrame): DataFrame containing counts but not new counts. columns",
"= df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path) # overwrite timeseries CSV except: print(f'Could not get tests",
"data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src = pd.read_csv(url) roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv')",
"in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'region',",
"data URL try: df = pd.read_csv(url) # Download the data into a dataframe",
"non_zeros = df[df[new] > 0].index has_negs = before.diff().min() < 0 if len(non_zeros) and",
"combine dfs df_rawtemp = cases.merge(recovered, on=['date', 'province'], how='outer') df_raw = df_rawtemp.merge(deaths, on=['date', 'province'],",
"from data_path\".format(roi)) os.remove(csv) except: print(\"could not remove {}. Check that path is correct.\".format(csv))",
"df[['cum_cases', 'cum_deaths']].diff() # add recovery data df.set_index('dates2', inplace=True) df = df.merge(us_recovery_data[state], on='dates2', how='left')",
"/ (csv.name.split('.')[0]+'.csv') df.to_csv(out) def remove_old_rois(data_path: str): \"\"\"Delete time-series files for regions no longer",
"get_countries(dfs['global'], filter_=filter_) # For each \"good\" country, # reformat and save that data",
"match roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns = ['vaccinations',",
"df_tests[df_tests['roi'] == roi] # filter down to roi df_result = df_timeseries.merge(df_roi_tests, on='dates2', how='left')",
"False: df['cum_recover'] = source['recovered'].values # check if roi reports recovery data as hospitalizedDischarged",
"which screws up interpolator if np.isnan(df.loc[df.index[-1], cum]): df.loc[df.index[-1], cum] = maxx # Then",
"src['Cumulative total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data = [] # for",
"fnf_error: print(fnf_error, 'Could not add OWID data.') pass for i in df_timeseries.columns: #",
"roi (str): Region we are working with; used for print statements. df (pd.DataFrame):",
"to US csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src = pd.read_csv(url) src_trim",
"if OWID testng data already included if 'tests' in i: df_timeseries.drop([i], axis=1, inplace=True)",
"= src['Cumulative total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data = [] #",
"inplace=True) # sort by datetime obj before converting to string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y')",
"cases calculation, then merge those new cases back into dataframe. Args: roi (str):",
"= pd.concat(dfs) df_combined.sort_values(by='Province_State', inplace=True) df_combined['Date'] = pd.to_datetime(df_combined['dates2']) rois = df_combined['Province_State'].unique() sorted_dfs = []",
"Carolina': 'US_NC', 'North Dakota': 'US_ND', 'Northern Mariana Islands':'US_MP', 'Ohio': 'US_OH', 'Oklahoma': 'US_OK', 'Oregon':",
"cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global vaccine",
"df = dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries",
"0 and convert to int dfs[country] = df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path / ('covidtimeseries_%s.csv' % country))",
"given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] =",
"= \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: population =",
"cum_tests and new_tests to csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src =",
"duplication issue dfs.append(df_ffill[new_col]) df_new = pd.concat(dfs, axis=1) df_new = df_new.fillna(-1).astype(int) df_fixed = df.join(df_new)",
"['cases', 'deaths', 'recover']. Returns: pd.DataFrame: [description] \"\"\" for c in columns: cum =",
"the corresponding # cumulative column, which are: # 1) Cumulative columns which are",
"(without cum_ prefix) so create new counts for. Returns: df_fixed (pd.DataFrame): DataFrame containing",
"bool] = True, fixes: bool = False) -> None: \"\"\" Get global vaccines",
"df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values df_combined = df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global",
"elif source['hospitalizedDischarged'].isnull().all() == False: df['cum_recover'] = source['hospitalizedDischarged'].values else: df['cum_recover'] = np.NaN df.sort_values(by=['dates2'], inplace=True)",
"to filter by quality criteria. \"\"\" good = set(d['confirmed'].index) if filter_ and not",
"timeseries_path = data_path / ('covidtimeseries_%s.csv' % US_STATE_ABBREV[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError",
"= set(d['confirmed'].index) if filter_ and not isinstance(filter_, dict): filter_ = JHU_FILTER_DEFAULTS if filter_:",
"maxx = df.loc[first_non_zero, cum].max() # Find the bad entries and null the corresponding",
"'%m-%d-%Y') return datetime.strftime(y, '%m/%d/%y') def fix_negatives(data_path: str, plot: bool = False) -> None:",
"True, fixes: bool = False) -> None: \"\"\" Gets data from Canada's Open",
"data_path / f'covidtimeseries_{roi}.csv' try: df_timeseries = pd.read_csv(csv_path) except: print(f\"{csv_path} not found in data",
"# Check if OWID vaccines data already included if 'vaccin' in i: df_timeseries.drop([i],",
"state name as index # for k, v in US_STATE_ABBREV.items(): # get US",
"int df['new_recover'] = df['cum_recover'].diff() ctp_dfs['US_'+state] = df return ctp_dfs def get_canada(data_path: str, filter_:",
"daily numbers. For example, the cumulative total of cases should not go from",
"= None # default='warn' JHU_FILTER_DEFAULTS = {'confirmed': 5, 'recovered': 1, 'deaths': 0} COVIDTRACKER_FILTER_DEFAULTS",
"\"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" # generate a list of dates for scraping start_dt = date(2020, 4,",
"'US_AK', 'American Samoa': 'US_AS', 'Arizona': 'US_AZ', 'Arkansas': 'US_AR', 'California': 'US_CA', 'Colorado': 'US_CO', 'Connecticut':",
"total of cases should not go from N to a value less than",
"'US_IN', 'Iowa': 'US_IA', 'Kansas': 'US_KS', 'Kentucky': 'US_KY', 'Louisiana': 'US_LA', 'Maine': 'US_ME', 'Maryland': 'US_MD',",
"'US_GA', 'Guam': 'US_GU', 'Hawaii': 'US_HI', 'Idaho': 'US_ID', 'Illinois': 'US_IL', 'Indiana': 'US_IN', 'Iowa': 'US_IA',",
"== 'cases': source.drop('cases', axis=1, inplace=True) # removing this column so # we can",
"df_result = df_timeseries.merge(df_roi_tests, on='dates2', how='left') df_result.fillna(-1, inplace=True) df_result.loc[df_result['new_tests'] < 0, 'new_tests'] = -1",
"code'] = src['ISO code'].values src_trim['cum_tests'] = src['Cumulative total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3",
"roi (str): Region. Returns: population (int): Population count for ROI (if exists). \"\"\"",
"= url_template % (kind, region) # Create the full data URL try: df",
"df.drop(['tmp_new_recover'], axis=1) try: population = get_population_count(data_path, state) df['population'] = population except: pass dfs[state]",
"print(\"No data for %s\" % state) def fix_jhu_dates(x): y = datetime.strptime(x, '%m/%d/%y') return",
"set new counts to -1 so they don't become 0. df_ffill.loc[df_ffill[cum_col].isnull(), new_col] =",
"number df_result['new_tests'] = df_result['new_tests'].astype(int) df_result[['cum_tests', 'new_tests']] = df_result[['cum_tests', 'new_tests']].astype(int) df_result_trim = df_result[['dates2', 'cum_cases',",
"'deaths', 'recover'], plot: bool = False) -> pd.DataFrame: \"\"\"Used by `fix_negatives` to fix",
"Fill NaN with 0 and convert to int dfs[country] = df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path /",
"so # we can index into date on all 3 dfs at same",
"and add to timeseries df if we do. Args: data_path (str): Full path",
"'recover']: if df['cum_%s' % kind].sum() == 0: print(\"Negifying 'new_%s' for %s\" % (kind,",
"df_fixed (pd.DataFrame): DataFrame containing cumulative and now new counts. \"\"\" dfs = []",
"province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'].apply(fix_canada_dates)",
"'US_SD', 'Tennessee': 'US_TN', 'Texas': 'US_TX', 'Utah': 'US_UT', 'Vermont': 'US_VT', 'Virgin Islands': 'US_VI', 'Virginia':",
"data into a dataframe except HTTPError: print(\"Could not download data for %s, %s\"",
"df_timeseries = pd.read_csv(csv_path) except: print(f\"{csv_path} not found in data path.\") try: for i",
"\"\"\" Check if we have population count for roi and add to timeseries",
"except: print(\"{} population estimate not found in population_estimates.csv\".format(args.roi)) return int(population) def covid_tracking_recovery(data_path: str):",
"# check if roi reports recovery data as hospitalizedDischarged elif source['hospitalizedDischarged'].isnull().all() == False:",
"df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'] df['cum_cases']",
"# Create the full data URL try: df = pd.read_csv(url) # Download the",
"= {'cum_cases': 5, 'cum_recover': 1, 'cum_deaths': 0} US_STATE_ABBREV = { 'Alabama': 'US_AL', 'Alaska':",
"df = dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries",
"# combine dfs df_rawtemp = cases.merge(recovered, on=['date', 'province'], how='outer') df_raw = df_rawtemp.merge(deaths, on=['date',",
"not found in population_estimates.csv\".format(args.roi)) return int(population) def covid_tracking_recovery(data_path: str): \"\"\"Gets archived US recovery",
"JHU_FILTER_DEFAULTS = {'confirmed': 5, 'recovered': 1, 'deaths': 0} COVIDTRACKER_FILTER_DEFAULTS = {'cum_cases': 5, 'cum_recover':",
"purpose of this script is to fix spurious negative values in new daily",
"functions above. Args: data_path (str): Full path to data directory. plot (bool): Whether",
"(American Somoa)\"\"\" csvs = [x for x in data_path.iterdir() if 'covidtimeseries' in str(x)]",
"-> None: \"\"\" Get state-level data for Brazil. https://github.com/wcota/covid19br (<NAME>) \"\"\" url =",
"= df['Country/Region'] == country df2 = df[is_c].sum(axis=0, skipna=False).to_frame().T df2['Country/Region'] = country df2 =",
"df['new_uninfected'] = df['tmp_new_recover'] + df['new_deaths'] # new uninfected calculation df = df.fillna(-1).astype(int) df",
"df = df[[x for x in df if any(year in x for year",
"archived US recovery data from The COVID Tracking Project. https://covidtracking.com Args: data_path (str):",
"how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(US_STATE_ABBREV[roi], df_combined, cum_vacc_columns) df",
"'Delaware': 'US_DE', 'District of Columbia': 'US_DC', 'Florida': 'US_FL', 'Georgia': 'US_GA', 'Guam': 'US_GU', 'Hawaii':",
"rois: df_roi = df_combined[df_combined['Province_State'] == roi] df_roi = df_roi.sort_values(by=\"Date\") df_roi['new_tests'] = df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi)",
"at this # point, increase the corresponding cumulative values by 1. neg_index =",
"filter_: Union[dict, bool] = True, fixes: bool = False) -> None: \"\"\" Get",
"data already included if 'tests' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so",
"open population file df_pop = pd.read_csv(data_path / 'population_estimates.csv') except: print(\"Missing population_estimates.csv in data-path\")",
"monotonic growth after = df[cum].interpolate('pchip') diff = after.diff() if diff.min() < 0: #",
"and test results. Args: data_path (str): Full path to data directory. Returns: None",
"{'AC':'Acre', 'AL':'Alagoas', 'AM':'Amazonas', 'AP':'Amapa', 'BA':'Bahia','CE':'Ceara', 'DF':'Distrito Federal', 'ES':'Espirito Santo', 'GO':'Goias', 'MA':'Maranhao', 'MG':'Minas Gerais',",
"difference is taken between a new cumulative count and -1. We don't want",
"= df['Province/State'].isnull() # Whole countries only; use country name as index df1 =",
"by the functions above. Args: data_path (str): Full path to data directory. plot",
"non_zeros[0] maxx = df.loc[first_non_zero, cum].max() # Find the bad entries and null the",
"/ ('covidtimeseries_%s.csv' % country)) else: print(\"No data for %s\" % country) source =",
"start_dt delta = delta.days for dt in daterange(start_dt, end_dt): dates.append(dt.strftime(\"%m-%d-%Y\")) # cumulative tests",
"'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'Micronesia','Kiribati']: print(\"Skipping {}\".format(country)) continue # If",
"list of countries that have \"good\" data, # according to these criteria: good_countries",
"-> None: \"\"\" Get global vaccines data from Our World In Data https://github.com/owid/covid-19-data",
"# skipping because no data continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv' %",
"deaths = dfs[1] recovered = dfs[2] # combine dfs df_rawtemp = cases.merge(recovered, on=['date',",
"None: \"\"\" Get US vaccines data from Our World In Data https://github.com/owid/covid-19-data Add",
"or 2021 dfs[region][kind] = df # Add to dictionary of dataframes # Generate",
"timedelta(n) def fix_jhu_testing_dates(x): y = datetime.strptime(x, '%m-%d-%Y') return datetime.strftime(y, '%m/%d/%y') def fix_negatives(data_path: str,",
"'province'], how='outer') df_raw.fillna(0, inplace=True) provinces = ['Alberta', 'BC', 'Manitoba', 'New Brunswick', 'NL', 'Nova",
"for roi in roi_codes_dict: if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def",
"'Mississippi': 'US_MS', 'Missouri': 'US_MO', 'Montana': 'US_MT', 'Nebraska': 'US_NE', 'Nevada': 'US_NV', 'New Hampshire': 'US_NH',",
"# Use state name as index # for k, v in US_STATE_ABBREV.items(): #",
"testing data for roi in roi_codes_dict: if roi not in src_rois: unavailable_testing_data.append(roi) continue",
"which: while True: # Interpolates the cumulative column nulls to have # monotonic",
"= src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values src_trim.set_index('dates2', inplace=True,",
"Fill NaN with 0 and convert to int df.to_csv(data_path / ('covidtimeseries_BR_%s.csv' % state_code[state]))",
"and we don't want to miss new counts before the gap. So create",
"tqdm import tqdm from typing import Union from urllib.error import HTTPError import urllib.request,",
"after = before # Make sure the first differences are now all non-negative",
"'Vermont': 'US_VT', 'Virgin Islands': 'US_VI', 'Virginia': 'US_VA', 'Washington': 'US_WA', 'West Virginia': 'US_WV', 'Wisconsin':",
"'province'], how='outer') df_raw = df_rawtemp.merge(deaths, on=['date', 'province'], how='outer') df_raw.fillna(0, inplace=True) provinces = ['Alberta',",
"match roi df_combined = df_timeseries.merge(src_roi[['cum_tests']], how='left', on='dates2') df_combined['new_tests'] = df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests'] < 0,",
"df_roi = df_roi.sort_values(by=\"Date\") df_roi['new_tests'] = df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi) df_tests = pd.concat(sorted_dfs) df_tests.reset_index(inplace=True, drop=True) df_tests.replace(US_STATE_ABBREV,",
"in daily data. The purpose of this script is to fix spurious negative",
"one region. roi (str): One region, e.g 'US_MI' or 'Greece'. columns (list, optional):",
"source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] =",
"-1 except: print(f'No {cum_col} data to add for {roi}.') df_ffill[new_col] = -1 df_ffill",
"kind] = -1 out = data_path / (csv.name.split('.')[0]+'.csv') df.to_csv(out) def remove_old_rois(data_path: str): \"\"\"Delete",
"in [\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because bad data continue",
"df['new_deaths'] = source['newDeaths'].values df['new_recover'] = df['cum_recover'].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: roi",
"\"date\" }, inplace=True) dfs.append(source) cases = dfs[0] deaths = dfs[1] recovered = dfs[2]",
"https://github.com/owid/covid-19-data Add columns to US csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src",
"in df if any(year in x for year in ['20', '21'])]] # Use",
"df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df = df.set_index('dates2') # Convert",
"'RO':'Rondonia', 'RR':'Roraima', 'RS':'Rio Grande do Sul', 'SC':'Santa Catarina', 'SE':'Sergipe', 'SP':'Sao Paulo', 'TO':'Tocantins'} for",
"'new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[country].values df['cum_deaths'] = source['deaths'].loc[country].values",
"# Convert date format # first check if roi reports recovery data as",
"= source['hospitalizedDischarged'].values else: df['cum_recover'] = np.NaN df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj",
"calculation df = df.fillna(-1).astype(int) df = df.drop(['tmp_new_recover'], axis=1) try: population = get_population_count(data_path, state)",
"% state)) else: print(\"No data for %s\" % state) def fix_jhu_dates(x): y =",
"downloaded JHU files for that country if country in source['confirmed'].index: df = pd.DataFrame(columns=['dates2',",
"to csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src = pd.read_csv(url) roi_codes =",
"'American Samoa': 'US_AS', 'Arizona': 'US_AZ', 'Arkansas': 'US_AR', 'California': 'US_CA', 'Colorado': 'US_CO', 'Connecticut': 'US_CT',",
"0 # Replace the values df[new] = df[cum].diff().fillna(0).astype(int).values return df def negify_missing(data_path: str)",
"\"\"\" Gets data from Canada's Open Covid group for Canadian Provinces. https://opencovid.ca/ \"\"\"",
"'Michigan': 'US_MI', 'Minnesota': 'US_MN', 'Mississippi': 'US_MS', 'Missouri': 'US_MO', 'Montana': 'US_MT', 'Nebraska': 'US_NE', 'Nevada':",
"= 'new_%s' % c before = df[cum].copy() non_zeros = df[df[new] > 0].index has_negs",
"repeat if plot: plt.figure() plt.plot(df.index, before, label='raw') plt.plot(df.index, after, label='fixed') r = np.corrcoef(before,",
"None: \"\"\" Gets data from Canada's Open Covid group for Canadian Provinces. https://opencovid.ca/",
"Islands, US, US_AS (American Somoa)\"\"\" csvs = [x for x in data_path.iterdir() if",
"valid days of data. This only affects a small number of regions. It",
"[x for x in data_path.iterdir() if 'covidtimeseries' in str(x)] rois_to_remove = ['Diamond Princess',",
"Get US vaccines data from Our World In Data https://github.com/owid/covid-19-data Add columns to",
"convert dates to string df = df.set_index('dates2').fillna(0).astype(int) # Fill NaN with 0 and",
"urllib.error import HTTPError import urllib.request, json import os from datetime import timedelta, date",
"except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID global vaccines data.') pass",
"cumulative tests are named 'People_Tested' for first 200 ish days # then cumulative",
"data columns # 20 or 21 signifies 2020 or 2021 dfs[region][kind] = df",
"'TO':'Tocantins'} for state in tqdm(state_code, desc='Brazilian States'): source = df_raw[df_raw['state'] == state] #",
"= df_combined[df_combined['Province_State'] == roi] df_roi = df_roi.sort_values(by=\"Date\") df_roi['new_tests'] = df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi) df_tests =",
"country source = df_raw[df_raw['state'] == state] # Only the given state df =",
"working with; used for print statements. df (pd.DataFrame): DataFrame containing counts but not",
"< 0].index df.loc[neg_index, cum] += 1 else: break # Then repeat if plot:",
"filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_tests']], how='left', on='dates2') df_combined['new_tests'] = df_combined['cum_tests'].diff()",
"data_path.iterdir() if 'covidtimeseries' in str(x)] rois_to_remove = ['Diamond Princess', 'Grand Princess', 'MS Zaandam',",
"'Vanuatu', 'Marshall Islands', 'US', 'US_AS', 'Micronesia', 'Kiribati', 'Palau'] for csv in csvs: roi",
"Make sure the first differences are now all non-negative assert after.diff().min() >= 0",
"df_result_trim = df_result[['dates2', 'cum_cases', 'new_cases', 'cum_deaths', 'new_deaths', 'cum_recover', 'new_recover', 'new_uninfected', 'cum_tests', 'new_tests', 'population']].copy()",
"\"\"\" good = set(d['confirmed'].index) if filter_ and not isinstance(filter_, dict): filter_ = JHU_FILTER_DEFAULTS",
"In Data https://github.com/owid/covid-19-data Add columns to US csvs in data_path. \"\"\" url =",
"CSV except: print(f'Could not get tests data for {roi}.') def daterange(date1, date2): for",
"\"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") # Scrape the data dfs = {} for region in ['global', 'US']:",
"population = get_population_count(data_path, state) df['population'] = population except: pass dfs[state] = df dfs[state].to_csv(data_path",
"all non-negative assert after.diff().min() >= 0 # Replace the values df[new] = df[cum].diff().fillna(0).astype(int).values",
"roi: str, columns: list = ['cases', 'deaths', 'recover'], plot: bool = False) ->",
"'Recovered'] for i in to_remove: if i in rois: rois.remove(i) for roi in",
"bool] = True, fixes: bool = False) -> None: \"\"\" Get testing data",
"df_ffill[new_col] = -1 df_ffill = df_ffill[~df_ffill.index.duplicated()] # fix duplication issue dfs.append(df_ffill[new_col]) df_new =",
"Find the bad entries and null the corresponding # cumulative column, which are:",
"'US_' to abbrev df.replace(US_STATE_ABBREV, inplace=True) df = df.set_index('Province_State') df = df.groupby('Province_State').sum() # combine",
"counts. \"\"\" dfs = [] df_tmp = df.copy() df_tmp.reset_index(inplace=True) for col in columns:",
"= source['date'].apply(fix_canada_dates) # Convert date format df['cum_cases'] = source['cumulative_cases'].values df['cum_deaths'] = source['cumulative_deaths'].values df['cum_recover']",
"= [] df_tmp = df.copy() df_tmp.reset_index(inplace=True) for col in columns: cum_col = 'cum_'",
"data from Canada's Open Covid group for Canadian Provinces. https://opencovid.ca/ \"\"\" dfs =",
"'PB':'Paraiba', 'PE':'Pernambuco', 'PI':'Piaui', 'PR':'Parana', 'RJ':'Rio de Janeiro', 'RN':'Rio Grande do Norte', 'RO':'Rondonia', 'RR':'Roraima',",
"(roi, c, r)) plt.legend() else: after = before # Make sure the first",
"data continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries = pd.read_csv(timeseries_path,",
"'roi'}, inplace=True) # now open csvs in data_path that match rois and merge",
"df_timeseries.columns: # Check if OWID vaccines data already included if 'vaccin' in i:",
"'US_OH', 'Oklahoma': 'US_OK', 'Oregon': 'US_OR', 'Pennsylvania': 'US_PA', 'Puerto Rico': 'US_PR', 'Rhode Island': 'US_RI',",
"often a partial count. df = df.iloc[:-1] df = fix_neg(df, roi, plot=plot) df.to_csv(data_path",
".csv file. source = dfs['global'] for country in tqdm(good_countries, desc='Countries'): # For each",
"df_combined = df_timeseries.merge(src_roi[['cum_tests']], how='left', on='dates2') df_combined['new_tests'] = df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests'] < 0, 'new_tests'] =",
"= pd.DataFrame(columns=['Province_State', 'cum_tests', 'dates2']) df_trim['Province_State'] = df['Province_State'].values df_trim['dates2'] = fix_jhu_testing_dates(i) # handle cases",
"non-decreasing. Defaults to ['cases', 'deaths', 'recover']. Returns: pd.DataFrame: [description] \"\"\" for c in",
"cum counts go missing and new counts get missed. New counts spike when",
"('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could",
"import urllib.request, json import os from datetime import timedelta, date import pandas as",
"dates to string df = df.set_index('dates2') # Convert to int df['new_recover'] = df['cum_recover'].diff()",
"index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID global vaccines data.')",
"df_roi = df_combined[df_combined['Province_State'] == roi] df_roi = df_roi.sort_values(by=\"Date\") df_roi['new_tests'] = df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi) df_tests",
"desc=f'Scraping {delta} days of data across all states'): url = url_template % i",
"then switches to Total_Test_Results if 'People_Tested' in df.columns: df_trim['cum_tests'] = df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim) if",
"'US_PA', 'Puerto Rico': 'US_PR', 'Rhode Island': 'US_RI', 'South Carolina': 'US_SC', 'South Dakota': 'US_SD',",
"pd.read_csv(url) # Download the data into a dataframe except HTTPError: print(\"Could not download",
"True, fixes: bool = False) -> None: \"\"\" Get global vaccines data from",
"ctp_dfs['US_'+state] = df return ctp_dfs def get_canada(data_path: str, filter_: Union[dict, bool] = True,",
"country name as index df1 = df[has_no_province].set_index('Country/Region') more_dfs = [] for country in",
"'US_WI', 'Wyoming': 'US_WY' } def get_jhu(data_path: str, filter_: Union[dict, bool] = True) ->",
"df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths']",
"with; used for print statements. df (pd.DataFrame): DataFrame containing counts but not new",
"# drop so we can add new src_roi = src_trim[src_trim['Alpha-3 code'] == roi]",
"counts before the gap. So create a dummy dataframe with forward filled cumulative",
"'cum_%s' % c new = 'new_%s' % c before = df[cum].copy() non_zeros =",
"'US_KS', 'Kentucky': 'US_KY', 'Louisiana': 'US_LA', 'Maine': 'US_ME', 'Maryland': 'US_MD', 'Massachusetts': 'US_MA', 'Michigan': 'US_MI',",
"passing a quality check Args: d (pd.DataFrame): Data from JHU tracker (e.g. df['global]).",
"'new_%s' for %s\" % (kind, roi)) df['new_%s' % kind] = -1 out =",
"(countries only). https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19 Args: data_path (str): Full path to data directory. Returns:",
"large negative number df_combined[['cum_tests', 'new_tests']] = df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values df_combined = df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path)",
"= src['people_fully_vaccinated'].values roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim",
"# Only the given state df = pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2'] = source['date'].apply(fix_ct_dates) # Convert",
"datetime.strptime(str(x), '%Y%m%d') def get_countries(d: pd.DataFrame, filter_: Union[dict, bool] = True): \"\"\"Get a list",
"Paulo', 'TO':'Tocantins'} for state in tqdm(state_code, desc='Brazilian States'): source = df_raw[df_raw['state'] == state]",
"are named 'Total_Test_Results' after 200 ish days dfs = [] for i in",
"the data dfs = {} for region in ['global', 'US']: dfs[region] = {}",
"States'): source = df_raw[df_raw['state'] == state] # Only the given province df =",
"Dakota': 'US_ND', 'Northern Mariana Islands':'US_MP', 'Ohio': 'US_OH', 'Oklahoma': 'US_OK', 'Oregon': 'US_OR', 'Pennsylvania': 'US_PA',",
"Princess', 'Grand Princess', 'Recovered'] for i in to_remove: if i in rois: rois.remove(i)",
"date format df['cum_cases'] = source['cumulative_cases'].values df['cum_deaths'] = source['cumulative_deaths'].values df['cum_recover'] = source['cumulative_recovered'].values df[['new_cases', 'new_deaths',",
"data df = df[[x for x in df if any(year in x for",
"in the downloaded JHU files for that country if state in source['confirmed'].index: df",
"counts go missing and new counts get missed. New counts spike when cumulative",
"that match roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns =",
"'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'Micronesia','Kiribati']: print(\"Skipping {}\".format(country)) continue # If we have",
"fix_canada_dates(x): return datetime.strptime(x, '%d-%m-%Y') def get_brazil(data_path: str, filter_: Union[dict, bool] = True, fixes:",
"def get_population_count(data_path:str, roi): \"\"\" Check if we have population count for roi and",
"data url_template = (\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") # Scrape the data dfs = {}",
"of this script is to fix spurious negative values in new daily numbers.",
"year in ['20', '21'])]] # Use only data columns # 20 or 21",
"if we have population count for roi and add to timeseries df if",
"source.drop('cases', axis=1, inplace=True) # removing this column so # we can index into",
"missing data and the difference is taken between a new cumulative count and",
"for key, minimum in filter_.items(): enough = d[key].index[d[key].max(axis=1) >= minimum].tolist() good = good.intersection(enough)",
"# sort by datetime obj before converting to string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') #",
"after)[0, 1] plt.title(\"%s %s Raw vs Fixed R=%.5g\" % (roi, c, r)) plt.legend()",
"removing this column so # we can index into date on all 3",
"for i in df_timeseries.columns: # Check if OWID vaccines data already included if",
"'Georgia': 'US_GA', 'Guam': 'US_GU', 'Hawaii': 'US_HI', 'Idaho': 'US_ID', 'Illinois': 'US_IL', 'Indiana': 'US_IN', 'Iowa':",
"by datetime obj before converting to string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates",
"'Yukon'] # Export timeseries data for each province for province in tqdm(provinces, desc='Canadian",
"such as: Diamond Princess, MS Zaandam, Samoa, Vanuatu, Marshall Islands, US, US_AS (American",
"pd.read_csv(url) roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down",
"source['hospitalizedDischarged'].values else: df['cum_recover'] = np.NaN df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before",
"'cum_recover': 1, 'cum_deaths': 0} US_STATE_ABBREV = { 'Alabama': 'US_AL', 'Alaska': 'US_AK', 'American Samoa':",
"axis=1, inplace=True) # drop so we can add new src_roi = src_trim[src_trim['region'] ==",
"dt in daterange(start_dt, end_dt): dates.append(dt.strftime(\"%m-%d-%Y\")) # cumulative tests are named 'People_Tested' for first",
"'Puerto Rico': 'US_PR', 'Rhode Island': 'US_RI', 'South Carolina': 'US_SC', 'South Dakota': 'US_SD', 'Tennessee':",
"df.set_index('dates2', inplace=True) df = df.merge(us_recovery_data[state], on='dates2', how='left') df['tmp_new_recover'] = df['new_recover'].fillna(0).astype(int) # create temp",
"only; use country name as index df1 = df[has_no_province].set_index('Country/Region') more_dfs = [] for",
"dates = [] delta = end_dt - start_dt delta = delta.days for dt",
"Export timeseries data for each province for province in tqdm(provinces, desc='Canadian Provinces'): source",
"col in columns: cum_col = 'cum_' + col dummy_cum_col = 'dummy_' + cum_col",
"for state in tqdm(states, desc='US States'): # For each country if state in",
"want it to spike, and we don't want to miss new counts before",
"source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[state].values df['cum_deaths'] = source['deaths'].loc[state].values df[['new_cases', 'new_deaths']] =",
"df if we do. Args: data_path (str): Full path to data directory. roi",
"= delta.days for dt in daterange(start_dt, end_dt): dates.append(dt.strftime(\"%m-%d-%Y\")) # cumulative tests are named",
"'Idaho': 'US_ID', 'Illinois': 'US_IL', 'Indiana': 'US_IN', 'Iowa': 'US_IA', 'Kansas': 'US_KS', 'Kentucky': 'US_KY', 'Louisiana':",
"src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'region', 'cum_vaccinations', 'daily_vaccinations', 'people_vaccinated', 'people_fully_vaccinated']) src_trim['dates2'] =",
"global vaccines data from Our World In Data https://github.com/owid/covid-19-data Add columns to global",
"'cum_recover', 'new_recover', 'new_uninfected', 'cum_tests', 'new_tests', 'population']].copy() df_result_trim = df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path) # overwrite",
"vs Fixed R=%.5g\" % (roi, c, r)) plt.legend() else: after = before #",
"'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'US_AS', 'Micronesia', 'Kiribati', 'Palau'] for csv in csvs:",
"data in the downloaded JHU files for that country if state in source['confirmed'].index:",
"negative values in daily data. The purpose of this script is to fix",
"and has_negs: first_non_zero = non_zeros[0] maxx = df.loc[first_non_zero, cum].max() # Find the bad",
"roi)) df['new_%s' % kind] = -1 out = data_path / (csv.name.split('.')[0]+'.csv') df.to_csv(out) def",
"new recover for df['new_uninfected'] = df['tmp_new_recover'] + df['new_deaths'] # new uninfected calculation df",
"data.') pass for i in df_timeseries.columns: # Check if OWID vaccines data already",
"get_population_count(data_path:str, roi): \"\"\" Check if we have population count for roi and add",
"filter_: for key, minimum in filter_.items(): enough = d[key].index[d[key].max(axis=1) >= minimum].tolist() good =",
"= source['recovered'].loc[country].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover']",
"have testing data for roi in roi_codes_dict: if roi not in src_rois: unavailable_testing_data.append(roi)",
"state) def fix_jhu_dates(x): y = datetime.strptime(x, '%m/%d/%y') return datetime.strftime(y, '%m/%d/%y') def fix_ct_dates(x): return",
"in to_remove: if i in rois: rois.remove(i) for roi in rois: csv_path =",
"Args: roi (str): Region we are working with; used for print statements. df",
"convert to int dfs[country] = df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path / ('covidtimeseries_%s.csv' % country)) else: print(\"No",
"Returns: ctp_dfs (dict): Dictionary containing US States (keys) and dataframes containing dates, recovery",
"i in rois: rois.remove(i) for roi in rois: csv_path = data_path / f'covidtimeseries_{roi}.csv'",
"2) New daily columns which are negative bad = df.loc[first_non_zero:, new] < 0",
"Interpolates the cumulative column nulls to have # monotonic growth after = df[cum].interpolate('pchip')",
"= JHU_FILTER_DEFAULTS if filter_: for key, minimum in filter_.items(): enough = d[key].index[d[key].max(axis=1) >=",
"in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we can add new src_roi",
"people_tested and then switches to Total_Test_Results if 'People_Tested' in df.columns: df_trim['cum_tests'] = df['People_Tested'].fillna(-1).astype(int).values",
"'Texas': 'US_TX', 'Utah': 'US_UT', 'Vermont': 'US_VT', 'Virgin Islands': 'US_VI', 'Virginia': 'US_VA', 'Washington': 'US_WA',",
"= covid_tracking_recovery(data_path) for state in tqdm(states, desc='US States'): # For each country if",
"# Generate a list of countries that have \"good\" data, # according to",
"= [x for x in data_path.iterdir() if 'covidtimeseries' in str(x)] for csv in",
"in df_timeseries.columns: # Check if testng data already included if 'tests' in i:",
"timeseries_path = data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError",
"\"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because bad data continue try: timeseries_path = data_path",
"= 'BR_' + state_code[state] population = get_population_count(data_path, roi) df['population'] = population except: print(\"Could",
"'US_IA', 'Kansas': 'US_KS', 'Kentucky': 'US_KY', 'Louisiana': 'US_LA', 'Maine': 'US_ME', 'Maryland': 'US_MD', 'Massachusetts': 'US_MA',",
"return datetime.strptime(x, '%d-%m-%Y') def get_brazil(data_path: str, filter_: Union[dict, bool] = True, fixes: bool",
"i in df_timeseries.columns: # Check if OWID vaccines data already included if 'vaccin'",
"nulling such data and applying a monotonic spline interpolation in between valid days",
"for kind in ['cases', 'deaths', 'recover']: if df['cum_%s' % kind].sum() == 0: print(\"Negifying",
"for i in df_timeseries.columns: # Check if OWID testing data already included if",
"\"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src = pd.read_csv(url) roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict",
"already included if 'tests' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we",
"src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() #",
"Vanuatu, Marshall Islands, US, US_AS (American Somoa)\"\"\" csvs = [x for x in",
"'dummy_' + cum_col new_col = 'new_' + col try: start = df_tmp[df_tmp[cum_col] >",
"Islands': 'US_VI', 'Virginia': 'US_VA', 'Washington': 'US_WA', 'West Virginia': 'US_WV', 'Wisconsin': 'US_WI', 'Wyoming': 'US_WY'",
"try: df_timeseries = pd.read_csv(csv_path) except: print(f\"{csv_path} not found in data path.\") try: for",
"in data-path\") try: population = df_pop.query('roi == \"{}\"'.format(roi))['population'].values except: print(\"{} population estimate not",
"if region == 'global': has_no_province = df['Province/State'].isnull() # Whole countries only; use country",
"% state_code[state])) def get_owid_tests(data_path: str, filter_: Union[dict, bool] = True, fixes: bool =",
"src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values src_trim.set_index('dates2', inplace=True, drop=True) src_trim.replace(\"New York State\", \"New York\", inplace=True) #",
"before # Make sure the first differences are now all non-negative assert after.diff().min()",
"df = pd.read_csv(csv) for kind in ['cases', 'deaths', 'recover']: if df['cum_%s' % kind].sum()",
"print(roi_codes_dict[roi], end=\" \") print(\"\") def dummy_cumulative_new_counts(roi, df, columns: list): \"\"\" There are cases",
"df_raw[df_raw['state'] == state] # Only the given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover',",
"Hampshire': 'US_NH', 'New Jersey': 'US_NJ', 'New Mexico': 'US_NM', 'New York': 'US_NY', 'North Carolina':",
"roi and add to timeseries df if we do. Args: data_path (str): Full",
"Only the given state df = pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2'] = source['date'].apply(fix_ct_dates) # Convert date",
"\"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") # Scrape the data dfs = {} for region in ['global',",
"import timedelta, date import pandas as pd pd.options.mode.chained_assignment = None # default='warn' JHU_FILTER_DEFAULTS",
"'new_tests']].astype(int) df_result_trim = df_result[['dates2', 'cum_cases', 'new_cases', 'cum_deaths', 'new_deaths', 'cum_recover', 'new_recover', 'new_uninfected', 'cum_tests', 'new_tests',",
"/ ('covidtimeseries_BR_%s.csv' % state_code[state])) def get_owid_tests(data_path: str, filter_: Union[dict, bool] = True, fixes:",
"df_roi.sort_values(by=\"Date\") df_roi['new_tests'] = df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi) df_tests = pd.concat(sorted_dfs) df_tests.reset_index(inplace=True, drop=True) df_tests.replace(US_STATE_ABBREV, inplace=True) df_tests.rename(columns={'Province_State':",
"date.today() dates = [] delta = end_dt - start_dt delta = delta.days for",
"pd.json_normalize(data[kind]) if kind == 'cases': source.drop('cases', axis=1, inplace=True) # removing this column so",
"US recovery data from The COVID Tracking Project. https://covidtracking.com Args: data_path (str): Full",
"return datetime.strftime(y, '%m/%d/%y') def get_jhu_us_states_tests(data_path: str, filter_: Union[dict, bool] = False) -> None:",
"try: roi = 'BR_' + state_code[state] population = get_population_count(data_path, roi) df['population'] = population",
"bool] = True) -> None: \"\"\"Gets data from Johns Hopkins CSSEGIS (countries only).",
"criteria: good_countries = get_countries(dfs['global'], filter_=filter_) # For each \"good\" country, # reformat and",
"if filter_: for key, minimum in filter_.items(): enough = d[key].index[d[key].max(axis=1) >= minimum].tolist() good",
"# cumulative counts decrease and new_tests becomes a large negative number df_combined[['cum_tests', 'new_tests']]",
"except HTTPError: print(\"Could not download tests data for %s\" % i) df_combined =",
"'US_OK', 'Oregon': 'US_OR', 'Pennsylvania': 'US_PA', 'Puerto Rico': 'US_PR', 'Rhode Island': 'US_RI', 'South Carolina':",
"Returns: df_fixed (pd.DataFrame): DataFrame containing cumulative and now new counts. \"\"\" dfs =",
"df.merge(us_recovery_data[state], on='dates2', how='left') df['tmp_new_recover'] = df['new_recover'].fillna(0).astype(int) # create temp new recover for df['new_uninfected']",
"roi df_result = df_timeseries.merge(df_roi_tests, on='dates2', how='left') df_result.fillna(-1, inplace=True) df_result.loc[df_result['new_tests'] < 0, 'new_tests'] =",
"Raw vs Fixed R=%.5g\" % (roi, c, r)) plt.legend() else: after = before",
"'Total_Test_Results' in df.columns: df_trim['cum_tests'] = df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim) except HTTPError: print(\"Could not download tests",
"index # for k, v in US_STATE_ABBREV.items(): # get US state abbrev #",
"dataframe src_trim = pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests']) src_trim['dates2'] = src['Date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code']",
"do. Args: data_path (str): Full path to data directory. roi (str): Region. Returns:",
"% roi_codes_dict[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not",
"# Handle cases where # cumulative counts decrease and new_tests becomes a large",
"def get_jhu(data_path: str, filter_: Union[dict, bool] = True) -> None: \"\"\"Gets data from",
"df[has_no_province].set_index('Country/Region') more_dfs = [] for country in ['China', 'Canada', 'Australia']: if country ==",
"[] for country in ['China', 'Canada', 'Australia']: if country == 'Canada' and kind",
"= src_trim['region'].unique() for roi in src_rois: if roi in US_STATE_ABBREV: try: timeseries_path =",
"'Ontario', 'PEI', 'Quebec', 'Saskatchewan', 'Yukon'] # Export timeseries data for each province for",
"group for Canadian Provinces. https://opencovid.ca/ \"\"\" dfs = [] # we will append",
"not in src_rois: unavailable_testing_data.append(roi) continue if roi_codes_dict[roi] in [\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\",",
"Islands', 'US', 'Micronesia','Kiribati']: print(\"Skipping {}\".format(country)) continue # If we have data in the",
"with forward filled cumulative counts and perform new cases calculation, then merge those",
"on all 3 dfs at same position source.rename(columns={source.columns[1]: \"date\" }, inplace=True) dfs.append(source) cases",
"from Our World In Data https://github.com/owid/covid-19-data Add columns to US csvs in data_path.",
"dataframes # Generate a list of countries that have \"good\" data, # according",
"source = dfs['global'] for country in tqdm(good_countries, desc='Countries'): # For each country if",
"except: pass dfs[state] = df dfs[state].to_csv(data_path / ('covidtimeseries_%s.csv' % state)) else: print(\"No data",
"new_col] = -1 except: print(f'No {cum_col} data to add for {roi}.') df_ffill[new_col] =",
"'recovered': 1, 'deaths': 0} COVIDTRACKER_FILTER_DEFAULTS = {'cum_cases': 5, 'cum_recover': 1, 'cum_deaths': 0} US_STATE_ABBREV",
"as pd pd.options.mode.chained_assignment = None # default='warn' JHU_FILTER_DEFAULTS = {'confirmed': 5, 'recovered': 1,",
"'new_deaths']] = df[['cum_cases', 'cum_deaths']].diff() # add recovery data df.set_index('dates2', inplace=True) df = df.merge(us_recovery_data[state],",
"# For each country if state in ['Diamond Princess', 'Grand Princess', 'MS Zaandam',",
"after.diff().min() >= 0 # Replace the values df[new] = df[cum].diff().fillna(0).astype(int).values return df def",
"i in to_remove: if i in rois: rois.remove(i) for roi in rois: csv_path",
"count. df = df.iloc[:-1] df = fix_neg(df, roi, plot=plot) df.to_csv(data_path / (csv.name.split('.')[0]+'.csv')) def",
"df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, 'CA_'",
"try: population = get_population_count(data_path, country) df['population'] = population except: pass # Fill NaN",
"% (kind, region) # Create the full data URL try: df = pd.read_csv(url)",
"'BA':'Bahia','CE':'Ceara', 'DF':'Distrito Federal', 'ES':'Espirito Santo', 'GO':'Goias', 'MA':'Maranhao', 'MG':'Minas Gerais', 'MS':'Mato Grosso do Sul',",
"code'].unique() unavailable_testing_data = [] # for appending rois that don't have testing data",
"tests data for %s\" % i) df_combined = pd.concat(dfs) df_combined.sort_values(by='Province_State', inplace=True) df_combined['Date'] =",
"date containing states and test results. Args: data_path (str): Full path to data",
"single region. This function uses monotonic spline interpolation to make sure that cumulative",
"not add OWID global vaccines data.') pass for i in df_timeseries.columns: # Check",
"Convert to int df['new_recover'] = df['cum_recover'].diff() ctp_dfs['US_'+state] = df return ctp_dfs def get_canada(data_path:",
"optional): Columns to make non-decreasing. Defaults to ['cases', 'deaths', 'recover']. Returns: pd.DataFrame: [description]",
"'Nevada': 'US_NV', 'New Hampshire': 'US_NH', 'New Jersey': 'US_NJ', 'New Mexico': 'US_NM', 'New York':",
"days dfs = [] for i in tqdm(dates, desc=f'Scraping {delta} days of data",
"corresponding # cumulative column, which are: # 1) Cumulative columns which are zero",
"if not US_STATE_ABBREV[k].startswith('US_'): # US_STATE_ABBREV[k] = 'US_' + v # Add 'US_' to",
"src['date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['iso_code'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] =",
"print(fnf_error, 'Could not add OWID global vaccines data.') pass for i in df_timeseries.columns:",
"df['global]). filter (bool, optional): Whether to filter by quality criteria. \"\"\" good =",
"if i in rois: rois.remove(i) for roi in rois: csv_path = data_path /",
"region == 'US': # Use state name as index # for k, v",
"to_remove: if i in rois: rois.remove(i) for roi in rois: csv_path = data_path",
"= -1 out = data_path / (csv.name.split('.')[0]+'.csv') df.to_csv(out) def remove_old_rois(data_path: str): \"\"\"Delete time-series",
"abbrev # if not US_STATE_ABBREV[k].startswith('US_'): # US_STATE_ABBREV[k] = 'US_' + v # Add",
"df['population'] = population except: print(\"Could not add population data for {}\".format(state)) pass df.sort_values(by=['dates2'],",
"# Interpolates the cumulative column nulls to have # monotonic growth after =",
"= df.merge(us_recovery_data[state], on='dates2', how='left') df['tmp_new_recover'] = df['new_recover'].fillna(0).astype(int) # create temp new recover for",
"((date2 - date1).days)+1): yield date1 + timedelta(n) def fix_jhu_testing_dates(x): y = datetime.strptime(x, '%m-%d-%Y')",
"= df_rawtemp.merge(deaths, on=['date', 'province'], how='outer') df_raw.fillna(0, inplace=True) provinces = ['Alberta', 'BC', 'Manitoba', 'New",
"cumulative total of cases should not go from N to a value less",
"src_trim = pd.DataFrame(columns=['dates2', 'region', 'cum_vaccinations', 'daily_vaccinations', 'people_vaccinated', 'people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix",
"'New York': 'US_NY', 'North Carolina': 'US_NC', 'North Dakota': 'US_ND', 'Northern Mariana Islands':'US_MP', 'Ohio':",
"= df[[x for x in df if any(year in x for year in",
"'GO':'Goias', 'MA':'Maranhao', 'MG':'Minas Gerais', 'MS':'Mato Grosso do Sul', 'MT':'Mato Grosso', 'PA':'Para', 'PB':'Paraiba', 'PE':'Pernambuco',",
"df_new = df_new.fillna(-1).astype(int) df_fixed = df.join(df_new) df_fixed = df_fixed.fillna(-1).astype(int) return df_fixed def get_owid_us_vaccines(data_path:",
"acceptable for %s\" % ','.join(good)) # print(\"JHU data not acceptable for %s\" %",
"'new_uninfected']) df['dates2'] = source['date'] df['cum_cases'] = source['totalCases'].values df['cum_deaths'] = source['deaths'].values df['cum_recover'] = source['recovered'].values",
"start_dt = date(2020, 4, 12) # When JHU starts reporting end_dt = date.today()",
"df.to_csv(data_path / ('covidtimeseries_BR_%s.csv' % state_code[state])) def get_owid_tests(data_path: str, filter_: Union[dict, bool] = True,",
"get_owid_us_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False) -> None:",
"and new counts get missed. New counts spike when cumulative counts go to",
"for missing data and the difference is taken between a new cumulative count",
"== roi] # filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_tests']], how='left', on='dates2')",
"import numpy as np import pandas as pd import requests from tqdm import",
"'Vanuatu', 'Marshall Islands', 'US', 'Micronesia','Kiribati']: print(\"Skipping {}\".format(country)) continue # If we have data",
"go to -1 for missing data and the difference is taken between a",
"in src_rois: if roi in US_STATE_ABBREV: try: timeseries_path = data_path / ('covidtimeseries_%s.csv' %",
"Tracking Project. https://covidtracking.com Args: data_path (str): Full path to data directory. Returns: ctp_dfs",
"Fixed R=%.5g\" % (roi, c, r)) plt.legend() else: after = before # Make",
"full data URL with urllib.request.urlopen(url_path) as url: data = json.loads(url.read().decode()) source = pd.json_normalize(data[kind])",
"new_tests becomes a large negative number df_combined[['cum_tests', 'new_tests']] = df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values df_combined =",
"str, filter_: Union[dict, bool] = False) -> None: \"\"\" Scrape JHU for US",
"pd.concat(dfs, axis=1) df_new = df_new.fillna(-1).astype(int) df_fixed = df.join(df_new) df_fixed = df_fixed.fillna(-1).astype(int) return df_fixed",
"make non-decreasing. Defaults to ['cases', 'deaths', 'recover']. Returns: pd.DataFrame: [description] \"\"\" for c",
"countries that have \"good\" data, # according to these criteria: good_countries = get_countries(dfs['global'],",
"Jersey': 'US_NJ', 'New Mexico': 'US_NM', 'New York': 'US_NY', 'North Carolina': 'US_NC', 'North Dakota':",
"results. Data is stored as a collection of CSVs per date containing states",
"population file df_pop = pd.read_csv(data_path / 'population_estimates.csv') except: print(\"Missing population_estimates.csv in data-path\") try:",
"'US_HI', 'Idaho': 'US_ID', 'Illinois': 'US_IL', 'Indiana': 'US_IN', 'Iowa': 'US_IA', 'Kansas': 'US_KS', 'Kentucky': 'US_KY',",
"# Make sure the first differences are now all non-negative assert after.diff().min() >=",
"df_tests = pd.concat(sorted_dfs) df_tests.reset_index(inplace=True, drop=True) df_tests.replace(US_STATE_ABBREV, inplace=True) df_tests.rename(columns={'Province_State': 'roi'}, inplace=True) # now open",
"spline interpolation to make sure that cumulative counts are non-decreasing. Args: df (pd.DataFrame):",
"cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV def fix_owid_dates(x): y",
"= population except: pass dfs[state] = df dfs[state].to_csv(data_path / ('covidtimeseries_%s.csv' % state)) else:",
"int(population) def covid_tracking_recovery(data_path: str): \"\"\"Gets archived US recovery data from The COVID Tracking",
"where # cumulative counts decrease and new_tests becomes a large negative number df_combined[['cum_tests',",
"'Florida': 'US_FL', 'Georgia': 'US_GA', 'Guam': 'US_GU', 'Hawaii': 'US_HI', 'Idaho': 'US_ID', 'Illinois': 'US_IL', 'Indiana':",
"end=\" \") print(\"\") def dummy_cumulative_new_counts(roi, df, columns: list): \"\"\" There are cases where",
"data_path that match rois and merge on csv to add cum_test and new_tests",
"'SC':'Santa Catarina', 'SE':'Sergipe', 'SP':'Sao Paulo', 'TO':'Tocantins'} for state in tqdm(state_code, desc='Brazilian States'): source",
"(\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") # Scrape the data dfs = {} for region in",
"= before # Make sure the first differences are now all non-negative assert",
"region. This function uses monotonic spline interpolation to make sure that cumulative counts",
"['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'US_AS', 'Micronesia',",
"Fill NaN with 0 and convert to int df.to_csv(data_path / ('covidtimeseries_CA_%s.csv' % province))",
"count for ROI (if exists). \"\"\" try: # open population file df_pop =",
"= 'new_' + col try: start = df_tmp[df_tmp[cum_col] > 0].index.values[0] df_ffill = df_tmp.iloc[start:]",
"columns which are zero after previously # being non-zero bad = df.loc[first_non_zero:, cum]",
"have \"good\" data, # according to these criteria: good_countries = get_countries(dfs['global'], filter_=filter_) #",
"state) df['population'] = population except: pass dfs[state] = df dfs[state].to_csv(data_path / ('covidtimeseries_%s.csv' %",
"to int df['new_recover'] = df['cum_recover'].diff() ctp_dfs['US_'+state] = df return ctp_dfs def get_canada(data_path: str,",
"datetime.strftime(y, '%m/%d/%y') def fix_negatives(data_path: str, plot: bool = False) -> None: \"\"\"Fix negative",
"the models.\"\"\" import bs4 from datetime import datetime import matplotlib.pyplot as plt import",
"fix_jhu_testing_dates(i) # handle cases where column is people_tested and then switches to Total_Test_Results",
"For each country if country in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa',",
"i in df_timeseries.columns: # Check if testng data already included if 'tests' in",
"pd.read_csv(csv) # Exclude final day because it is often a partial count. df",
"= source['cumulative_cases'].values df['cum_deaths'] = source['cumulative_deaths'].values df['cum_recover'] = source['cumulative_recovered'].values df[['new_cases', 'new_deaths', 'new_recover']] = \\",
"'Micronesia', 'Kiribati', 'Palau'] for csv in csvs: roi = str(csv).split('.')[0].split('_', 1)[-1] if roi",
"in daterange(start_dt, end_dt): dates.append(dt.strftime(\"%m-%d-%Y\")) # cumulative tests are named 'People_Tested' for first 200",
"exists). \"\"\" try: # open population file df_pop = pd.read_csv(data_path / 'population_estimates.csv') except:",
"'Virginia': 'US_VA', 'Washington': 'US_WA', 'West Virginia': 'US_WV', 'Wisconsin': 'US_WI', 'Wyoming': 'US_WY' } def",
"# Add 'US_' to abbrev df.replace(US_STATE_ABBREV, inplace=True) df = df.set_index('Province_State') df = df.groupby('Province_State').sum()",
"= src['Date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['ISO code'].values src_trim['cum_tests'] = src['Cumulative",
"dfs for cases, deaths, recovered here # URL for API call to get",
"def get_owid_tests(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False) ->",
"# removing this column so # we can index into date on all",
"Add columns to US csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src =",
"has_negs = before.diff().min() < 0 if len(non_zeros) and has_negs: first_non_zero = non_zeros[0] maxx",
"'US', 'US_AS', 'Micronesia', 'Kiribati', 'Palau'] for csv in csvs: roi = str(csv).split('.')[0].split('_', 1)[-1]",
"appending rois that don't have testing data for roi in roi_codes_dict: if roi",
"Island': 'US_RI', 'South Carolina': 'US_SC', 'South Dakota': 'US_SD', 'Tennessee': 'US_TN', 'Texas': 'US_TX', 'Utah':",
"and dataframes containing dates, recovery data (values). \"\"\" archived_data = data_path / 'covid-tracking-project-recovery.csv'",
"'US_WY' } def get_jhu(data_path: str, filter_: Union[dict, bool] = True) -> None: \"\"\"Gets",
"{ 'Alabama': 'US_AL', 'Alaska': 'US_AK', 'American Samoa': 'US_AS', 'Arizona': 'US_AZ', 'Arkansas': 'US_AR', 'California':",
"[] # for appending rois that don't have testing data for roi in",
"subsequent day. This script fixes this by nulling such data and applying a",
"pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID global vaccines",
"pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'] df['cum_cases'] = source['totalCases'].values",
"index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID data.') pass for",
"# Scrape the data dfs = {} for region in ['global', 'US']: dfs[region]",
"https://github.com/wcota/covid19br (<NAME>) \"\"\" url = \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try: df_raw = pd.read_csv(url) except HTTPError: print(\"Could",
"tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) # Exclude final day because",
"for that country if country in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'cum_recover',",
"cum].max() # Find the bad entries and null the corresponding # cumulative column,",
"False) -> None: \"\"\" Get US vaccines data from Our World In Data",
"country in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US',",
"def get_canada(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False) ->",
"= pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests']) src_trim['dates2'] = src['Date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['ISO",
"'Kentucky': 'US_KY', 'Louisiana': 'US_LA', 'Maine': 'US_ME', 'Maryland': 'US_MD', 'Massachusetts': 'US_MA', 'Michigan': 'US_MI', 'Minnesota':",
"source['confirmed'].loc[state].values df['cum_deaths'] = source['deaths'].loc[state].values df[['new_cases', 'new_deaths']] = df[['cum_cases', 'cum_deaths']].diff() # add recovery data",
"if 'covidtimeseries' in str(x)] rois_to_remove = ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa',",
"df.to_csv(data_path / ('covidtimeseries_CA_%s.csv' % province)) def fix_canada_dates(x): return datetime.strptime(x, '%d-%m-%Y') def get_brazil(data_path: str,",
"url_template % i try: df = pd.read_csv(url) df_trim = pd.DataFrame(columns=['Province_State', 'cum_tests', 'dates2']) df_trim['Province_State']",
"screws up interpolator if np.isnan(df.loc[df.index[-1], cum]): df.loc[df.index[-1], cum] = maxx # Then run",
"= df return ctp_dfs def get_canada(data_path: str, filter_: Union[dict, bool] = True, fixes:",
"in ['cases', 'mortality', 'recovered']: url_path = url_template % kind # Create the full",
"if len(non_zeros) and has_negs: first_non_zero = non_zeros[0] maxx = df.loc[first_non_zero, cum].max() # Find",
"src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict",
"= df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[state].values df['cum_deaths'] = source['deaths'].loc[state].values df[['new_cases', 'new_deaths']] = df[['cum_cases', 'cum_deaths']].diff()",
"in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'new_cases','new_deaths','new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] =",
"df['population'] = population except: pass # Fill NaN with 0 and convert to",
"2020 url_template = 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for kind in ['cases', 'mortality', 'recovered']: url_path = url_template",
"don't have testing data for roi in roi_codes_dict: if roi not in src_rois:",
"if state in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'new_cases','new_deaths','new_uninfected']) df['dates2'] = source['confirmed'].columns",
"pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'].apply(fix_canada_dates) # Convert date",
"data already included if 'vaccin' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so",
"counts but not new counts. columns (list): List of columns (without cum_ prefix)",
"datetime.strptime(x, '%Y-%m-%d') return datetime.strftime(y, '%m/%d/%y') def get_jhu_us_states_tests(data_path: str, filter_: Union[dict, bool] = False)",
"df['new_recover'] = df['cum_recover'].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: roi = 'BR_' +",
"US_STATE_ABBREV = { 'Alabama': 'US_AL', 'Alaska': 'US_AK', 'American Samoa': 'US_AS', 'Arizona': 'US_AZ', 'Arkansas':",
"= df.groupby('Province_State').sum() # combine counties to create state level data df = df[[x",
"in filter_.items(): enough = d[key].index[d[key].max(axis=1) >= minimum].tolist() good = good.intersection(enough) bad = set(d['confirmed'].index).difference(good)",
"Check if OWID vaccines data already included if 'vaccin' in i: df_timeseries.drop([i], axis=1,",
"columns (without cum_ prefix) so create new counts for. Returns: df_fixed (pd.DataFrame): DataFrame",
"= df.set_index('Province_State') df = df.groupby('Province_State').sum() # combine counties to create state level data",
"nulls to have # monotonic growth after = df[cum].interpolate('pchip') diff = after.diff() if",
"in csvs: roi = str(csv).split('.')[0].split('_', 1)[-1] if roi in rois_to_remove: try: if os.path.exists(csv):",
"source['hospitalizedDischarged'].isnull().all() == False: df['cum_recover'] = source['hospitalizedDischarged'].values else: df['cum_recover'] = np.NaN df.sort_values(by=['dates2'], inplace=True) #",
"= pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df = df.set_index('dates2').fillna(0).astype(int) # Fill NaN",
"region in ['global', 'US']: dfs[region] = {} for kind in ['confirmed', 'deaths', 'recovered']:",
"index into date on all 3 dfs at same position source.rename(columns={source.columns[1]: \"date\" },",
"skipping because no data continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi])",
"MS Zaandam, Samoa, Vanuatu, Marshall Islands, US, US_AS (American Somoa)\"\"\" csvs = [x",
"'North Carolina': 'US_NC', 'North Dakota': 'US_ND', 'Northern Mariana Islands':'US_MP', 'Ohio': 'US_OH', 'Oklahoma': 'US_OK',",
"'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date'] df['cum_cases'] = source['totalCases'].values df['cum_deaths'] = source['deaths'].values df['cum_recover']",
"into dataframe. Args: roi (str): Region we are working with; used for print",
"between valid days of data. This only affects a small number of regions.",
"'cum_deaths', 'new_deaths', 'cum_recover', 'new_recover', 'new_uninfected', 'cum_tests', 'new_tests', 'population']].copy() df_result_trim = df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path)",
"states'): url = url_template % i try: df = pd.read_csv(url) df_trim = pd.DataFrame(columns=['Province_State',",
"2021 dfs[region][kind] = df # Add to dictionary of dataframes # Generate a",
"url = url_template % i try: df = pd.read_csv(url) df_trim = pd.DataFrame(columns=['Province_State', 'cum_tests',",
"src['ISO code'].values src_trim['cum_tests'] = src['Cumulative total'].fillna(-1).astype(int).values src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data",
"string df = df.set_index('dates2').fillna(0).astype(int) # Fill NaN with 0 and convert to int",
"\") print(\"\") def dummy_cumulative_new_counts(roi, df, columns: list): \"\"\" There are cases where cum",
"and kind in 'recovered': continue is_c = df['Country/Region'] == country df2 = df[is_c].sum(axis=0,",
"to spike, and we don't want to miss new counts before the gap.",
"in df.columns: df_trim['cum_tests'] = df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim) if 'Total_Test_Results' in df.columns: df_trim['cum_tests'] = df['Total_Test_Results'].fillna(-1).astype(int).values",
"i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we can add new src_roi =",
"rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_tests']], how='left', on='dates2') df_combined['new_tests'] = df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests']",
"print statements. df (pd.DataFrame): DataFrame containing counts but not new counts. columns (list):",
"%s\" % (kind, roi)) df['new_%s' % kind] = -1 out = data_path /",
"datetime obj before converting to string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to",
"is_c = df['Country/Region'] == country df2 = df[is_c].sum(axis=0, skipna=False).to_frame().T df2['Country/Region'] = country df2",
"recovery data df.set_index('dates2', inplace=True) df = df.merge(us_recovery_data[state], on='dates2', how='left') df['tmp_new_recover'] = df['new_recover'].fillna(0).astype(int) #",
"'Quebec', 'Saskatchewan', 'Yukon'] # Export timeseries data for each province for province in",
"'Ohio': 'US_OH', 'Oklahoma': 'US_OK', 'Oregon': 'US_OR', 'Pennsylvania': 'US_PA', 'Puerto Rico': 'US_PR', 'Rhode Island':",
"'Illinois': 'US_IL', 'Indiana': 'US_IN', 'Iowa': 'US_IA', 'Kansas': 'US_KS', 'Kentucky': 'US_KY', 'Louisiana': 'US_LA', 'Maine':",
"Download the data into a dataframe except HTTPError: print(\"Could not download data for",
"to abbrev df.replace(US_STATE_ABBREV, inplace=True) df = df.set_index('Province_State') df = df.groupby('Province_State').sum() # combine counties",
"\"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'Alpha-3 code', 'cum_vaccinations',",
"src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values roi_codes = pd.read_csv(data_path",
"from N to a value less than N on a subsequent day. This",
"\"\"\"Gets archived US recovery data from The COVID Tracking Project. https://covidtracking.com Args: data_path",
"Our World In Data https://github.com/owid/covid-19-data Add columns cum_tests and new_tests to csvs in",
"> 0].index has_negs = before.diff().min() < 0 if len(non_zeros) and has_negs: first_non_zero =",
"dfs df_rawtemp = cases.merge(recovered, on=['date', 'province'], how='outer') df_raw = df_rawtemp.merge(deaths, on=['date', 'province'], how='outer')",
"'CA_' + province) df['population'] = population except: pass df.sort_values(by=['dates2'], inplace=True) # sort by",
"df_timeseries.drop([i], axis=1, inplace=True) # drop so we can add new src_roi = src_trim[src_trim['Alpha-3",
"on Jan 22 2020 url_template = 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for kind in ['cases', 'mortality', 'recovered']:",
"\") for roi in roi_codes_dict: if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\")",
"str) -> None: \"\"\"Fix negative values in daily data. The purpose of this",
"Grande do Sul', 'SC':'Santa Catarina', 'SE':'Sergipe', 'SP':'Sao Paulo', 'TO':'Tocantins'} for state in tqdm(state_code,",
"timeseries CSV print(\"OWID global vaccine results missing for: \") for roi in roi_codes_dict:",
"after, label='fixed') r = np.corrcoef(before, after)[0, 1] plt.title(\"%s %s Raw vs Fixed R=%.5g\"",
"src_trim = pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests']) src_trim['dates2'] = src['Date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] =",
"'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['iso_code'].values",
"'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined,",
"new counts get missed. New counts spike when cumulative counts go to -1",
"= {'confirmed': 5, 'recovered': 1, 'deaths': 0} COVIDTRACKER_FILTER_DEFAULTS = {'cum_cases': 5, 'cum_recover': 1,",
"= source['newCases'].values df['new_deaths'] = source['newDeaths'].values df['new_recover'] = df['cum_recover'].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths']",
"True, fixes: bool = False) -> None: \"\"\" Get testing data from Our",
"% (roi, c, r)) plt.legend() else: after = before # Make sure the",
"df = df.iloc[:-1] df = fix_neg(df, roi, plot=plot) df.to_csv(data_path / (csv.name.split('.')[0]+'.csv')) def fix_neg(df:",
"stores their data url_template = (\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") # Scrape the data dfs",
"'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases']",
"than N on a subsequent day. This script fixes this by nulling such",
"in between valid days of data. This only affects a small number of",
"try: start = df_tmp[df_tmp[cum_col] > 0].index.values[0] df_ffill = df_tmp.iloc[start:] df_ffill.set_index('dates2', drop=True, inplace=True) df_ffill[dummy_cum_col]",
"== 'US': # Use state name as index # for k, v in",
"corresponding cumulative values by 1. neg_index = diff[diff < 0].index df.loc[neg_index, cum] +=",
"reports recovery data as hospitalizedDischarged elif source['hospitalizedDischarged'].isnull().all() == False: df['cum_recover'] = source['hospitalizedDischarged'].values else:",
"def fix_neg(df: pd.DataFrame, roi: str, columns: list = ['cases', 'deaths', 'recover'], plot: bool",
"before.diff().min() < 0 if len(non_zeros) and has_negs: first_non_zero = non_zeros[0] maxx = df.loc[first_non_zero,",
"if kind == 'cases': source.drop('cases', axis=1, inplace=True) # removing this column so #",
"+ df['new_deaths'] try: population = get_population_count(data_path, 'CA_' + province) df['population'] = population except:",
"= src_trim[src_trim['Alpha-3 code'] == roi] # filter rows that match roi df_combined =",
"if 'tests' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we can add",
"'US_MO', 'Montana': 'US_MT', 'Nebraska': 'US_NE', 'Nevada': 'US_NV', 'New Hampshire': 'US_NH', 'New Jersey': 'US_NJ',",
"recovered if source['recovered'].isnull().all() == False: df['cum_recover'] = source['recovered'].values # check if roi reports",
"Args: df (pd.DataFrame): DataFrame containing data for one region. roi (str): One region,",
"test results missing for: \") for roi in roi_codes_dict: if roi in unavailable_testing_data:",
"df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['confirmed'].columns",
"# print(\"JHU data acceptable for %s\" % ','.join(good)) # print(\"JHU data not acceptable",
"list of countries from a global dataframe optionally passing a quality check Args:",
"if roi_codes_dict[roi] in [\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because bad",
"tracker (e.g. df['global]). filter (bool, optional): Whether to filter by quality criteria. \"\"\"",
"the downloaded JHU files for that country if state in source['confirmed'].index: df =",
"Princess', 'Grand Princess', 'MS Zaandam', 'US_AS']: print(\"Skipping {}\".format(state)) continue # If we have",
"new cases back into dataframe. Args: roi (str): Region we are working with;",
"country df2 = df[is_c].sum(axis=0, skipna=False).to_frame().T df2['Country/Region'] = country df2 = df2.set_index('Country/Region') more_dfs.append(df2) df",
"roi reports recovery data as hospitalizedDischarged elif source['hospitalizedDischarged'].isnull().all() == False: df['cum_recover'] = source['hospitalizedDischarged'].values",
"except: print(\"Missing population_estimates.csv in data-path\") try: population = df_pop.query('roi == \"{}\"'.format(roi))['population'].values except: print(\"{}",
"Add columns cum_tests and new_tests to csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv'",
"\"\"\" url_template = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" # generate a list of dates for scraping start_dt",
"not download tests data for %s\" % i) df_combined = pd.concat(dfs) df_combined.sort_values(by='Province_State', inplace=True)",
"= df[cum].copy() non_zeros = df[df[new] > 0].index has_negs = before.diff().min() < 0 if",
"Returns: None \"\"\" # Where JHU stores their data url_template = (\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\"",
"converting to string df['dates2'] = pd.to_datetime(df['dates2']).dt.strftime('%m/%d/%y') # convert dates to string df =",
"df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, country) df['population'] = population",
"list): \"\"\" There are cases where cum counts go missing and new counts",
"roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def dummy_cumulative_new_counts(roi, df, columns: list): \"\"\"",
"src_trim['region'] = src['location'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated']",
"filter_=filter_) # For each \"good\" country, # reformat and save that data in",
"= df.set_index('dates2').fillna(0).astype(int) # Fill NaN with 0 and convert to int df.to_csv(data_path /",
"with 0 and convert to int df.to_csv(data_path / ('covidtimeseries_BR_%s.csv' % state_code[state])) def get_owid_tests(data_path:",
"found in data path.\") try: for i in df_timeseries.columns: # Check if testng",
"= df_new.fillna(-1).astype(int) df_fixed = df.join(df_new) df_fixed = df_fixed.fillna(-1).astype(int) return df_fixed def get_owid_us_vaccines(data_path: str,",
"Data https://github.com/owid/covid-19-data Add columns to global csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv'",
"'US_WV', 'Wisconsin': 'US_WI', 'Wyoming': 'US_WY' } def get_jhu(data_path: str, filter_: Union[dict, bool] =",
"= df_raw[df_raw['state'] == state] # Only the given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths',",
"get_brazil(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False) -> None:",
"states: # For each country source = df_raw[df_raw['state'] == state] # Only the",
"'cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] =",
"OWID vaccines data already included if 'vaccin' in i: df_timeseries.drop([i], axis=1, inplace=True) #",
"# monotonic growth after = df[cum].interpolate('pchip') diff = after.diff() if diff.min() < 0:",
"roi) df['population'] = population except: print(\"Could not add population data for {}\".format(state)) pass",
"add population data for {}\".format(state)) pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj",
"roi in rois_to_remove: try: if os.path.exists(csv): print(\"Removing {} from data_path\".format(roi)) os.remove(csv) except: print(\"could",
"in df_timeseries.columns: # Check if OWID testng data already included if 'tests' in",
"decrease and new_tests becomes a large negative number df_combined[['cum_tests', 'new_tests']] = df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values",
"fix_jhu_testing_dates(x): y = datetime.strptime(x, '%m-%d-%Y') return datetime.strftime(y, '%m/%d/%y') def fix_negatives(data_path: str, plot: bool",
"for roi in roi_codes_dict: if roi not in src_rois: unavailable_testing_data.append(roi) continue if roi_codes_dict[roi]",
"data for %s\" % country) source = dfs['US'] states = source['confirmed'].index.tolist() us_recovery_data =",
"a collection of CSVs per date containing states and test results. Args: data_path",
"%s Raw vs Fixed R=%.5g\" % (roi, c, r)) plt.legend() else: after =",
"global vaccine results missing for: \") for roi in roi_codes_dict: if roi in",
"in x for year in ['20', '21'])]] # Use only data columns #",
"in data_path.iterdir() if 'covidtimeseries' in str(x)] rois_to_remove = ['Diamond Princess', 'Grand Princess', 'MS",
"'US_SC', 'South Dakota': 'US_SD', 'Tennessee': 'US_TN', 'Texas': 'US_TX', 'Utah': 'US_UT', 'Vermont': 'US_VT', 'Virgin",
"by `fix_negatives` to fix negatives values for a single region. This function uses",
"[\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because bad data continue try:",
"are still negative first-differences at this # point, increase the corresponding cumulative values",
"def fix_negatives(data_path: str, plot: bool = False) -> None: \"\"\"Fix negative values in",
"src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values src_trim.set_index('dates2', inplace=True, drop=True) src_trim.replace(\"New",
"states and test results. Args: data_path (str): Full path to data directory. Returns:",
"df['cum_cases'] = source['cumulative_cases'].values df['cum_deaths'] = source['cumulative_deaths'].values df['cum_recover'] = source['cumulative_recovered'].values df[['new_cases', 'new_deaths', 'new_recover']] =",
"roi = 'BR_' + state_code[state] population = get_population_count(data_path, roi) df['population'] = population except:",
"pd.DataFrame, roi: str, columns: list = ['cases', 'deaths', 'recover'], plot: bool = False)",
"and merge on csv to add cum_test and new_tests rois = df_tests.roi.unique().tolist() to_remove",
"filter_.items(): enough = d[key].index[d[key].max(axis=1) >= minimum].tolist() good = good.intersection(enough) bad = set(d['confirmed'].index).difference(good) #",
"# open population file df_pop = pd.read_csv(data_path / 'population_estimates.csv') except: print(\"Missing population_estimates.csv in",
"= source['cumulative_deaths'].values df['cum_recover'] = source['cumulative_recovered'].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff()",
"because bad data continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries",
"In Data https://github.com/owid/covid-19-data Add columns cum_tests and new_tests to csvs in data_path. \"\"\"",
"df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global vaccine results",
"# fix duplication issue dfs.append(df_ffill[new_col]) df_new = pd.concat(dfs, axis=1) df_new = df_new.fillna(-1).astype(int) df_fixed",
"= True): \"\"\"Get a list of countries from a global dataframe optionally passing",
"import pandas as pd import requests from tqdm import tqdm from typing import",
"c, r)) plt.legend() else: after = before # Make sure the first differences",
"~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global test results missing for: \")",
"daterange(start_dt, end_dt): dates.append(dt.strftime(\"%m-%d-%Y\")) # cumulative tests are named 'People_Tested' for first 200 ish",
"for x in data_path.iterdir() if 'covidtimeseries' in str(x)] rois_to_remove = ['Diamond Princess', 'Grand",
"province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['date']",
"columns: list): \"\"\" There are cases where cum counts go missing and new",
"for region in ['global', 'US']: dfs[region] = {} for kind in ['confirmed', 'deaths',",
"cumulative counts and perform new cases calculation, then merge those new cases back",
"if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def get_owid_global_vaccines(data_path: str, filter_: Union[dict,",
"Columns to make non-decreasing. Defaults to ['cases', 'deaths', 'recover']. Returns: pd.DataFrame: [description] \"\"\"",
"+ col try: start = df_tmp[df_tmp[cum_col] > 0].index.values[0] df_ffill = df_tmp.iloc[start:] df_ffill.set_index('dates2', drop=True,",
"'US_FL', 'Georgia': 'US_GA', 'Guam': 'US_GU', 'Hawaii': 'US_HI', 'Idaho': 'US_ID', 'Illinois': 'US_IL', 'Indiana': 'US_IN',",
"counts. columns (list): List of columns (without cum_ prefix) so create new counts",
"Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because no data continue try: timeseries_path =",
"state in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'new_cases','new_deaths','new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2']",
"code'] = src['iso_code'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated']",
"False) -> None: \"\"\" Get global vaccines data from Our World In Data",
"OWID global vaccines data.') pass for i in df_timeseries.columns: # Check if OWID",
"src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['region'] = src['location'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations']",
"vaccines data.') pass for i in df_timeseries.columns: # Check if OWID testing data",
"'Missouri': 'US_MO', 'Montana': 'US_MT', 'Nebraska': 'US_NE', 'Nevada': 'US_NV', 'New Hampshire': 'US_NH', 'New Jersey':",
"dataframes containing dates, recovery data (values). \"\"\" archived_data = data_path / 'covid-tracking-project-recovery.csv' df_raw",
"province)) def fix_canada_dates(x): return datetime.strptime(x, '%d-%m-%Y') def get_brazil(data_path: str, filter_: Union[dict, bool] =",
"that match roi df_combined = df_timeseries.merge(src_roi[['cum_tests']], how='left', on='dates2') df_combined['new_tests'] = df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests'] <",
"except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID vaccinations data.') pass for",
"df_combined['Province_State'].unique() sorted_dfs = [] for roi in rois: df_roi = df_combined[df_combined['Province_State'] == roi]",
"df.loc[first_non_zero, cum].max() # Find the bad entries and null the corresponding # cumulative",
"Full path to data directory. Returns: ctp_dfs (dict): Dictionary containing US States (keys)",
"None \"\"\" url_template = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" # generate a list of dates for scraping",
"URL try: df = pd.read_csv(url) # Download the data into a dataframe except",
"+ more_dfs) elif region == 'US': # Use state name as index #",
"str, plot: bool = False) -> None: \"\"\"Fix negative values in daily data.",
"+ df['new_deaths'] try: population = get_population_count(data_path, country) df['population'] = population except: pass #",
"< 0 df.loc[bad[bad].index, cum] = None # Protect against 0 null final value",
"inplace=True) # removing this column so # we can index into date on",
"the cumulative total of cases should not go from N to a value",
"testing data already included if 'vaccin' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop",
"Args: data_path (str): Full path to data directory. Returns: None \"\"\" url_template =",
"def fix_owid_dates(x): y = datetime.strptime(x, '%Y-%m-%d') return datetime.strftime(y, '%m/%d/%y') def get_jhu_us_states_tests(data_path: str, filter_:",
"produced by the functions above. Args: data_path (str): Full path to data directory.",
"cases where cum counts go missing and new counts get missed. New counts",
"# generate a list of dates for scraping start_dt = date(2020, 4, 12)",
"new_col = 'new_' + col try: start = df_tmp[df_tmp[cum_col] > 0].index.values[0] df_ffill =",
"has_negs: first_non_zero = non_zeros[0] maxx = df.loc[first_non_zero, cum].max() # Find the bad entries",
"Whether to plot the changes. Returns: None \"\"\" csvs = [x for x",
"% kind] = -1 out = data_path / (csv.name.split('.')[0]+'.csv') df.to_csv(out) def remove_old_rois(data_path: str):",
"'US_ME', 'Maryland': 'US_MD', 'Massachusetts': 'US_MA', 'Michigan': 'US_MI', 'Minnesota': 'US_MN', 'Mississippi': 'US_MS', 'Missouri': 'US_MO',",
"recover for df['new_uninfected'] = df['tmp_new_recover'] + df['new_deaths'] # new uninfected calculation df =",
"= diff[diff < 0].index df.loc[neg_index, cum] += 1 else: break # Then repeat",
"so they don't become 0. df_ffill.loc[df_ffill[cum_col].isnull(), new_col] = -1 except: print(f'No {cum_col} data",
"'new_tests', 'population']].copy() df_result_trim = df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path) # overwrite timeseries CSV except: print(f'Could",
"= data_path / 'covid-tracking-project-recovery.csv' df_raw = pd.read_csv(archived_data) states = df_raw['state'].unique() ctp_dfs = {}",
"more_dfs) elif region == 'US': # Use state name as index # for",
"go from N to a value less than N on a subsequent day.",
"df['tmp_new_recover'] = df['new_recover'].fillna(0).astype(int) # create temp new recover for df['new_uninfected'] = df['tmp_new_recover'] +",
"= pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID data.')",
"fnf_error: print(fnf_error, 'Could not add OWID global vaccines data.') pass for i in",
"data for Brazil. https://github.com/wcota/covid19br (<NAME>) \"\"\" url = \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try: df_raw = pd.read_csv(url)",
"Our World In Data https://github.com/owid/covid-19-data Add columns to US csvs in data_path. \"\"\"",
"for i in to_remove: if i in rois: rois.remove(i) for roi in rois:",
"date(2020, 4, 12) # When JHU starts reporting end_dt = date.today() dates =",
"# trim down source dataframe src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data =",
"'dates2']) df_trim['Province_State'] = df['Province_State'].values df_trim['dates2'] = fix_jhu_testing_dates(i) # handle cases where column is",
"as fnf_error: print(fnf_error, 'Could not add OWID vaccinations data.') pass for i in",
"src_trim['dates2'] = src['Date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['ISO code'].values src_trim['cum_tests'] =",
"unavailable_testing_data.append(roi) continue if roi_codes_dict[roi] in [\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping",
"\"Vanuatu\"]: # skipping because bad data continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv'",
"each country if country in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu',",
"df_timeseries.drop([i], axis=1, inplace=True) # drop so we can add new src_roi = src_trim[src_trim['region']",
"interpolator if np.isnan(df.loc[df.index[-1], cum]): df.loc[df.index[-1], cum] = maxx # Then run a loop",
"in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def dummy_cumulative_new_counts(roi, df, columns: list): \"\"\" There",
"'cum_people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['iso_code'].values src_trim['cum_vaccinations'] =",
"0 and convert to int df.to_csv(data_path / ('covidtimeseries_CA_%s.csv' % province)) def fix_canada_dates(x): return",
"We don't want it to spike, and we don't want to miss new",
"= pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID global",
"add new src_roi = src_trim[src_trim['Alpha-3 code'] == roi] # filter rows that match",
"\"\"\"Functions for getting data needed to fit the models.\"\"\" import bs4 from datetime",
"become 0. df_ffill.loc[df_ffill[cum_col].isnull(), new_col] = -1 except: print(f'No {cum_col} data to add for",
"0 if len(non_zeros) and has_negs: first_non_zero = non_zeros[0] maxx = df.loc[first_non_zero, cum].max() #",
"country if state in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'US_AS']: print(\"Skipping {}\".format(state))",
"df['dates2'] = source['date'] df['cum_cases'] = source['totalCases'].values df['cum_deaths'] = source['deaths'].values df['cum_recover'] = source['recovered'].values df['new_cases']",
"models.\"\"\" import bs4 from datetime import datetime import matplotlib.pyplot as plt import numpy",
"bs4 from datetime import datetime import matplotlib.pyplot as plt import numpy as np",
"population = get_population_count(data_path, country) df['population'] = population except: pass # Fill NaN with",
"and perform new cases calculation, then merge those new cases back into dataframe.",
"desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) for kind in ['cases', 'deaths', 'recover']:",
"Handle cases where # cumulative counts decrease and new_tests becomes a large negative",
"filter_: Union[dict, bool] = True) -> None: \"\"\"Gets data from Johns Hopkins CSSEGIS",
"= df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, country) df['population'] = population except:",
"'US_GU', 'Hawaii': 'US_HI', 'Idaho': 'US_ID', 'Illinois': 'US_IL', 'Indiana': 'US_IN', 'Iowa': 'US_IA', 'Kansas': 'US_KS',",
"'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'Micronesia','Kiribati']: print(\"Skipping {}\".format(country)) continue",
"urllib.request.urlopen(url_path) as url: data = json.loads(url.read().decode()) source = pd.json_normalize(data[kind]) if kind == 'cases':",
"JHU starts reporting end_dt = date.today() dates = [] delta = end_dt -",
"Columbia': 'US_DC', 'Florida': 'US_FL', 'Georgia': 'US_GA', 'Guam': 'US_GU', 'Hawaii': 'US_HI', 'Idaho': 'US_ID', 'Illinois':",
"rois = df_tests.roi.unique().tolist() to_remove = ['Diamond Princess', 'Grand Princess', 'Recovered'] for i in",
"from datetime import timedelta, date import pandas as pd pd.options.mode.chained_assignment = None #",
"while True: # Interpolates the cumulative column nulls to have # monotonic growth",
"pass dfs[state] = df dfs[state].to_csv(data_path / ('covidtimeseries_%s.csv' % state)) else: print(\"No data for",
"we have data in the downloaded JHU files for that country if country",
"print(f'No {cum_col} data to add for {roi}.') df_ffill[new_col] = -1 df_ffill = df_ffill[~df_ffill.index.duplicated()]",
"df_combined.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global test results missing for: \") for",
"df_raw = pd.read_csv(archived_data) states = df_raw['state'].unique() ctp_dfs = {} for state in states:",
"{roi}.') df_ffill[new_col] = -1 df_ffill = df_ffill[~df_ffill.index.duplicated()] # fix duplication issue dfs.append(df_ffill[new_col]) df_new",
"== state] # Only the given state df = pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2'] = source['date'].apply(fix_ct_dates)",
"as url: data = json.loads(url.read().decode()) source = pd.json_normalize(data[kind]) if kind == 'cases': source.drop('cases',",
"df[df[new] > 0].index has_negs = before.diff().min() < 0 if len(non_zeros) and has_negs: first_non_zero",
"the cumulative column nulls to have # monotonic growth after = df[cum].interpolate('pchip') diff",
"# first check if roi reports recovery data as recovered if source['recovered'].isnull().all() ==",
"data starting on Jan 22 2020 url_template = 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for kind in ['cases',",
"df_fixed def get_owid_us_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes: bool = False)",
"= 'cum_' + col dummy_cum_col = 'dummy_' + cum_col new_col = 'new_' +",
"= pd.concat(dfs, axis=1) df_new = df_new.fillna(-1).astype(int) df_fixed = df.join(df_new) df_fixed = df_fixed.fillna(-1).astype(int) return",
"kind in ['cases', 'mortality', 'recovered']: url_path = url_template % kind # Create the",
"Add to dictionary of dataframes # Generate a list of countries that have",
"'new_tests']] = df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values df_combined = df_combined.loc[:, ~df_combined.columns.str.contains('^Unnamed')] df_combined.to_csv(timeseries_path) # overwrite timeseries CSV",
"test results. Data is stored as a collection of CSVs per date containing",
"'US_NE', 'Nevada': 'US_NV', 'New Hampshire': 'US_NH', 'New Jersey': 'US_NJ', 'New Mexico': 'US_NM', 'New",
"then merge those new cases back into dataframe. Args: roi (str): Region we",
"stored as a collection of CSVs per date containing states and test results.",
"dfs = [] # we will append dfs for cases, deaths, recovered here",
"add new df_roi_tests = df_tests[df_tests['roi'] == roi] # filter down to roi df_result",
"if df['cum_%s' % kind].sum() == 0: print(\"Negifying 'new_%s' for %s\" % (kind, roi))",
"> 0].index.values[0] df_ffill = df_tmp.iloc[start:] df_ffill.set_index('dates2', drop=True, inplace=True) df_ffill[dummy_cum_col] = df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col] =",
"'SP':'Sao Paulo', 'TO':'Tocantins'} for state in tqdm(state_code, desc='Brazilian States'): source = df_raw[df_raw['state'] ==",
"= ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'US_AS',",
"kind in 'recovered': continue is_c = df['Country/Region'] == country df2 = df[is_c].sum(axis=0, skipna=False).to_frame().T",
"'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, country) df['population'] =",
"else: df['cum_recover'] = np.NaN df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting",
"non-negative assert after.diff().min() >= 0 # Replace the values df[new] = df[cum].diff().fillna(0).astype(int).values return",
"that have \"good\" data, # according to these criteria: good_countries = get_countries(dfs['global'], filter_=filter_)",
"filter down to roi df_result = df_timeseries.merge(df_roi_tests, on='dates2', how='left') df_result.fillna(-1, inplace=True) df_result.loc[df_result['new_tests'] <",
"are named 'People_Tested' for first 200 ish days # then cumulative tests are",
"False) -> pd.DataFrame: \"\"\"Used by `fix_negatives` to fix negatives values for a single",
"roi df_combined = df_timeseries.merge(src_roi[['cum_tests']], how='left', on='dates2') df_combined['new_tests'] = df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests'] < 0, 'new_tests']",
"in ['global', 'US']: dfs[region] = {} for kind in ['confirmed', 'deaths', 'recovered']: url",
"df['cum_recover'].diff() df['new_uninfected'] = df['new_recover'] + df['new_deaths'] try: roi = 'BR_' + state_code[state] population",
"try: population = get_population_count(data_path, state) df['population'] = population except: pass dfs[state] = df",
"out = data_path / (csv.name.split('.')[0]+'.csv') df.to_csv(out) def remove_old_rois(data_path: str): \"\"\"Delete time-series files for",
"df dfs[state].to_csv(data_path / ('covidtimeseries_%s.csv' % state)) else: print(\"No data for %s\" % state)",
"\"\"\"Delete time-series files for regions no longer tracked, such as: Diamond Princess, MS",
"can index into date on all 3 dfs at same position source.rename(columns={source.columns[1]: \"date\"",
"if 'People_Tested' in df.columns: df_trim['cum_tests'] = df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim) if 'Total_Test_Results' in df.columns: df_trim['cum_tests']",
"'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for kind in ['cases', 'mortality', 'recovered']: url_path = url_template % kind #",
"-1 so they don't become 0. df_ffill.loc[df_ffill[cum_col].isnull(), new_col] = -1 except: print(f'No {cum_col}",
"fix negatives values for a single region. This function uses monotonic spline interpolation",
"in US_STATE_ABBREV: try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % US_STATE_ABBREV[roi]) df_timeseries = pd.read_csv(timeseries_path,",
"0].index df.loc[neg_index, cum] += 1 else: break # Then repeat if plot: plt.figure()",
"on='dates2') df_combined['new_tests'] = df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests'] < 0, 'new_tests'] = -1 # Handle cases",
"Scotia', 'Nunavut', 'NWT', 'Ontario', 'PEI', 'Quebec', 'Saskatchewan', 'Yukon'] # Export timeseries data for",
"df.iloc[:-1] df = fix_neg(df, roi, plot=plot) df.to_csv(data_path / (csv.name.split('.')[0]+'.csv')) def fix_neg(df: pd.DataFrame, roi:",
"get_jhu_us_states_tests(data_path: str, filter_: Union[dict, bool] = False) -> None: \"\"\" Scrape JHU for",
"== 0 df.loc[bad[bad].index, cum] = None # 2) New daily columns which are",
"(pd.DataFrame): DataFrame containing counts but not new counts. columns (list): List of columns",
"first differences are now all non-negative assert after.diff().min() >= 0 # Replace the",
"country df2 = df2.set_index('Country/Region') more_dfs.append(df2) df = pd.concat([df1] + more_dfs) elif region ==",
"'cum_tests', 'dates2']) df_trim['Province_State'] = df['Province_State'].values df_trim['dates2'] = fix_jhu_testing_dates(i) # handle cases where column",
"the full data URL try: df = pd.read_csv(url) # Download the data into",
"(str): Region. Returns: population (int): Population count for ROI (if exists). \"\"\" try:",
"point, increase the corresponding cumulative values by 1. neg_index = diff[diff < 0].index",
"dataframe src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data = [] # for appending",
"# overwrite timeseries CSV print(\"OWID global vaccine results missing for: \") for roi",
"'PI':'Piaui', 'PR':'Parana', 'RJ':'Rio de Janeiro', 'RN':'Rio Grande do Norte', 'RO':'Rondonia', 'RR':'Roraima', 'RS':'Rio Grande",
"data for %s\" % i) df_combined = pd.concat(dfs) df_combined.sort_values(by='Province_State', inplace=True) df_combined['Date'] = pd.to_datetime(df_combined['dates2'])",
"pd.DataFrame(columns=['dates2', 'Alpha-3 code', 'cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates",
"src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['Alpha-3 code'] = src['iso_code'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values",
"src_trim[src_trim['Alpha-3 code'] == roi] # filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_tests']],",
"'cum_deaths', 'new_cases','new_deaths','new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[state].values df['cum_deaths'] =",
"that cumulative counts are non-decreasing. Args: df (pd.DataFrame): DataFrame containing data for one",
"= df_timeseries.merge(df_roi_tests, on='dates2', how='left') df_result.fillna(-1, inplace=True) df_result.loc[df_result['new_tests'] < 0, 'new_tests'] = -1 #",
"monotonic spline interpolation in between valid days of data. This only affects a",
"data and the difference is taken between a new cumulative count and -1.",
"in 'recovered': continue is_c = df['Country/Region'] == country df2 = df[is_c].sum(axis=0, skipna=False).to_frame().T df2['Country/Region']",
"df_raw[df_raw['state'] == state] # Only the given state df = pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2'] =",
"5, 'recovered': 1, 'deaths': 0} COVIDTRACKER_FILTER_DEFAULTS = {'cum_cases': 5, 'cum_recover': 1, 'cum_deaths': 0}",
"for Canadian Provinces. https://opencovid.ca/ \"\"\" dfs = [] # we will append dfs",
"Full path to data directory. Returns: None \"\"\" # Where JHU stores their",
"src['people_fully_vaccinated'].values roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down",
"df_ffill[dummy_cum_col] = df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col] = df_ffill[dummy_cum_col].diff().astype('Int64') # If cumulative counts are missing, set",
"df['Province/State'].isnull() # Whole countries only; use country name as index df1 = df[has_no_province].set_index('Country/Region')",
"and new_tests rois = df_tests.roi.unique().tolist() to_remove = ['Diamond Princess', 'Grand Princess', 'Recovered'] for",
"df['new_deaths'] try: population = get_population_count(data_path, country) df['population'] = population except: pass # Fill",
"overwrite timeseries CSV print(\"OWID global vaccine results missing for: \") for roi in",
"first_non_zero = non_zeros[0] maxx = df.loc[first_non_zero, cum].max() # Find the bad entries and",
"= after.diff() if diff.min() < 0: # If there are still negative first-differences",
"roi in roi_codes_dict: if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def dummy_cumulative_new_counts(roi,",
"if country == 'Canada' and kind in 'recovered': continue is_c = df['Country/Region'] ==",
"(str): Full path to data directory. Returns: None \"\"\" url_template = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" #",
"'tests' in i: df_timeseries.drop([i], axis=1, inplace=True) # drop so we can add new",
"CSSEGIS (countries only). https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19 Args: data_path (str): Full path to data directory.",
"str(x)] for csv in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) for",
"\"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because bad data continue try: timeseries_path",
"a large negative number df_result['new_tests'] = df_result['new_tests'].astype(int) df_result[['cum_tests', 'new_tests']] = df_result[['cum_tests', 'new_tests']].astype(int) df_result_trim",
"for ROI (if exists). \"\"\" try: # open population file df_pop = pd.read_csv(data_path",
"how='outer') df_raw = df_rawtemp.merge(deaths, on=['date', 'province'], how='outer') df_raw.fillna(0, inplace=True) provinces = ['Alberta', 'BC',",
"= df_result[['cum_tests', 'new_tests']].astype(int) df_result_trim = df_result[['dates2', 'cum_cases', 'new_cases', 'cum_deaths', 'new_deaths', 'cum_recover', 'new_recover', 'new_uninfected',",
"= [x for x in data_path.iterdir() if 'covidtimeseries' in str(x)] rois_to_remove = ['Diamond",
"except HTTPError: print(\"Could not download state-level data for Brazil\") state_code = {'AC':'Acre', 'AL':'Alagoas',",
"== roi] # filter down to roi df_result = df_timeseries.merge(df_roi_tests, on='dates2', how='left') df_result.fillna(-1,",
"+ timedelta(n) def fix_jhu_testing_dates(x): y = datetime.strptime(x, '%m-%d-%Y') return datetime.strftime(y, '%m/%d/%y') def fix_negatives(data_path:",
"state-level data for Brazil\") state_code = {'AC':'Acre', 'AL':'Alagoas', 'AM':'Amazonas', 'AP':'Amapa', 'BA':'Bahia','CE':'Ceara', 'DF':'Distrito Federal',",
"when cumulative counts go to -1 for missing data and the difference is",
"rois and merge on csv to add cum_test and new_tests rois = df_tests.roi.unique().tolist()",
"HTTPError: print(\"Could not download data for %s, %s\" % (kind, region)) else: if",
"roi] # filter down to roi df_result = df_timeseries.merge(df_roi_tests, on='dates2', how='left') df_result.fillna(-1, inplace=True)",
"each province for province in tqdm(provinces, desc='Canadian Provinces'): source = df_raw[df_raw['province'] == province]",
"% kind].sum() == 0: print(\"Negifying 'new_%s' for %s\" % (kind, roi)) df['new_%s' %",
"as np import pandas as pd import requests from tqdm import tqdm from",
"NaN with 0 and convert to int dfs[country] = df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path / ('covidtimeseries_%s.csv'",
"'recovered': continue is_c = df['Country/Region'] == country df2 = df[is_c].sum(axis=0, skipna=False).to_frame().T df2['Country/Region'] =",
"maxx # Then run a loop which: while True: # Interpolates the cumulative",
"'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/testing/covid-testing-all-observations.csv' src = pd.read_csv(url) roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict()",
"str(csv).split('.')[0].split('_', 1)[-1] if roi in rois_to_remove: try: if os.path.exists(csv): print(\"Removing {} from data_path\".format(roi))",
"population_estimates.csv in data-path\") try: population = df_pop.query('roi == \"{}\"'.format(roi))['population'].values except: print(\"{} population estimate",
"If cumulative counts are missing, set new counts to -1 so they don't",
"state in states: # For each country source = df_raw[df_raw['state'] == state] #",
"int dfs[country] = df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path / ('covidtimeseries_%s.csv' % country)) else: print(\"No data for",
"convert to int df.to_csv(data_path / ('covidtimeseries_BR_%s.csv' % state_code[state])) def get_owid_tests(data_path: str, filter_: Union[dict,",
"dfs['US'] states = source['confirmed'].index.tolist() us_recovery_data = covid_tracking_recovery(data_path) for state in tqdm(states, desc='US States'):",
"sure the first differences are now all non-negative assert after.diff().min() >= 0 #",
"data. This only affects a small number of regions. It overwrites the original",
"'US_KY', 'Louisiana': 'US_LA', 'Maine': 'US_ME', 'Maryland': 'US_MD', 'Massachusetts': 'US_MA', 'Michigan': 'US_MI', 'Minnesota': 'US_MN',",
"Sul', 'MT':'Mato Grosso', 'PA':'Para', 'PB':'Paraiba', 'PE':'Pernambuco', 'PI':'Piaui', 'PR':'Parana', 'RJ':'Rio de Janeiro', 'RN':'Rio Grande",
"= df # Add to dictionary of dataframes # Generate a list of",
"FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID vaccinations data.') pass for i",
"path to data directory. Returns: None \"\"\" url_template = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" # generate a",
"= population except: print(\"Could not add population data for {}\".format(state)) pass df.sort_values(by=['dates2'], inplace=True)",
"% country) source = dfs['US'] states = source['confirmed'].index.tolist() us_recovery_data = covid_tracking_recovery(data_path) for state",
"they don't become 0. df_ffill.loc[df_ffill[cum_col].isnull(), new_col] = -1 except: print(f'No {cum_col} data to",
"column is people_tested and then switches to Total_Test_Results if 'People_Tested' in df.columns: df_trim['cum_tests']",
"rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns",
"bool = False) -> None: \"\"\" Get global vaccines data from Our World",
"roi in US_STATE_ABBREV: try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % US_STATE_ABBREV[roi]) df_timeseries =",
"dfs[2] # combine dfs df_rawtemp = cases.merge(recovered, on=['date', 'province'], how='outer') df_raw = df_rawtemp.merge(deaths,",
"not download state-level data for Brazil\") state_code = {'AC':'Acre', 'AL':'Alagoas', 'AM':'Amazonas', 'AP':'Amapa', 'BA':'Bahia','CE':'Ceara',",
"Full path to data directory. roi (str): Region. Returns: population (int): Population count",
"= np.NaN df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting to string",
"label='fixed') r = np.corrcoef(before, after)[0, 1] plt.title(\"%s %s Raw vs Fixed R=%.5g\" %",
"diff = after.diff() if diff.min() < 0: # If there are still negative",
"'cum_cases', 'new_cases', 'cum_deaths', 'new_deaths', 'cum_recover', 'new_recover', 'new_uninfected', 'cum_tests', 'new_tests', 'population']].copy() df_result_trim = df_result_trim.loc[:,",
"and now new counts. \"\"\" dfs = [] df_tmp = df.copy() df_tmp.reset_index(inplace=True) for",
"pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2'] = source['date'].apply(fix_ct_dates) # Convert date format # first check if roi",
"'new_recover', 'new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[country].values df['cum_deaths'] =",
"print(\"{} population estimate not found in population_estimates.csv\".format(args.roi)) return int(population) def covid_tracking_recovery(data_path: str): \"\"\"Gets",
"index df1 = df[has_no_province].set_index('Country/Region') more_dfs = [] for country in ['China', 'Canada', 'Australia']:",
"pd.DataFrame: \"\"\"Used by `fix_negatives` to fix negatives values for a single region. This",
"= date(2020, 4, 12) # When JHU starts reporting end_dt = date.today() dates",
"CSVs per date containing states and test results. Args: data_path (str): Full path",
"roi] # filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_tests']], how='left', on='dates2') df_combined['new_tests']",
"df_result_trim.to_csv(csv_path) # overwrite timeseries CSV except: print(f'Could not get tests data for {roi}.')",
"'US_MI' or 'Greece'. columns (list, optional): Columns to make non-decreasing. Defaults to ['cases',",
"Grosso do Sul', 'MT':'Mato Grosso', 'PA':'Para', 'PB':'Paraiba', 'PE':'Pernambuco', 'PI':'Piaui', 'PR':'Parana', 'RJ':'Rio de Janeiro',",
"inplace=True) provinces = ['Alberta', 'BC', 'Manitoba', 'New Brunswick', 'NL', 'Nova Scotia', 'Nunavut', 'NWT',",
"# Whole countries only; use country name as index df1 = df[has_no_province].set_index('Country/Region') more_dfs",
"df['cum_deaths'] = source['deaths'].loc[country].values df['cum_recover'] = source['recovered'].loc[country].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths',",
"df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting to string df['dates2'] =",
"'Nunavut', 'NWT', 'Ontario', 'PEI', 'Quebec', 'Saskatchewan', 'Yukon'] # Export timeseries data for each",
"new counts for. Returns: df_fixed (pd.DataFrame): DataFrame containing cumulative and now new counts.",
"df_ffill[new_col] = df_ffill[dummy_cum_col].diff().astype('Int64') # If cumulative counts are missing, set new counts to",
"= df['cum_recover'].diff() ctp_dfs['US_'+state] = df return ctp_dfs def get_canada(data_path: str, filter_: Union[dict, bool]",
"'Northern Mariana Islands':'US_MP', 'Ohio': 'US_OH', 'Oklahoma': 'US_OK', 'Oregon': 'US_OR', 'Pennsylvania': 'US_PA', 'Puerto Rico':",
"not add population data for {}\".format(state)) pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime",
"of countries from a global dataframe optionally passing a quality check Args: d",
"to roi df_result = df_timeseries.merge(df_roi_tests, on='dates2', how='left') df_result.fillna(-1, inplace=True) df_result.loc[df_result['new_tests'] < 0, 'new_tests']",
"for kind in ['confirmed', 'deaths', 'recovered']: url = url_template % (kind, region) #",
"Scrape JHU for US State level test results. Data is stored as a",
"set(d['confirmed'].index).difference(good) # print(\"JHU data acceptable for %s\" % ','.join(good)) # print(\"JHU data not",
"'cum_recover', 'new_cases', 'new_deaths', 'new_recover', 'new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] =",
"Sul', 'SC':'Santa Catarina', 'SE':'Sergipe', 'SP':'Sao Paulo', 'TO':'Tocantins'} for state in tqdm(state_code, desc='Brazilian States'):",
"src_rois: unavailable_testing_data.append(roi) continue if roi_codes_dict[roi] in [\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: #",
"df['cum_recover'].diff() ctp_dfs['US_'+state] = df return ctp_dfs def get_canada(data_path: str, filter_: Union[dict, bool] =",
"dates to string df = df.set_index('dates2').fillna(0).astype(int) # Fill NaN with 0 and convert",
"add OWID vaccinations data.') pass for i in df_timeseries.columns: # Check if OWID",
"Data https://github.com/owid/covid-19-data Add columns to US csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv'",
"# being non-zero bad = df.loc[first_non_zero:, cum] == 0 df.loc[bad[bad].index, cum] = None",
"source['cumulative_recovered'].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] +",
"data.') pass for i in df_timeseries.columns: # Check if OWID testing data already",
"for Brazil\") state_code = {'AC':'Acre', 'AL':'Alagoas', 'AM':'Amazonas', 'AP':'Amapa', 'BA':'Bahia','CE':'Ceara', 'DF':'Distrito Federal', 'ES':'Espirito Santo',",
"to create state level data df = df[[x for x in df if",
"= df_combined['Province_State'].unique() sorted_dfs = [] for roi in rois: df_roi = df_combined[df_combined['Province_State'] ==",
"= pd.read_csv(csv) # Exclude final day because it is often a partial count.",
"df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global vaccine results missing for:",
"the first differences are now all non-negative assert after.diff().min() >= 0 # Replace",
"population except: pass # Fill NaN with 0 and convert to int dfs[country]",
"'Virgin Islands': 'US_VI', 'Virginia': 'US_VA', 'Washington': 'US_WA', 'West Virginia': 'US_WV', 'Wisconsin': 'US_WI', 'Wyoming':",
"path to data directory. Returns: ctp_dfs (dict): Dictionary containing US States (keys) and",
"in range(int ((date2 - date1).days)+1): yield date1 + timedelta(n) def fix_jhu_testing_dates(x): y =",
"Add 'US_' to abbrev df.replace(US_STATE_ABBREV, inplace=True) df = df.set_index('Province_State') df = df.groupby('Province_State').sum() #",
"from Johns Hopkins CSSEGIS (countries only). https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19 Args: data_path (str): Full path",
"'new_recover', 'new_uninfected']) df['dates2'] = source['date'].apply(fix_canada_dates) # Convert date format df['cum_cases'] = source['cumulative_cases'].values df['cum_deaths']",
"'US_AS']: print(\"Skipping {}\".format(state)) continue # If we have data in the downloaded JHU",
"cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined, cum_vacc_columns) df = df.loc[:,",
"url_template % kind # Create the full data URL with urllib.request.urlopen(url_path) as url:",
"counts go to -1 for missing data and the difference is taken between",
"pd.read_csv(archived_data) states = df_raw['state'].unique() ctp_dfs = {} for state in states: # For",
"number of regions. It overwrites the original .csv files produced by the functions",
"desc='Brazilian States'): source = df_raw[df_raw['state'] == state] # Only the given province df",
"generate a list of dates for scraping start_dt = date(2020, 4, 12) #",
"\"\"\"Gets data from Johns Hopkins CSSEGIS (countries only). https://coronavirus.jhu.edu/map.html https://github.com/CSSEGISandData/COVID-19 Args: data_path (str):",
"0: # If there are still negative first-differences at this # point, increase",
"name as index # for k, v in US_STATE_ABBREV.items(): # get US state",
"values by 1. neg_index = diff[diff < 0].index df.loc[neg_index, cum] += 1 else:",
"recovery data as hospitalizedDischarged elif source['hospitalizedDischarged'].isnull().all() == False: df['cum_recover'] = source['hospitalizedDischarged'].values else: df['cum_recover']",
"= \"https://raw.githubusercontent.com/wcota/covid19br/master/cases-brazil-states.csv\" try: df_raw = pd.read_csv(url) except HTTPError: print(\"Could not download state-level data",
"not add OWID vaccinations data.') pass for i in df_timeseries.columns: # Check if",
"Mexico': 'US_NM', 'New York': 'US_NY', 'North Carolina': 'US_NC', 'North Dakota': 'US_ND', 'Northern Mariana",
"{roi}.') def daterange(date1, date2): for n in range(int ((date2 - date1).days)+1): yield date1",
"the difference is taken between a new cumulative count and -1. We don't",
"and new_tests becomes a large negative number df_combined[['cum_tests', 'new_tests']] = df_combined[['cum_tests', 'new_tests']].fillna(-1).astype(int).values df_combined",
"try: for i in df_timeseries.columns: # Check if testng data already included if",
"\"good\" data, # according to these criteria: good_countries = get_countries(dfs['global'], filter_=filter_) # For",
"~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path) # overwrite timeseries CSV except: print(f'Could not get tests data for",
"df_new.fillna(-1).astype(int) df_fixed = df.join(df_new) df_fixed = df_fixed.fillna(-1).astype(int) return df_fixed def get_owid_us_vaccines(data_path: str, filter_:",
"file. source = dfs['global'] for country in tqdm(good_countries, desc='Countries'): # For each country",
"= pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID vaccinations",
"'new_uninfected', 'cum_tests', 'new_tests', 'population']].copy() df_result_trim = df_result_trim.loc[:, ~df_result_trim.columns.str.contains('^Unnamed')] df_result_trim.to_csv(csv_path) # overwrite timeseries CSV",
"'Iowa': 'US_IA', 'Kansas': 'US_KS', 'Kentucky': 'US_KY', 'Louisiana': 'US_LA', 'Maine': 'US_ME', 'Maryland': 'US_MD', 'Massachusetts':",
"= 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'region', 'cum_vaccinations', 'daily_vaccinations', 'people_vaccinated', 'people_fully_vaccinated'])",
"CSV def fix_owid_dates(x): y = datetime.strptime(x, '%Y-%m-%d') return datetime.strftime(y, '%m/%d/%y') def get_jhu_us_states_tests(data_path: str,",
"Canada's Open Covid group for Canadian Provinces. https://opencovid.ca/ \"\"\" dfs = [] #",
"in roi_codes_dict: if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\" \") print(\"\") def get_owid_global_vaccines(data_path: str,",
"skipping because bad data continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi])",
"pd.options.mode.chained_assignment = None # default='warn' JHU_FILTER_DEFAULTS = {'confirmed': 5, 'recovered': 1, 'deaths': 0}",
"dfs = {} for region in ['global', 'US']: dfs[region] = {} for kind",
"a small number of regions. It overwrites the original .csv files produced by",
"for %s\" % ','.join(bad)) return good def get_population_count(data_path:str, roi): \"\"\" Check if we",
"data for {roi}.') def daterange(date1, date2): for n in range(int ((date2 - date1).days)+1):",
"'US', 'Micronesia','Kiribati']: print(\"Skipping {}\".format(country)) continue # If we have data in the downloaded",
"fit the models.\"\"\" import bs4 from datetime import datetime import matplotlib.pyplot as plt",
"csv_path = data_path / f'covidtimeseries_{roi}.csv' try: df_timeseries = pd.read_csv(csv_path) except: print(f\"{csv_path} not found",
"x in data_path.iterdir() if 'covidtimeseries' in str(x)] rois_to_remove = ['Diamond Princess', 'Grand Princess',",
"list = ['cases', 'deaths', 'recover'], plot: bool = False) -> pd.DataFrame: \"\"\"Used by",
"download tests data for %s\" % i) df_combined = pd.concat(dfs) df_combined.sort_values(by='Province_State', inplace=True) df_combined['Date']",
"state] # Only the given province df = pd.DataFrame(columns=['dates2','cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths',",
"- start_dt delta = delta.days for dt in daterange(start_dt, end_dt): dates.append(dt.strftime(\"%m-%d-%Y\")) # cumulative",
"df_combined.loc[df_combined['new_tests'] < 0, 'new_tests'] = -1 # Handle cases where # cumulative counts",
"df = pd.read_csv(url) # Download the data into a dataframe except HTTPError: print(\"Could",
"if roi not in src_rois: unavailable_testing_data.append(roi) continue if roi_codes_dict[roi] in [\"US\", \"Marshall Islands\",",
"estimate not found in population_estimates.csv\".format(args.roi)) return int(population) def covid_tracking_recovery(data_path: str): \"\"\"Gets archived US",
"country)) else: print(\"No data for %s\" % country) source = dfs['US'] states =",
"inplace=True, drop=True) src_trim.replace(\"New York State\", \"New York\", inplace=True) # fix NY name src_rois",
"= df['new_recover'] + df['new_deaths'] try: population = get_population_count(data_path, 'CA_' + province) df['population'] =",
"df_roi['new_tests'] = df_roi['cum_tests'].diff().fillna(-1).astype(int) sorted_dfs.append(df_roi) df_tests = pd.concat(sorted_dfs) df_tests.reset_index(inplace=True, drop=True) df_tests.replace(US_STATE_ABBREV, inplace=True) df_tests.rename(columns={'Province_State': 'roi'},",
"from tqdm import tqdm from typing import Union from urllib.error import HTTPError import",
"(str): Full path to data directory. Returns: ctp_dfs (dict): Dictionary containing US States",
"f'covidtimeseries_{roi}.csv' try: df_timeseries = pd.read_csv(csv_path) except: print(f\"{csv_path} not found in data path.\") try:",
"on=['date', 'province'], how='outer') df_raw = df_rawtemp.merge(deaths, on=['date', 'province'], how='outer') df_raw.fillna(0, inplace=True) provinces =",
"at same position source.rename(columns={source.columns[1]: \"date\" }, inplace=True) dfs.append(source) cases = dfs[0] deaths =",
"(csv.name.split('.')[0]+'.csv')) def fix_neg(df: pd.DataFrame, roi: str, columns: list = ['cases', 'deaths', 'recover'], plot:",
"Defaults to ['cases', 'deaths', 'recover']. Returns: pd.DataFrame: [description] \"\"\" for c in columns:",
"-1 df_ffill = df_ffill[~df_ffill.index.duplicated()] # fix duplication issue dfs.append(df_ffill[new_col]) df_new = pd.concat(dfs, axis=1)",
"= {} for region in ['global', 'US']: dfs[region] = {} for kind in",
"src_trim['region'].unique() for roi in src_rois: if roi in US_STATE_ABBREV: try: timeseries_path = data_path",
"print(\"Removing {} from data_path\".format(roi)) os.remove(csv) except: print(\"could not remove {}. Check that path",
"+= 1 else: break # Then repeat if plot: plt.figure() plt.plot(df.index, before, label='raw')",
"Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'US_AS', 'Micronesia', 'Kiribati', 'Palau'] for csv in",
"good.intersection(enough) bad = set(d['confirmed'].index).difference(good) # print(\"JHU data acceptable for %s\" % ','.join(good)) #",
"df_result.fillna(-1, inplace=True) df_result.loc[df_result['new_tests'] < 0, 'new_tests'] = -1 # Handle cases where #",
"= pd.concat([df1] + more_dfs) elif region == 'US': # Use state name as",
"= df[df[new] > 0].index has_negs = before.diff().min() < 0 if len(non_zeros) and has_negs:",
"csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2',",
"kind in ['confirmed', 'deaths', 'recovered']: url = url_template % (kind, region) # Create",
"'%m/%d/%y') return datetime.strftime(y, '%m/%d/%y') def fix_ct_dates(x): return datetime.strptime(str(x), '%Y%m%d') def get_countries(d: pd.DataFrame, filter_:",
"'cum_cases', 'cum_deaths', 'new_cases','new_deaths','new_uninfected']) df['dates2'] = source['confirmed'].columns df['dates2'] = df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[state].values df['cum_deaths']",
"we have data in the downloaded JHU files for that country if state",
"days of data across all states'): url = url_template % i try: df",
"df['population'] = population except: pass dfs[state] = df dfs[state].to_csv(data_path / ('covidtimeseries_%s.csv' % state))",
"src['people_fully_vaccinated'].values src_trim.set_index('dates2', inplace=True, drop=True) src_trim.replace(\"New York State\", \"New York\", inplace=True) # fix NY",
"country if country in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'cum_recover', 'new_cases', 'new_deaths',",
"US_AS (American Somoa)\"\"\" csvs = [x for x in data_path.iterdir() if 'covidtimeseries' in",
"1)[-1] if roi in rois_to_remove: try: if os.path.exists(csv): print(\"Removing {} from data_path\".format(roi)) os.remove(csv)",
"missing for: \") for roi in roi_codes_dict: if roi in unavailable_testing_data: print(roi_codes_dict[roi], end=\"",
"'Canada' and kind in 'recovered': continue is_c = df['Country/Region'] == country df2 =",
"source['recovered'].loc[country].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff() df['new_uninfected'] = df['new_recover'] +",
"cumulative counts are missing, set new counts to -1 so they don't become",
"for i in df_timeseries.columns: # Check if testng data already included if 'tests'",
"'US_NV', 'New Hampshire': 'US_NH', 'New Jersey': 'US_NJ', 'New Mexico': 'US_NM', 'New York': 'US_NY',",
"= False) -> None: \"\"\" Scrape JHU for US State level test results.",
"with urllib.request.urlopen(url_path) as url: data = json.loads(url.read().decode()) source = pd.json_normalize(data[kind]) if kind ==",
"how='left') df_result.fillna(-1, inplace=True) df_result.loc[df_result['new_tests'] < 0, 'new_tests'] = -1 # Handle cases where",
"new_tests becomes a large negative number df_result['new_tests'] = df_result['new_tests'].astype(int) df_result[['cum_tests', 'new_tests']] = df_result[['cum_tests',",
"the given state df = pd.DataFrame(columns=['dates2','cum_recover','new_recover']) df['dates2'] = source['date'].apply(fix_ct_dates) # Convert date format",
"new counts. \"\"\" dfs = [] df_tmp = df.copy() df_tmp.reset_index(inplace=True) for col in",
"= df['dates2'].apply(fix_jhu_dates) df['cum_cases'] = source['confirmed'].loc[country].values df['cum_deaths'] = source['deaths'].loc[country].values df['cum_recover'] = source['recovered'].loc[country].values df[['new_cases', 'new_deaths',",
"population estimate not found in population_estimates.csv\".format(args.roi)) return int(population) def covid_tracking_recovery(data_path: str): \"\"\"Gets archived",
"Union from urllib.error import HTTPError import urllib.request, json import os from datetime import",
"df.join(df_new) df_fixed = df_fixed.fillna(-1).astype(int) return df_fixed def get_owid_us_vaccines(data_path: str, filter_: Union[dict, bool] =",
"bool] = True, fixes: bool = False) -> None: \"\"\" Gets data from",
"# Replace the values df[new] = df[cum].diff().fillna(0).astype(int).values return df def negify_missing(data_path: str) ->",
"new_tests rois = df_tests.roi.unique().tolist() to_remove = ['Diamond Princess', 'Grand Princess', 'Recovered'] for i",
"fnf_error: print(fnf_error, 'Could not add OWID vaccinations data.') pass for i in df_timeseries.columns:",
"population data for {}\".format(state)) pass df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before",
"all states'): url = url_template % i try: df = pd.read_csv(url) df_trim =",
"'US_AL', 'Alaska': 'US_AK', 'American Samoa': 'US_AS', 'Arizona': 'US_AZ', 'Arkansas': 'US_AR', 'California': 'US_CA', 'Colorado':",
"CSV print(\"OWID global test results missing for: \") for roi in roi_codes_dict: if",
"return df def negify_missing(data_path: str) -> None: \"\"\"Fix negative values in daily data.",
"rois_to_remove = ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US',",
"recovered = dfs[2] # combine dfs df_rawtemp = cases.merge(recovered, on=['date', 'province'], how='outer') df_raw",
"source = df_raw[df_raw['state'] == state] # Only the given province df = pd.DataFrame(columns=['dates2','cum_cases',",
"'US_MI', 'Minnesota': 'US_MN', 'Mississippi': 'US_MS', 'Missouri': 'US_MO', 'Montana': 'US_MT', 'Nebraska': 'US_NE', 'Nevada': 'US_NV',",
"script fixes this by nulling such data and applying a monotonic spline interpolation",
"data directory. Returns: None \"\"\" url_template = \"https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_daily_reports_us/%s.csv\" # generate a list of",
"as plt import numpy as np import pandas as pd import requests from",
"print(f\"{csv_path} not found in data path.\") try: for i in df_timeseries.columns: # Check",
"roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim.set_index('dates2',inplace=True, drop=True) src_rois =",
"'US_AZ', 'Arkansas': 'US_AR', 'California': 'US_CA', 'Colorado': 'US_CO', 'Connecticut': 'US_CT', 'Delaware': 'US_DE', 'District of",
"(str): Full path to data directory. roi (str): Region. Returns: population (int): Population",
"Args: data_path (str): Full path to data directory. Returns: ctp_dfs (dict): Dictionary containing",
"a value less than N on a subsequent day. This script fixes this",
"taken between a new cumulative count and -1. We don't want it to",
"a monotonic spline interpolation in between valid days of data. This only affects",
"plot the changes. Returns: None \"\"\" csvs = [x for x in data_path.iterdir()",
"pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe src_trim = pd.DataFrame(columns=['dates2','Alpha-3 code','cum_tests']) src_trim['dates2'] =",
"df_raw = df_rawtemp.merge(deaths, on=['date', 'province'], how='outer') df_raw.fillna(0, inplace=True) provinces = ['Alberta', 'BC', 'Manitoba',",
"cumulative column nulls to have # monotonic growth after = df[cum].interpolate('pchip') diff =",
"'Canada', 'Australia']: if country == 'Canada' and kind in 'recovered': continue is_c =",
"here # URL for API call to get Province-level timeseries data starting on",
"found in population_estimates.csv\".format(args.roi)) return int(population) def covid_tracking_recovery(data_path: str): \"\"\"Gets archived US recovery data",
"continue if roi_codes_dict[roi] in [\"US\", \"Marshall Islands\", \"Micronesia\", \"Samoa\", \"Vanuatu\"]: # skipping because",
"None # Protect against 0 null final value which screws up interpolator if",
"[x for x in data_path.iterdir() if 'covidtimeseries' in str(x)] for csv in tqdm(csvs,",
"{cum_col} data to add for {roi}.') df_ffill[new_col] = -1 df_ffill = df_ffill[~df_ffill.index.duplicated()] #",
"= before.diff().min() < 0 if len(non_zeros) and has_negs: first_non_zero = non_zeros[0] maxx =",
"overwrite timeseries CSV except: print(f'Could not get tests data for {roi}.') def daterange(date1,",
"Project. https://covidtracking.com Args: data_path (str): Full path to data directory. Returns: ctp_dfs (dict):",
"after = df[cum].interpolate('pchip') diff = after.diff() if diff.min() < 0: # If there",
"filter_: Union[dict, bool] = True, fixes: bool = False) -> None: \"\"\" Gets",
"'Maine': 'US_ME', 'Maryland': 'US_MD', 'Massachusetts': 'US_MA', 'Michigan': 'US_MI', 'Minnesota': 'US_MN', 'Mississippi': 'US_MS', 'Missouri':",
"end=\" \") print(\"\") def get_owid_global_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes: bool",
"if source['recovered'].isnull().all() == False: df['cum_recover'] = source['recovered'].values # check if roi reports recovery",
"state in tqdm(state_code, desc='Brazilian States'): source = df_raw[df_raw['state'] == state] # Only the",
"'global': has_no_province = df['Province/State'].isnull() # Whole countries only; use country name as index",
"for csv in tqdm(csvs, desc=\"Regions\"): roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) for kind",
"This only affects a small number of regions. It overwrites the original .csv",
"# For each \"good\" country, # reformat and save that data in its",
"= df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) # overwrite timeseries CSV print(\"OWID global vaccine results missing",
"df = df.merge(us_recovery_data[state], on='dates2', how='left') df['tmp_new_recover'] = df['new_recover'].fillna(0).astype(int) # create temp new recover",
"of countries that have \"good\" data, # according to these criteria: good_countries =",
"in df.columns: df_trim['cum_tests'] = df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim) except HTTPError: print(\"Could not download tests data",
"pd.read_csv(timeseries_path, index_col='dates2') except FileNotFoundError as fnf_error: print(fnf_error, 'Could not add OWID data.') pass",
"'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(roi_codes_dict[roi], df_combined, cum_vacc_columns) df = df.loc[:, ~df.columns.str.contains('^Unnamed')] df.to_csv(timeseries_path) #",
"date on all 3 dfs at same position source.rename(columns={source.columns[1]: \"date\" }, inplace=True) dfs.append(source)",
"== 'Canada' and kind in 'recovered': continue is_c = df['Country/Region'] == country df2",
"data directory. Returns: None \"\"\" # Where JHU stores their data url_template =",
"# drop so we can add new src_roi = src_trim[src_trim['region'] == roi] #",
"province in tqdm(provinces, desc='Canadian Provinces'): source = df_raw[df_raw['province'] == province] # Only the",
"Data is stored as a collection of CSVs per date containing states and",
"now new counts. \"\"\" dfs = [] df_tmp = df.copy() df_tmp.reset_index(inplace=True) for col",
"data for Brazil\") state_code = {'AC':'Acre', 'AL':'Alagoas', 'AM':'Amazonas', 'AP':'Amapa', 'BA':'Bahia','CE':'Ceara', 'DF':'Distrito Federal', 'ES':'Espirito",
"pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'Alpha-3 code', 'cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values",
"{}\".format(state)) continue # If we have data in the downloaded JHU files for",
"Use only data columns # 20 or 21 signifies 2020 or 2021 dfs[region][kind]",
"US csvs in data_path. \"\"\" url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src = pd.read_csv(url) src_trim =",
"differences are now all non-negative assert after.diff().min() >= 0 # Replace the values",
"This script fixes this by nulling such data and applying a monotonic spline",
"df['cum_recover'] = np.NaN df.sort_values(by=['dates2'], inplace=True) # sort by datetime obj before converting to",
"sure that cumulative counts are non-decreasing. Args: df (pd.DataFrame): DataFrame containing data for",
"roi = str(csv).split('.')[0].split('_', 1)[-1] if roi in rois_to_remove: try: if os.path.exists(csv): print(\"Removing {}",
"don't want to miss new counts before the gap. So create a dummy",
"\"Samoa\", \"Vanuatu\"]: # skipping because no data continue try: timeseries_path = data_path /",
"containing counts but not new counts. columns (list): List of columns (without cum_",
"4, 12) # When JHU starts reporting end_dt = date.today() dates = []",
"It overwrites the original .csv files produced by the functions above. Args: data_path",
"JHU_FILTER_DEFAULTS if filter_: for key, minimum in filter_.items(): enough = d[key].index[d[key].max(axis=1) >= minimum].tolist()",
"COVIDTRACKER_FILTER_DEFAULTS = {'cum_cases': 5, 'cum_recover': 1, 'cum_deaths': 0} US_STATE_ABBREV = { 'Alabama': 'US_AL',",
"(str): One region, e.g 'US_MI' or 'Greece'. columns (list, optional): Columns to make",
"try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries = pd.read_csv(timeseries_path, index_col='dates2') except",
"cum_test and new_tests rois = df_tests.roi.unique().tolist() to_remove = ['Diamond Princess', 'Grand Princess', 'Recovered']",
"'New Hampshire': 'US_NH', 'New Jersey': 'US_NJ', 'New Mexico': 'US_NM', 'New York': 'US_NY', 'North",
"df[[x for x in df if any(year in x for year in ['20',",
"how='left') df['tmp_new_recover'] = df['new_recover'].fillna(0).astype(int) # create temp new recover for df['new_uninfected'] = df['tmp_new_recover']",
"'new_tests']] = df_result[['cum_tests', 'new_tests']].astype(int) df_result_trim = df_result[['dates2', 'cum_cases', 'new_cases', 'cum_deaths', 'new_deaths', 'cum_recover', 'new_recover',",
"inplace=True) # now open csvs in data_path that match rois and merge on",
"k, v in US_STATE_ABBREV.items(): # get US state abbrev # if not US_STATE_ABBREV[k].startswith('US_'):",
"downloaded JHU files for that country if state in source['confirmed'].index: df = pd.DataFrame(columns=['dates2',",
"src_trim[src_trim['region'] == roi] # filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_vaccinations', 'daily_vaccinations',",
"data. The purpose of this script is to fix spurious negative values in",
"'Total_Test_Results' after 200 ish days dfs = [] for i in tqdm(dates, desc=f'Scraping",
"src_rois = src_trim['region'].unique() for roi in src_rois: if roi in US_STATE_ABBREV: try: timeseries_path",
"'cum_' + col dummy_cum_col = 'dummy_' + cum_col new_col = 'new_' + col",
"Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'US_AS', 'Micronesia', 'Kiribati',",
"return df_fixed def get_owid_us_vaccines(data_path: str, filter_: Union[dict, bool] = True, fixes: bool =",
"pd import requests from tqdm import tqdm from typing import Union from urllib.error",
"global vaccines data.') pass for i in df_timeseries.columns: # Check if OWID testing",
"For each country if state in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'US_AS']:",
"bad data continue try: timeseries_path = data_path / ('covidtimeseries_%s.csv' % roi_codes_dict[roi]) df_timeseries =",
"to plot the changes. Returns: None \"\"\" csvs = [x for x in",
"isinstance(filter_, dict): filter_ = JHU_FILTER_DEFAULTS if filter_: for key, minimum in filter_.items(): enough",
"day. This script fixes this by nulling such data and applying a monotonic",
"cum] = None # Protect against 0 null final value which screws up",
"down source dataframe src_trim.set_index('dates2',inplace=True, drop=True) src_rois = src_trim['Alpha-3 code'].unique() unavailable_testing_data = [] #",
"population except: print(\"Could not add population data for {}\".format(state)) pass df.sort_values(by=['dates2'], inplace=True) #",
"Federal', 'ES':'Espirito Santo', 'GO':'Goias', 'MA':'Maranhao', 'MG':'Minas Gerais', 'MS':'Mato Grosso do Sul', 'MT':'Mato Grosso',",
"data to add for {roi}.') df_ffill[new_col] = -1 df_ffill = df_ffill[~df_ffill.index.duplicated()] # fix",
"diff[diff < 0].index df.loc[neg_index, cum] += 1 else: break # Then repeat if",
"for i in tqdm(dates, desc=f'Scraping {delta} days of data across all states'): url",
"{'cum_cases': 5, 'cum_recover': 1, 'cum_deaths': 0} US_STATE_ABBREV = { 'Alabama': 'US_AL', 'Alaska': 'US_AK',",
"== False: df['cum_recover'] = source['recovered'].values # check if roi reports recovery data as",
"'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'Alpha-3 code', 'cum_vaccinations', 'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated'])",
"# convert dates to string df = df.set_index('dates2').fillna(0).astype(int) # Fill NaN with 0",
"\"\"\"Get a list of countries from a global dataframe optionally passing a quality",
"= df_tmp.iloc[start:] df_ffill.set_index('dates2', drop=True, inplace=True) df_ffill[dummy_cum_col] = df_ffill[cum_col].ffill().astype(int).values df_ffill[new_col] = df_ffill[dummy_cum_col].diff().astype('Int64') # If",
"column nulls to have # monotonic growth after = df[cum].interpolate('pchip') diff = after.diff()",
"rois: csv_path = data_path / f'covidtimeseries_{roi}.csv' try: df_timeseries = pd.read_csv(csv_path) except: print(f\"{csv_path} not",
"except: pass # Fill NaN with 0 and convert to int dfs[country] =",
"= pd.read_csv(data_path / 'country_iso_codes.csv') roi_codes_dict = pd.Series(roi_codes.Country.values,index=roi_codes['Alpha-3 code']).to_dict() # trim down source dataframe",
"don't become 0. df_ffill.loc[df_ffill[cum_col].isnull(), new_col] = -1 except: print(f'No {cum_col} data to add",
"fix spurious negative values in new daily numbers. For example, the cumulative total",
"fixes this by nulling such data and applying a monotonic spline interpolation in",
"'cum_vaccinations', 'daily_vaccinations', 'people_vaccinated', 'people_fully_vaccinated']) src_trim['dates2'] = src['date'].apply(fix_owid_dates).values # fix dates src_trim['region'] = src['location'].values",
"0, 'new_tests'] = -1 # Handle cases where # cumulative counts decrease and",
"= True, fixes: bool = False) -> None: \"\"\" Gets data from Canada's",
"reformat and save that data in its own .csv file. source = dfs['global']",
"df.set_index('dates2').fillna(0).astype(int) dfs[country].to_csv(data_path / ('covidtimeseries_%s.csv' % country)) else: print(\"No data for %s\" % country)",
"'Kansas': 'US_KS', 'Kentucky': 'US_KY', 'Louisiana': 'US_LA', 'Maine': 'US_ME', 'Maryland': 'US_MD', 'Massachusetts': 'US_MA', 'Michigan':",
"fix duplication issue dfs.append(df_ffill[new_col]) df_new = pd.concat(dfs, axis=1) df_new = df_new.fillna(-1).astype(int) df_fixed =",
"in tqdm(states, desc='US States'): # For each country if state in ['Diamond Princess',",
"position source.rename(columns={source.columns[1]: \"date\" }, inplace=True) dfs.append(source) cases = dfs[0] deaths = dfs[1] recovered",
"yield date1 + timedelta(n) def fix_jhu_testing_dates(x): y = datetime.strptime(x, '%m-%d-%Y') return datetime.strftime(y, '%m/%d/%y')",
"missing, set new counts to -1 so they don't become 0. df_ffill.loc[df_ffill[cum_col].isnull(), new_col]",
"first check if roi reports recovery data as recovered if source['recovered'].isnull().all() == False:",
"= src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] = src['people_vaccinated'].values src_trim['cum_people_fully_vaccinated'] = src['people_fully_vaccinated'].values roi_codes = pd.read_csv(data_path / 'country_iso_codes.csv')",
"States'): # For each country if state in ['Diamond Princess', 'Grand Princess', 'MS",
"for that country if state in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases', 'cum_deaths', 'new_cases','new_deaths','new_uninfected'])",
"inplace=True) dfs.append(source) cases = dfs[0] deaths = dfs[1] recovered = dfs[2] # combine",
"containing data for one region. roi (str): One region, e.g 'US_MI' or 'Greece'.",
"22 2020 url_template = 'https://api.opencovid.ca/timeseries?stat=%s&loc=prov&date=01-22-2020' for kind in ['cases', 'mortality', 'recovered']: url_path =",
"the functions above. Args: data_path (str): Full path to data directory. plot (bool):",
"= source['deaths'].loc[country].values df['cum_recover'] = source['recovered'].loc[country].values df[['new_cases', 'new_deaths', 'new_recover']] = \\ df[['cum_cases', 'cum_deaths', 'cum_recover']].diff()",
"'Montana': 'US_MT', 'Nebraska': 'US_NE', 'Nevada': 'US_NV', 'New Hampshire': 'US_NH', 'New Jersey': 'US_NJ', 'New",
"data dfs = {} for region in ['global', 'US']: dfs[region] = {} for",
"to make non-decreasing. Defaults to ['cases', 'deaths', 'recover']. Returns: pd.DataFrame: [description] \"\"\" for",
"{} from data_path\".format(roi)) os.remove(csv) except: print(\"could not remove {}. Check that path is",
"Union[dict, bool] = False) -> None: \"\"\" Scrape JHU for US State level",
"function uses monotonic spline interpolation to make sure that cumulative counts are non-decreasing.",
"gap. So create a dummy dataframe with forward filled cumulative counts and perform",
"DataFrame containing data for one region. roi (str): One region, e.g 'US_MI' or",
"plt.title(\"%s %s Raw vs Fixed R=%.5g\" % (roi, c, r)) plt.legend() else: after",
"['20', '21'])]] # Use only data columns # 20 or 21 signifies 2020",
"reports recovery data as recovered if source['recovered'].isnull().all() == False: df['cum_recover'] = source['recovered'].values #",
"US_STATE_ABBREV.items(): # get US state abbrev # if not US_STATE_ABBREV[k].startswith('US_'): # US_STATE_ABBREV[k] =",
"Create the full data URL with urllib.request.urlopen(url_path) as url: data = json.loads(url.read().decode()) source",
"\"\"\" Scrape JHU for US State level test results. Data is stored as",
"= df[has_no_province].set_index('Country/Region') more_dfs = [] for country in ['China', 'Canada', 'Australia']: if country",
"per date containing states and test results. Args: data_path (str): Full path to",
"calculation, then merge those new cases back into dataframe. Args: roi (str): Region",
"roi = str(csv).split('.')[0].split('_')[-1] df = pd.read_csv(csv) for kind in ['cases', 'deaths', 'recover']: if",
"'Oklahoma': 'US_OK', 'Oregon': 'US_OR', 'Pennsylvania': 'US_PA', 'Puerto Rico': 'US_PR', 'Rhode Island': 'US_RI', 'South",
"(str): Full path to data directory. Returns: None \"\"\" # Where JHU stores",
"these criteria: good_countries = get_countries(dfs['global'], filter_=filter_) # For each \"good\" country, # reformat",
"except: print(f'Could not get tests data for {roi}.') def daterange(date1, date2): for n",
"states = source['confirmed'].index.tolist() us_recovery_data = covid_tracking_recovery(data_path) for state in tqdm(states, desc='US States'): #",
"(if exists). \"\"\" try: # open population file df_pop = pd.read_csv(data_path / 'population_estimates.csv')",
"'recovered']: url = url_template % (kind, region) # Create the full data URL",
"= dfs[2] # combine dfs df_rawtemp = cases.merge(recovered, on=['date', 'province'], how='outer') df_raw =",
"NaN with 0 and convert to int df.to_csv(data_path / ('covidtimeseries_CA_%s.csv' % province)) def",
"covid_tracking_recovery(data_path: str): \"\"\"Gets archived US recovery data from The COVID Tracking Project. https://covidtracking.com",
"'Oregon': 'US_OR', 'Pennsylvania': 'US_PA', 'Puerto Rico': 'US_PR', 'Rhode Island': 'US_RI', 'South Carolina': 'US_SC',",
"optionally passing a quality check Args: d (pd.DataFrame): Data from JHU tracker (e.g.",
"(csv.name.split('.')[0]+'.csv') df.to_csv(out) def remove_old_rois(data_path: str): \"\"\"Delete time-series files for regions no longer tracked,",
"df_result['new_tests'].astype(int) df_result[['cum_tests', 'new_tests']] = df_result[['cum_tests', 'new_tests']].astype(int) df_result_trim = df_result[['dates2', 'cum_cases', 'new_cases', 'cum_deaths', 'new_deaths',",
"from datetime import datetime import matplotlib.pyplot as plt import numpy as np import",
"0 and convert to int df.to_csv(data_path / ('covidtimeseries_BR_%s.csv' % state_code[state])) def get_owid_tests(data_path: str,",
"where column is people_tested and then switches to Total_Test_Results if 'People_Tested' in df.columns:",
"dictionary of dataframes # Generate a list of countries that have \"good\" data,",
"after.diff() if diff.min() < 0: # If there are still negative first-differences at",
"df['new_recover'] = df['cum_recover'].diff() ctp_dfs['US_'+state] = df return ctp_dfs def get_canada(data_path: str, filter_: Union[dict,",
"None: \"\"\" Get global vaccines data from Our World In Data https://github.com/owid/covid-19-data Add",
"# according to these criteria: good_countries = get_countries(dfs['global'], filter_=filter_) # For each \"good\"",
"Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall Islands', 'US', 'Micronesia','Kiribati']: print(\"Skipping {}\".format(country))",
"counties to create state level data df = df[[x for x in df",
"df['cum_recover'] = source['recovered'].values # check if roi reports recovery data as hospitalizedDischarged elif",
"df_ffill.loc[df_ffill[cum_col].isnull(), new_col] = -1 except: print(f'No {cum_col} data to add for {roi}.') df_ffill[new_col]",
"not US_STATE_ABBREV[k].startswith('US_'): # US_STATE_ABBREV[k] = 'US_' + v # Add 'US_' to abbrev",
"break # Then repeat if plot: plt.figure() plt.plot(df.index, before, label='raw') plt.plot(df.index, after, label='fixed')",
"= get_population_count(data_path, roi) df['population'] = population except: print(\"Could not add population data for",
"dfs[state].to_csv(data_path / ('covidtimeseries_%s.csv' % state)) else: print(\"No data for %s\" % state) def",
"# create temp new recover for df['new_uninfected'] = df['tmp_new_recover'] + df['new_deaths'] # new",
"add for {roi}.') df_ffill[new_col] = -1 df_ffill = df_ffill[~df_ffill.index.duplicated()] # fix duplication issue",
"JHU files for that country if state in source['confirmed'].index: df = pd.DataFrame(columns=['dates2', 'cum_cases',",
"For example, the cumulative total of cases should not go from N to",
"[] # we will append dfs for cases, deaths, recovered here # URL",
"kind in ['cases', 'deaths', 'recover']: if df['cum_%s' % kind].sum() == 0: print(\"Negifying 'new_%s'",
"data (values). \"\"\" archived_data = data_path / 'covid-tracking-project-recovery.csv' df_raw = pd.read_csv(archived_data) states =",
"= df[cum].interpolate('pchip') diff = after.diff() if diff.min() < 0: # If there are",
"# filter rows that match roi df_combined = df_timeseries.merge(src_roi[['cum_tests']], how='left', on='dates2') df_combined['new_tests'] =",
"'Nebraska': 'US_NE', 'Nevada': 'US_NV', 'New Hampshire': 'US_NH', 'New Jersey': 'US_NJ', 'New Mexico': 'US_NM',",
"'MG':'Minas Gerais', 'MS':'Mato Grosso do Sul', 'MT':'Mato Grosso', 'PA':'Para', 'PB':'Paraiba', 'PE':'Pernambuco', 'PI':'Piaui', 'PR':'Parana',",
"from Our World In Data https://github.com/owid/covid-19-data Add columns cum_tests and new_tests to csvs",
"Zaandam, Samoa, Vanuatu, Marshall Islands, US, US_AS (American Somoa)\"\"\" csvs = [x for",
"country if country in ['Diamond Princess', 'Grand Princess', 'MS Zaandam', 'Samoa', 'Vanuatu', 'Marshall",
"try: if os.path.exists(csv): print(\"Removing {} from data_path\".format(roi)) os.remove(csv) except: print(\"could not remove {}.",
"i in tqdm(dates, desc=f'Scraping {delta} days of data across all states'): url =",
"JHU stores their data url_template = (\"https://raw.githubusercontent.com/CSSEGISandData/\" \"COVID-19/master/csse_covid_19_data/\" \"csse_covid_19_time_series/time_series_covid19_%s_%s.csv\") # Scrape the data",
"negative bad = df.loc[first_non_zero:, new] < 0 df.loc[bad[bad].index, cum] = None # Protect",
"csvs in data_path that match rois and merge on csv to add cum_test",
"overwrite timeseries CSV def fix_owid_dates(x): y = datetime.strptime(x, '%Y-%m-%d') return datetime.strftime(y, '%m/%d/%y') def",
"values df[new] = df[cum].diff().fillna(0).astype(int).values return df def negify_missing(data_path: str) -> None: \"\"\"Fix negative",
"fixes: bool = False) -> None: \"\"\" Get US vaccines data from Our",
"get_population_count(data_path, 'CA_' + province) df['population'] = population except: pass df.sort_values(by=['dates2'], inplace=True) # sort",
"'MA':'Maranhao', 'MG':'Minas Gerais', 'MS':'Mato Grosso do Sul', 'MT':'Mato Grosso', 'PA':'Para', 'PB':'Paraiba', 'PE':'Pernambuco', 'PI':'Piaui',",
"'daily_vaccinations', 'cum_people_vaccinated', 'cum_people_fully_vaccinated']], how='left', on='dates2') cum_vacc_columns = ['vaccinations', 'people_vaccinated', 'people_fully_vaccinated'] df = dummy_cumulative_new_counts(roi_codes_dict[roi],",
"a dummy dataframe with forward filled cumulative counts and perform new cases calculation,",
"becomes a large negative number df_result['new_tests'] = df_result['new_tests'].astype(int) df_result[['cum_tests', 'new_tests']] = df_result[['cum_tests', 'new_tests']].astype(int)",
"datetime.strptime(x, '%m-%d-%Y') return datetime.strftime(y, '%m/%d/%y') def fix_negatives(data_path: str, plot: bool = False) ->",
"negative number df_result['new_tests'] = df_result['new_tests'].astype(int) df_result[['cum_tests', 'new_tests']] = df_result[['cum_tests', 'new_tests']].astype(int) df_result_trim = df_result[['dates2',",
"'%m/%d/%y') def fix_negatives(data_path: str, plot: bool = False) -> None: \"\"\"Fix negative values",
"plt.figure() plt.plot(df.index, before, label='raw') plt.plot(df.index, after, label='fixed') r = np.corrcoef(before, after)[0, 1] plt.title(\"%s",
"'21'])]] # Use only data columns # 20 or 21 signifies 2020 or",
"fix dates src_trim['region'] = src['location'].values src_trim['cum_vaccinations'] = src['total_vaccinations'].values src_trim['daily_vaccinations'] = src['daily_vaccinations'].values src_trim['cum_people_vaccinated'] =",
"dict): filter_ = JHU_FILTER_DEFAULTS if filter_: for key, minimum in filter_.items(): enough =",
"datetime.strftime(y, '%m/%d/%y') def get_jhu_us_states_tests(data_path: str, filter_: Union[dict, bool] = False) -> None: \"\"\"",
"'%m/%d/%y') def fix_ct_dates(x): return datetime.strptime(str(x), '%Y%m%d') def get_countries(d: pd.DataFrame, filter_: Union[dict, bool] =",
"url = 'https://raw.githubusercontent.com/owid/covid-19-data/master/public/data/vaccinations/us_state_vaccinations.csv' src = pd.read_csv(url) src_trim = pd.DataFrame(columns=['dates2', 'region', 'cum_vaccinations', 'daily_vaccinations', 'people_vaccinated',",
"data directory. Returns: ctp_dfs (dict): Dictionary containing US States (keys) and dataframes containing",
"df.set_index('Province_State') df = df.groupby('Province_State').sum() # combine counties to create state level data df",
"df_combined['cum_tests'].diff() df_combined.loc[df_combined['new_tests'] < 0, 'new_tests'] = -1 # Handle cases where # cumulative",
"df.columns: df_trim['cum_tests'] = df['People_Tested'].fillna(-1).astype(int).values dfs.append(df_trim) if 'Total_Test_Results' in df.columns: df_trim['cum_tests'] = df['Total_Test_Results'].fillna(-1).astype(int).values dfs.append(df_trim)",
"path to data directory. roi (str): Region. Returns: population (int): Population count for",
"else: print(\"No data for %s\" % country) source = dfs['US'] states = source['confirmed'].index.tolist()",
"cases = dfs[0] deaths = dfs[1] recovered = dfs[2] # combine dfs df_rawtemp",
"== False: df['cum_recover'] = source['hospitalizedDischarged'].values else: df['cum_recover'] = np.NaN df.sort_values(by=['dates2'], inplace=True) # sort",
"= True, fixes: bool = False) -> None: \"\"\" Get global vaccines data"
] |
[
"== 0: img, width, height = camera.CaptureRGBA() if time.perf_counter() - self.interval >= 1/self.frequency:",
"d in detections: detections_list.append( [d.ClassID, d.Confidence, d.Left, d.Right, d.Top, d.Bottom, d.Area, d.Center]) self.conn1.send(detections_list)",
"device: 摄像头设备文件 :param network: 选用的模型 :param threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5) :param display_window: 是否显示监视窗口 \"\"\" super(ObjectDetection,",
"self.camera_detect() def camera_detect(self): \"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\" net = jetson.inference.detectNet(self.network, threshold=self.threshold) camera = jetson.utils.gstCamera(self.width,",
"display_window: 是否显示监视窗口 \"\"\" super(ObjectDetection, self).__init__() self.device = device self.network = network self.frequency =",
"jetson.inference import jetson.utils from multiprocessing import Process, Pipe import time class ObjectDetection(Process): \"\"\"",
"self.height = height self.display_window = display_window self.interval = time.perf_counter() self.stop = stop_process def",
"= [] for d in detections: detections_list.append( [d.ClassID, d.Confidence, d.Left, d.Right, d.Top, d.Bottom,",
":param stop_process:停止标志 :param width: 摄像头宽 :param height: 摄像头高 :param frequency: 探测频率 :param device:",
":param threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5) :param display_window: 是否显示监视窗口 \"\"\" super(ObjectDetection, self).__init__() self.device = device self.network",
":param width: 摄像头宽 :param height: 摄像头高 :param frequency: 探测频率 :param device: 摄像头设备文件 :param",
"for d in detections: detections_list.append( [d.ClassID, d.Confidence, d.Left, d.Right, d.Top, d.Bottom, d.Area, d.Center])",
"self.width = width self.height = height self.display_window = display_window self.interval = time.perf_counter() self.stop",
"Process, Pipe import time class ObjectDetection(Process): \"\"\" 物体识别进程类,实际的识别程序 \"\"\" def __init__(self, conn1, conn2,",
"是否显示监视窗口 \"\"\" super(ObjectDetection, self).__init__() self.device = device self.network = network self.frequency = frequency",
"启动进程 \"\"\" self.conn2.close() self.camera_detect() def camera_detect(self): \"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\" net = jetson.inference.detectNet(self.network, threshold=self.threshold)",
"[d.ClassID, d.Confidence, d.Left, d.Right, d.Top, d.Bottom, d.Area, d.Center]) self.conn1.send(detections_list) else: self.conn1.send([]) else: self.conn1.send([])",
"self.conn2.close() self.camera_detect() def camera_detect(self): \"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\" net = jetson.inference.detectNet(self.network, threshold=self.threshold) camera =",
"camera = jetson.utils.gstCamera(self.width, self.height, self.device) # using V4L2 display = jetson.utils.glDisplay() while display.IsOpen()",
"self.conn2 = conn2 self.width = width self.height = height self.display_window = display_window self.interval",
"d.Right, d.Top, d.Bottom, d.Area, d.Center]) self.conn1.send(detections_list) else: self.conn1.send([]) else: self.conn1.send([]) if __name__ ==",
"frequency: 探测频率 :param device: 摄像头设备文件 :param network: 选用的模型 :param threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5) :param display_window:",
"ObjectDetection(Process): \"\"\" 物体识别进程类,实际的识别程序 \"\"\" def __init__(self, conn1, conn2, stop_process, width=320, height=240, frequency=10, device=\"/dev/video0\",",
"threshold self.conn1 = conn1 self.conn2 = conn2 self.width = width self.height = height",
"camera_detect(self): \"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\" net = jetson.inference.detectNet(self.network, threshold=self.threshold) camera = jetson.utils.gstCamera(self.width, self.height, self.device)",
"height: 摄像头高 :param frequency: 探测频率 :param device: 摄像头设备文件 :param network: 选用的模型 :param threshold:",
"初始化识别进程 :param conn1: 管道1 :param conn2: 管道2 :param stop_process:停止标志 :param width: 摄像头宽 :param",
"self.network = network self.frequency = frequency self.threshold = threshold self.conn1 = conn1 self.conn2",
"stop_process, width=320, height=240, frequency=10, device=\"/dev/video0\", network=\"ssd-mobilenet-v2\", threshold=0.5, display_window=True): \"\"\" 初始化识别进程 :param conn1: 管道1",
"conn2: 管道2 :param stop_process:停止标志 :param width: 摄像头宽 :param height: 摄像头高 :param frequency: 探测频率",
"<gh_stars>1-10 import jetson.inference import jetson.utils from multiprocessing import Process, Pipe import time class",
"network self.frequency = frequency self.threshold = threshold self.conn1 = conn1 self.conn2 = conn2",
"height) if len(detections) > 0: detections_list = [] for d in detections: detections_list.append(",
"height = camera.CaptureRGBA() if time.perf_counter() - self.interval >= 1/self.frequency: self.interval = time.perf_counter() detections",
"= jetson.utils.gstCamera(self.width, self.height, self.device) # using V4L2 display = jetson.utils.glDisplay() while display.IsOpen() and",
"self.stop.value == 0: img, width, height = camera.CaptureRGBA() if time.perf_counter() - self.interval >=",
"stop_process:停止标志 :param width: 摄像头宽 :param height: 摄像头高 :param frequency: 探测频率 :param device: 摄像头设备文件",
"from multiprocessing import Process, Pipe import time class ObjectDetection(Process): \"\"\" 物体识别进程类,实际的识别程序 \"\"\" def",
"摄像头宽 :param height: 摄像头高 :param frequency: 探测频率 :param device: 摄像头设备文件 :param network: 选用的模型",
":param frequency: 探测频率 :param device: 摄像头设备文件 :param network: 选用的模型 :param threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5) :param",
"threshold=self.threshold) camera = jetson.utils.gstCamera(self.width, self.height, self.device) # using V4L2 display = jetson.utils.glDisplay() while",
"self.conn1 = conn1 self.conn2 = conn2 self.width = width self.height = height self.display_window",
"摄像头高 :param frequency: 探测频率 :param device: 摄像头设备文件 :param network: 选用的模型 :param threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5)",
"= network self.frequency = frequency self.threshold = threshold self.conn1 = conn1 self.conn2 =",
"self.height, self.device) # using V4L2 display = jetson.utils.glDisplay() while display.IsOpen() and self.stop.value ==",
"= net.Detect(img, width, height) if self.display_window: display.RenderOnce(img, width, height) if len(detections) > 0:",
"= time.perf_counter() self.stop = stop_process def run(self): \"\"\" 启动进程 \"\"\" self.conn2.close() self.camera_detect() def",
"阈值(就是可信度多少时认定为识别物,一般是0.5) :param display_window: 是否显示监视窗口 \"\"\" super(ObjectDetection, self).__init__() self.device = device self.network = network",
"conn2, stop_process, width=320, height=240, frequency=10, device=\"/dev/video0\", network=\"ssd-mobilenet-v2\", threshold=0.5, display_window=True): \"\"\" 初始化识别进程 :param conn1:",
"self.interval = time.perf_counter() self.stop = stop_process def run(self): \"\"\" 启动进程 \"\"\" self.conn2.close() self.camera_detect()",
"管道2 :param stop_process:停止标志 :param width: 摄像头宽 :param height: 摄像头高 :param frequency: 探测频率 :param",
"display_window=True): \"\"\" 初始化识别进程 :param conn1: 管道1 :param conn2: 管道2 :param stop_process:停止标志 :param width:",
"len(detections) > 0: detections_list = [] for d in detections: detections_list.append( [d.ClassID, d.Confidence,",
"height) if self.display_window: display.RenderOnce(img, width, height) if len(detections) > 0: detections_list = []",
"= width self.height = height self.display_window = display_window self.interval = time.perf_counter() self.stop =",
"width self.height = height self.display_window = display_window self.interval = time.perf_counter() self.stop = stop_process",
"def __init__(self, conn1, conn2, stop_process, width=320, height=240, frequency=10, device=\"/dev/video0\", network=\"ssd-mobilenet-v2\", threshold=0.5, display_window=True): \"\"\"",
":param height: 摄像头高 :param frequency: 探测频率 :param device: 摄像头设备文件 :param network: 选用的模型 :param",
"self.frequency = frequency self.threshold = threshold self.conn1 = conn1 self.conn2 = conn2 self.width",
"= jetson.utils.glDisplay() while display.IsOpen() and self.stop.value == 0: img, width, height = camera.CaptureRGBA()",
"import Process, Pipe import time class ObjectDetection(Process): \"\"\" 物体识别进程类,实际的识别程序 \"\"\" def __init__(self, conn1,",
"= stop_process def run(self): \"\"\" 启动进程 \"\"\" self.conn2.close() self.camera_detect() def camera_detect(self): \"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法)",
"= display_window self.interval = time.perf_counter() self.stop = stop_process def run(self): \"\"\" 启动进程 \"\"\"",
"display = jetson.utils.glDisplay() while display.IsOpen() and self.stop.value == 0: img, width, height =",
"__init__(self, conn1, conn2, stop_process, width=320, height=240, frequency=10, device=\"/dev/video0\", network=\"ssd-mobilenet-v2\", threshold=0.5, display_window=True): \"\"\" 初始化识别进程",
"探测频率 :param device: 摄像头设备文件 :param network: 选用的模型 :param threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5) :param display_window: 是否显示监视窗口",
"选用的模型 :param threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5) :param display_window: 是否显示监视窗口 \"\"\" super(ObjectDetection, self).__init__() self.device = device",
"net = jetson.inference.detectNet(self.network, threshold=self.threshold) camera = jetson.utils.gstCamera(self.width, self.height, self.device) # using V4L2 display",
"self.threshold = threshold self.conn1 = conn1 self.conn2 = conn2 self.width = width self.height",
"= camera.CaptureRGBA() if time.perf_counter() - self.interval >= 1/self.frequency: self.interval = time.perf_counter() detections =",
"d.Bottom, d.Area, d.Center]) self.conn1.send(detections_list) else: self.conn1.send([]) else: self.conn1.send([]) if __name__ == '__main__': od",
"if time.perf_counter() - self.interval >= 1/self.frequency: self.interval = time.perf_counter() detections = net.Detect(img, width,",
"width, height) if self.display_window: display.RenderOnce(img, width, height) if len(detections) > 0: detections_list =",
"camera.CaptureRGBA() if time.perf_counter() - self.interval >= 1/self.frequency: self.interval = time.perf_counter() detections = net.Detect(img,",
"detections = net.Detect(img, width, height) if self.display_window: display.RenderOnce(img, width, height) if len(detections) >",
"self.device = device self.network = network self.frequency = frequency self.threshold = threshold self.conn1",
"0: detections_list = [] for d in detections: detections_list.append( [d.ClassID, d.Confidence, d.Left, d.Right,",
"in detections: detections_list.append( [d.ClassID, d.Confidence, d.Left, d.Right, d.Top, d.Bottom, d.Area, d.Center]) self.conn1.send(detections_list) else:",
":param conn1: 管道1 :param conn2: 管道2 :param stop_process:停止标志 :param width: 摄像头宽 :param height:",
"\"\"\" self.conn2.close() self.camera_detect() def camera_detect(self): \"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\" net = jetson.inference.detectNet(self.network, threshold=self.threshold) camera",
"using V4L2 display = jetson.utils.glDisplay() while display.IsOpen() and self.stop.value == 0: img, width,",
"jetson.utils.glDisplay() while display.IsOpen() and self.stop.value == 0: img, width, height = camera.CaptureRGBA() if",
"device=\"/dev/video0\", network=\"ssd-mobilenet-v2\", threshold=0.5, display_window=True): \"\"\" 初始化识别进程 :param conn1: 管道1 :param conn2: 管道2 :param",
"\"\"\" net = jetson.inference.detectNet(self.network, threshold=self.threshold) camera = jetson.utils.gstCamera(self.width, self.height, self.device) # using V4L2",
"self.device) # using V4L2 display = jetson.utils.glDisplay() while display.IsOpen() and self.stop.value == 0:",
"import jetson.inference import jetson.utils from multiprocessing import Process, Pipe import time class ObjectDetection(Process):",
":param network: 选用的模型 :param threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5) :param display_window: 是否显示监视窗口 \"\"\" super(ObjectDetection, self).__init__() self.device",
"height=240, frequency=10, device=\"/dev/video0\", network=\"ssd-mobilenet-v2\", threshold=0.5, display_window=True): \"\"\" 初始化识别进程 :param conn1: 管道1 :param conn2:",
"img, width, height = camera.CaptureRGBA() if time.perf_counter() - self.interval >= 1/self.frequency: self.interval =",
"detections_list = [] for d in detections: detections_list.append( [d.ClassID, d.Confidence, d.Left, d.Right, d.Top,",
"import jetson.utils from multiprocessing import Process, Pipe import time class ObjectDetection(Process): \"\"\" 物体识别进程类,实际的识别程序",
"display.RenderOnce(img, width, height) if len(detections) > 0: detections_list = [] for d in",
"d.Center]) self.conn1.send(detections_list) else: self.conn1.send([]) else: self.conn1.send([]) if __name__ == '__main__': od = ObjectDetection(\"\")",
"detections_list.append( [d.ClassID, d.Confidence, d.Left, d.Right, d.Top, d.Bottom, d.Area, d.Center]) self.conn1.send(detections_list) else: self.conn1.send([]) else:",
"time.perf_counter() self.stop = stop_process def run(self): \"\"\" 启动进程 \"\"\" self.conn2.close() self.camera_detect() def camera_detect(self):",
"network=\"ssd-mobilenet-v2\", threshold=0.5, display_window=True): \"\"\" 初始化识别进程 :param conn1: 管道1 :param conn2: 管道2 :param stop_process:停止标志",
"d.Area, d.Center]) self.conn1.send(detections_list) else: self.conn1.send([]) else: self.conn1.send([]) if __name__ == '__main__': od =",
"self.stop = stop_process def run(self): \"\"\" 启动进程 \"\"\" self.conn2.close() self.camera_detect() def camera_detect(self): \"\"\"",
"\"\"\" 物体识别进程类,实际的识别程序 \"\"\" def __init__(self, conn1, conn2, stop_process, width=320, height=240, frequency=10, device=\"/dev/video0\", network=\"ssd-mobilenet-v2\",",
"self.conn1.send(detections_list) else: self.conn1.send([]) else: self.conn1.send([]) if __name__ == '__main__': od = ObjectDetection(\"\") od.camera_detect()",
">= 1/self.frequency: self.interval = time.perf_counter() detections = net.Detect(img, width, height) if self.display_window: display.RenderOnce(img,",
"conn1, conn2, stop_process, width=320, height=240, frequency=10, device=\"/dev/video0\", network=\"ssd-mobilenet-v2\", threshold=0.5, display_window=True): \"\"\" 初始化识别进程 :param",
"jetson.utils.gstCamera(self.width, self.height, self.device) # using V4L2 display = jetson.utils.glDisplay() while display.IsOpen() and self.stop.value",
"摄像头设备文件 :param network: 选用的模型 :param threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5) :param display_window: 是否显示监视窗口 \"\"\" super(ObjectDetection, self).__init__()",
"while display.IsOpen() and self.stop.value == 0: img, width, height = camera.CaptureRGBA() if time.perf_counter()",
"net.Detect(img, width, height) if self.display_window: display.RenderOnce(img, width, height) if len(detections) > 0: detections_list",
"\"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\" net = jetson.inference.detectNet(self.network, threshold=self.threshold) camera = jetson.utils.gstCamera(self.width, self.height, self.device) #",
"= frequency self.threshold = threshold self.conn1 = conn1 self.conn2 = conn2 self.width =",
"detections: detections_list.append( [d.ClassID, d.Confidence, d.Left, d.Right, d.Top, d.Bottom, d.Area, d.Center]) self.conn1.send(detections_list) else: self.conn1.send([])",
"\"\"\" super(ObjectDetection, self).__init__() self.device = device self.network = network self.frequency = frequency self.threshold",
"\"\"\" def __init__(self, conn1, conn2, stop_process, width=320, height=240, frequency=10, device=\"/dev/video0\", network=\"ssd-mobilenet-v2\", threshold=0.5, display_window=True):",
"= height self.display_window = display_window self.interval = time.perf_counter() self.stop = stop_process def run(self):",
"stop_process def run(self): \"\"\" 启动进程 \"\"\" self.conn2.close() self.camera_detect() def camera_detect(self): \"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\"",
"> 0: detections_list = [] for d in detections: detections_list.append( [d.ClassID, d.Confidence, d.Left,",
"width: 摄像头宽 :param height: 摄像头高 :param frequency: 探测频率 :param device: 摄像头设备文件 :param network:",
"jetson.inference.detectNet(self.network, threshold=self.threshold) camera = jetson.utils.gstCamera(self.width, self.height, self.device) # using V4L2 display = jetson.utils.glDisplay()",
"管道1 :param conn2: 管道2 :param stop_process:停止标志 :param width: 摄像头宽 :param height: 摄像头高 :param",
"display.IsOpen() and self.stop.value == 0: img, width, height = camera.CaptureRGBA() if time.perf_counter() -",
":param display_window: 是否显示监视窗口 \"\"\" super(ObjectDetection, self).__init__() self.device = device self.network = network self.frequency",
"def run(self): \"\"\" 启动进程 \"\"\" self.conn2.close() self.camera_detect() def camera_detect(self): \"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\" net",
"\"\"\" 启动进程 \"\"\" self.conn2.close() self.camera_detect() def camera_detect(self): \"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\" net = jetson.inference.detectNet(self.network,",
"if self.display_window: display.RenderOnce(img, width, height) if len(detections) > 0: detections_list = [] for",
"d.Top, d.Bottom, d.Area, d.Center]) self.conn1.send(detections_list) else: self.conn1.send([]) else: self.conn1.send([]) if __name__ == '__main__':",
"network: 选用的模型 :param threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5) :param display_window: 是否显示监视窗口 \"\"\" super(ObjectDetection, self).__init__() self.device =",
"- self.interval >= 1/self.frequency: self.interval = time.perf_counter() detections = net.Detect(img, width, height) if",
"1/self.frequency: self.interval = time.perf_counter() detections = net.Detect(img, width, height) if self.display_window: display.RenderOnce(img, width,",
"time.perf_counter() detections = net.Detect(img, width, height) if self.display_window: display.RenderOnce(img, width, height) if len(detections)",
"def camera_detect(self): \"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\" net = jetson.inference.detectNet(self.network, threshold=self.threshold) camera = jetson.utils.gstCamera(self.width, self.height,",
"= conn2 self.width = width self.height = height self.display_window = display_window self.interval =",
"self).__init__() self.device = device self.network = network self.frequency = frequency self.threshold = threshold",
"frequency self.threshold = threshold self.conn1 = conn1 self.conn2 = conn2 self.width = width",
"run(self): \"\"\" 启动进程 \"\"\" self.conn2.close() self.camera_detect() def camera_detect(self): \"\"\" 探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\" net =",
"multiprocessing import Process, Pipe import time class ObjectDetection(Process): \"\"\" 物体识别进程类,实际的识别程序 \"\"\" def __init__(self,",
"height self.display_window = display_window self.interval = time.perf_counter() self.stop = stop_process def run(self): \"\"\"",
"V4L2 display = jetson.utils.glDisplay() while display.IsOpen() and self.stop.value == 0: img, width, height",
"conn1: 管道1 :param conn2: 管道2 :param stop_process:停止标志 :param width: 摄像头宽 :param height: 摄像头高",
"0: img, width, height = camera.CaptureRGBA() if time.perf_counter() - self.interval >= 1/self.frequency: self.interval",
"conn1 self.conn2 = conn2 self.width = width self.height = height self.display_window = display_window",
"display_window self.interval = time.perf_counter() self.stop = stop_process def run(self): \"\"\" 启动进程 \"\"\" self.conn2.close()",
"time.perf_counter() - self.interval >= 1/self.frequency: self.interval = time.perf_counter() detections = net.Detect(img, width, height)",
"jetson.utils from multiprocessing import Process, Pipe import time class ObjectDetection(Process): \"\"\" 物体识别进程类,实际的识别程序 \"\"\"",
"d.Confidence, d.Left, d.Right, d.Top, d.Bottom, d.Area, d.Center]) self.conn1.send(detections_list) else: self.conn1.send([]) else: self.conn1.send([]) if",
"self.display_window = display_window self.interval = time.perf_counter() self.stop = stop_process def run(self): \"\"\" 启动进程",
"width, height = camera.CaptureRGBA() if time.perf_counter() - self.interval >= 1/self.frequency: self.interval = time.perf_counter()",
"device self.network = network self.frequency = frequency self.threshold = threshold self.conn1 = conn1",
"conn2 self.width = width self.height = height self.display_window = display_window self.interval = time.perf_counter()",
"\"\"\" 初始化识别进程 :param conn1: 管道1 :param conn2: 管道2 :param stop_process:停止标志 :param width: 摄像头宽",
"[] for d in detections: detections_list.append( [d.ClassID, d.Confidence, d.Left, d.Right, d.Top, d.Bottom, d.Area,",
"frequency=10, device=\"/dev/video0\", network=\"ssd-mobilenet-v2\", threshold=0.5, display_window=True): \"\"\" 初始化识别进程 :param conn1: 管道1 :param conn2: 管道2",
":param device: 摄像头设备文件 :param network: 选用的模型 :param threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5) :param display_window: 是否显示监视窗口 \"\"\"",
"self.display_window: display.RenderOnce(img, width, height) if len(detections) > 0: detections_list = [] for d",
"threshold=0.5, display_window=True): \"\"\" 初始化识别进程 :param conn1: 管道1 :param conn2: 管道2 :param stop_process:停止标志 :param",
"= jetson.inference.detectNet(self.network, threshold=self.threshold) camera = jetson.utils.gstCamera(self.width, self.height, self.device) # using V4L2 display =",
"width, height) if len(detections) > 0: detections_list = [] for d in detections:",
"= device self.network = network self.frequency = frequency self.threshold = threshold self.conn1 =",
"d.Left, d.Right, d.Top, d.Bottom, d.Area, d.Center]) self.conn1.send(detections_list) else: self.conn1.send([]) else: self.conn1.send([]) if __name__",
"= conn1 self.conn2 = conn2 self.width = width self.height = height self.display_window =",
"threshold: 阈值(就是可信度多少时认定为识别物,一般是0.5) :param display_window: 是否显示监视窗口 \"\"\" super(ObjectDetection, self).__init__() self.device = device self.network =",
"import time class ObjectDetection(Process): \"\"\" 物体识别进程类,实际的识别程序 \"\"\" def __init__(self, conn1, conn2, stop_process, width=320,",
"探测,并通过管道返回探测结果,没有达到刷新时间时,返回一个空的list(避免管道堵塞,其实应该有更好的方法) \"\"\" net = jetson.inference.detectNet(self.network, threshold=self.threshold) camera = jetson.utils.gstCamera(self.width, self.height, self.device) # using",
"= threshold self.conn1 = conn1 self.conn2 = conn2 self.width = width self.height =",
"width=320, height=240, frequency=10, device=\"/dev/video0\", network=\"ssd-mobilenet-v2\", threshold=0.5, display_window=True): \"\"\" 初始化识别进程 :param conn1: 管道1 :param",
"= time.perf_counter() detections = net.Detect(img, width, height) if self.display_window: display.RenderOnce(img, width, height) if",
"class ObjectDetection(Process): \"\"\" 物体识别进程类,实际的识别程序 \"\"\" def __init__(self, conn1, conn2, stop_process, width=320, height=240, frequency=10,",
":param conn2: 管道2 :param stop_process:停止标志 :param width: 摄像头宽 :param height: 摄像头高 :param frequency:",
"if len(detections) > 0: detections_list = [] for d in detections: detections_list.append( [d.ClassID,",
"物体识别进程类,实际的识别程序 \"\"\" def __init__(self, conn1, conn2, stop_process, width=320, height=240, frequency=10, device=\"/dev/video0\", network=\"ssd-mobilenet-v2\", threshold=0.5,",
"# using V4L2 display = jetson.utils.glDisplay() while display.IsOpen() and self.stop.value == 0: img,",
"Pipe import time class ObjectDetection(Process): \"\"\" 物体识别进程类,实际的识别程序 \"\"\" def __init__(self, conn1, conn2, stop_process,",
"and self.stop.value == 0: img, width, height = camera.CaptureRGBA() if time.perf_counter() - self.interval",
"self.interval >= 1/self.frequency: self.interval = time.perf_counter() detections = net.Detect(img, width, height) if self.display_window:",
"self.interval = time.perf_counter() detections = net.Detect(img, width, height) if self.display_window: display.RenderOnce(img, width, height)",
"time class ObjectDetection(Process): \"\"\" 物体识别进程类,实际的识别程序 \"\"\" def __init__(self, conn1, conn2, stop_process, width=320, height=240,",
"super(ObjectDetection, self).__init__() self.device = device self.network = network self.frequency = frequency self.threshold ="
] |
[
"if successful else None) try: data = self.data_queue.get(timeout=timeout) return (True, data) except Exception",
"2-tuple: # (bool: whether successfully get data, any: data if successful else None)",
"r is None: # Received the final signal assert done_event.is_set() return elif done_event.is_set():",
"because we don't use `.join()`. else: while True: success, data = self._try_get_batch() if",
"obtained from https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE = 0x00100000 self.manager_handle = self.kernel32.OpenProcess(SYNCHRONIZE, 0, self.manager_pid) if not",
"= True # Intialize C side signal handlers for SIGBUS and SIGSEGV. Python",
"do no-op. return # Normal exit when last reference is gone / iterator",
"> 0: success, data = self._try_get_batch(self.timeout) if success: return data else: raise RuntimeError('DataLoader",
"Loader Multiprocessing Shutdown Logic ] for details on # the logic of this",
"torch.utils.data import _utils import torch import random import sys from torch._six import queue",
"`cancel_join_thread` on, weird things can # happen when a worker is killed by",
"thread exited unexpectedly') # In this case, `self.data_queue` is a `queue.Queue`,. But we",
"join a started process. w.start() self.index_queues.append(index_queue) self.workers.append(w) if self.pin_memory: self.data_queue = queue.Queue() pin_memory_thread",
"the prefetch loop for _ in range(2 * self.num_workers): self._put_indices() def __len__(self): return",
"through OS is to let the worker have a process handle # of",
"a variable, # see NOTE [ Python Traceback Reference Cycle Problem ] data_queue.put((idx,",
"= index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except queue.Empty: continue if r is None: # Received the final",
"in # `Event.set()`. So we need to guard this with SIGCHLD handler, #",
"Exit `pin_memory_thread` first because exiting workers may leave # corrupted data in `worker_result_queue`",
"not self.manager_dead def _worker_loop(dataset, index_queue, data_queue, done_event, collate_fn, seed, init_fn, worker_id): # See",
"success: return data else: # while condition is false, i.e., pin_memory_thread died. raise",
"self.rcvd_idx: # store out-of-order samples self.reorder_dict[idx] = batch continue return self._process_next_batch(batch) next =",
"in range(self.num_workers): index_queue = multiprocessing.Queue() index_queue.cancel_join_thread() w = multiprocessing.Process( target=_worker_loop, args=(self.dataset, index_queue, self.worker_result_queue,",
"= os.getppid() self.manager_dead = False def is_alive(self): if not self.manager_dead: self.manager_dead = os.getppid()",
"if indices is None: return self.index_queues[self.worker_queue_idx].put((self.send_idx, indices)) self.worker_queue_idx = (self.worker_queue_idx + 1) %",
"done_event.is_set() return elif done_event.is_set(): # Done event is set. But I haven't received",
"(see comment above), we only register # pin_memory_thread once it is started. self.pin_memory_thread",
"args=(self.dataset, index_queue, self.worker_result_queue, self.done_event, self.collate_fn, base_seed + i, self.worker_init_fn, i)) w.daemon = True",
"starts, and __del__ tries to join but will get: # AssertionError: can only",
"bug https://bugs.python.org/issue2651 if batch.exc_type == KeyError and \"\\n\" in batch.exc_msg: raise Exception(\"KeyError:\" +",
"that we do not call .join() if program dies # before it starts,",
"event is set. But I haven't received the final signal # (None) yet.",
"{}) exited unexpectedly'.format(pids_str)) if isinstance(e, queue.Empty): return (False, None) raise def _get_batch(self): #",
"else: raise RuntimeError('DataLoader timed out after {} seconds'.format(self.timeout)) elif self.pin_memory: while self.pin_memory_thread.is_alive(): success,",
"return not self.manager_dead else: class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.manager_dead =",
"thread shares the # same pipe handles with this loader thread. If the",
"w in self.workers if not w.is_alive()) raise RuntimeError('DataLoader worker (pid(s) {}) exited unexpectedly'.format(pids_str))",
"signal handlers for SIGBUS and SIGSEGV. Python signal # module's handlers are executed",
"not self.manager_dead: # Value obtained from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead = self.kernel32.WaitForSingleObject(self.manager_handle, 0) == 0",
"if isinstance(e, queue.Empty): return (False, None) raise def _get_batch(self): # Fetches data from",
"is data in the queue. self.worker_result_queue.close() # Exit workers now. for q in",
"os.getppid() self.kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) self.kernel32.OpenProcess.argtypes = (DWORD, BOOL, DWORD) self.kernel32.OpenProcess.restype = HANDLE",
"self.num_workers): self._put_indices() def __len__(self): return len(self.batch_sampler) def _try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL): # Tries to fetch",
"this function does is putting into queues that # we have called `cancel_join_thread`",
"# same pipe handles with this loader thread. If the handle is #",
"# may raise StopIteration batch = self.collate_fn([self.dataset[i] for i in indices]) if self.pin_memory:",
"# TODO: add limited pickling support for sharing an iterator # across multiple",
"`pin_memory_thread` is joined because that thread shares the # same pipe handles with",
"from C low-level # handlers, likely when the same fatal signal had already",
"has already been generated if self.rcvd_idx in self.reorder_dict: batch = self.reorder_dict.pop(self.rcvd_idx) return self._process_next_batch(batch)",
"details on the # logic of this function. try: collate._use_shared_memory = True #",
"happened # again. # https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers() torch.set_num_threads(1) random.seed(seed) torch.manual_seed(seed) data_queue.cancel_join_thread() if init_fn is",
"(True, data) except Exception as e: # At timeout and error, we manually",
"if self.pin_memory: self.data_queue = queue.Queue() pin_memory_thread = threading.Thread( target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue, self.data_queue, torch.cuda.current_device(), self.done_event))",
"the next sample has already been generated if self.rcvd_idx in self.reorder_dict: batch =",
"self.reorder_dict[idx] = batch continue return self._process_next_batch(batch) next = __next__ # Python 2 compatibility",
"return self.index_queues[self.worker_queue_idx].put((self.send_idx, indices)) self.worker_queue_idx = (self.worker_queue_idx + 1) % self.num_workers self.batches_outstanding += 1",
"= loader.batch_sampler self.num_workers = loader.num_workers self.pin_memory = loader.pin_memory and torch.cuda.is_available() self.timeout = loader.timeout",
"called `cancel_join_thread` on, weird things can # happen when a worker is killed",
"as multiprocessing import threading from torch.utils.data import _utils import torch import random import",
"make multiline KeyError msg readable by working around # a python bug https://bugs.python.org/issue2651",
"we do not call .join() if program dies # before it starts, and",
"will be put on this queue by the # current process. This **must**",
"e: # At timeout and error, we manually check whether any worker has",
"self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set = False def __del__(self): if self.num_workers > 0: self._shutdown_workers() class",
"structure only at the # end. # # FIXME: Unfortunately, for Windows, we",
"successful else None) try: data = self.data_queue.get(timeout=timeout) return (True, data) except Exception as",
"# https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers() torch.set_num_threads(1) random.seed(seed) torch.manual_seed(seed) data_queue.cancel_join_thread() if init_fn is not None: init_fn(worker_id)",
"we set the attribute. # First time do `worker_result_queue.put` in this process. #",
"exited. self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None) self.pin_memory_thread.join() # Indicate that no more data will be put",
"self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self), tuple(w.pid for w in self.workers)) _utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set = True # prime",
"missing a worker # error detection mechanism here in this function, as it",
"it through OS is to let the worker have a process handle #",
"time # out even if there is data in the queue. self.worker_result_queue.close() #",
"same-process loading indices = next(self.sample_iter) # may raise StopIteration batch = self.collate_fn([self.dataset[i] for",
"self.worker_result_queue.put(None) self.pin_memory_thread.join() # Indicate that no more data will be put on this",
"def __len__(self): return len(self.batch_sampler) def _try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL): # Tries to fetch data from",
"except KeyboardInterrupt: # Main process will raise KeyboardInterrupt anyways. pass # Balanced batch",
"loop for _ in range(2 * self.num_workers): self._put_indices() def __len__(self): return len(self.batch_sampler) def",
"batch_size=1, shuffle=False, sampler=None, batch_sampler=None, num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None): torch.utils.data.DataLoader.__init__(self, dataset=dataset, batch_size=batch_size,",
"while self.pin_memory_thread.is_alive(): success, data = self._try_get_batch() if success: return data else: # while",
"low-level # handlers, likely when the same fatal signal had already happened #",
"# across multiple threads for HOGWILD. # Probably the best way to do",
"happen when a worker is killed by a signal, e.g., hanging in #",
"class HookDataLoader(torch.utils.data.DataLoader): def __init__(self, dataset, batch_size=1, shuffle=False, sampler=None, batch_sampler=None, num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False, drop_last=False,",
"we manually check whether any worker has # failed. Note that this is",
"= (self.worker_queue_idx + 1) % self.num_workers self.batches_outstanding += 1 self.send_idx += 1 def",
"hanging in # `Event.set()`. So we need to guard this with SIGCHLD handler,",
"raise RuntimeError('Pin memory thread exited unexpectedly') # In this case, `self.data_queue` is a",
"the # logic of this function. try: collate._use_shared_memory = True # Intialize C",
"the data queue # but signalling the end is tricky without a non-blocking",
"is to let the worker have a process handle # of the manager",
"use `.join()`. else: while True: success, data = self._try_get_batch() if success: return data",
"= loader.collate_fn self.batch_sampler = loader.batch_sampler self.num_workers = loader.num_workers self.pin_memory = loader.pin_memory and torch.cuda.is_available()",
"self.index_queues.append(index_queue) self.workers.append(w) if self.pin_memory: self.data_queue = queue.Queue() pin_memory_thread = threading.Thread( target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue, self.data_queue,",
"SIGCHLD handler is also used for # worker failure detection. # # If",
"a process handle # of the manager and ask if the process status",
"if not self.manager_handle: raise ctypes.WinError(ctypes.get_last_error()) self.manager_dead = False def is_alive(self): if not self.manager_dead:",
"self._try_get_batch() if success: return data else: # while condition is false, i.e., pin_memory_thread",
"0: success, data = self._try_get_batch(self.timeout) if success: return data else: raise RuntimeError('DataLoader timed",
"detect # worker failures. if not all(w.is_alive() for w in self.workers): pids_str =",
"IS_WINDOWS: import ctypes from ctypes.wintypes import DWORD, BOOL, HANDLE # On Windows, the",
"this function. python_exit_status = _utils.python_exit_status if python_exit_status is True or python_exit_status is None:",
"case that `pin_memory_thread` exited. self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None) self.pin_memory_thread.join() # Indicate that no more data",
"remove pids from the C side data structure only at the # end.",
"# Exit `pin_memory_thread` first because exiting workers may leave # corrupted data in",
"side data structure only at the # end. # # FIXME: Unfortunately, for",
"in case that `pin_memory_thread` exited. self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None) self.pin_memory_thread.join() # Indicate that no more",
"exc_info in a variable, # see NOTE [ Python Traceback Reference Cycle Problem",
"self.shutdown = True try: self.done_event.set() # Exit `pin_memory_thread` first because exiting workers may",
"torch import random import sys from torch._six import queue import os from torch.utils.data._utils",
"_utils.python_exit_status if python_exit_status is True or python_exit_status is None: # See (2) of",
"= 0 self.reorder_dict = {} self.done_event = multiprocessing.Event() self.index_queues = [] self.workers =",
"the logic of this function. python_exit_status = _utils.python_exit_status if python_exit_status is True or",
"but will get: # AssertionError: can only join a started process. w.start() self.index_queues.append(index_queue)",
"for i in indices]) if self.pin_memory: batch = _utils.pin_memory.pin_memory_batch(batch) return batch # check",
"a `queue.Queue`,. But we don't # need to call `.task_done()` because we don't",
"if not self.manager_dead: # Value obtained from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead = self.kernel32.WaitForSingleObject(self.manager_handle, 0) ==",
"by moving the sample pushing # to a separate thread and then just",
"`pin_memory=True`, we also need check if `pin_memory_thread` had # died at timeouts. if",
"# First time do `worker_result_queue.put` in this process. # `cancel_join_thread` in case that",
"True # Intialize C side signal handlers for SIGBUS and SIGSEGV. Python signal",
"to self.workers list after # it started, so that we do not call",
"Returns a 2-tuple: # (bool: whether successfully get data, any: data if successful",
"for details on # the logic of this function. python_exit_status = _utils.python_exit_status if",
"out-of-order samples self.reorder_dict[idx] = batch continue return self._process_next_batch(batch) next = __next__ # Python",
"raises a `RuntimeError` if any worker died expectedly. This error # can come",
"a pipe. # Therefore, we only add a worker to self.workers list after",
"import torch import random import sys from torch._six import queue import os from",
"compatibility def __iter__(self): return self def _put_indices(self): assert self.batches_outstanding < 2 * self.num_workers",
"self.manager_dead: self.manager_dead = os.getppid() != self.manager_pid return not self.manager_dead def _worker_loop(dataset, index_queue, data_queue,",
"= self.collate_fn([self.dataset[i] for i in indices]) if self.pin_memory: batch = _utils.pin_memory.pin_memory_batch(batch) return batch",
"self.batches_outstanding > 0) idx, batch = self._get_batch() self.batches_outstanding -= 1 if idx !=",
"# sender status as the loop condition. # # This raises a `RuntimeError`",
"batch = self._get_batch() self.batches_outstanding -= 1 if idx != self.rcvd_idx: # store out-of-order",
"for SIGBUS and SIGSEGV. Python signal # module's handlers are executed after Python",
"sharing the data queue # but signalling the end is tricky without a",
"not w.is_alive()) raise RuntimeError('DataLoader worker (pid(s) {}) exited unexpectedly'.format(pids_str)) if isinstance(e, queue.Empty): return",
"to detect worker failures for # Windows. For other platforms, a SIGCHLD handler",
"self.reorder_dict.pop(self.rcvd_idx) return self._process_next_batch(batch) if self.batches_outstanding == 0: self._shutdown_workers() raise StopIteration while True: assert",
"not self.manager_dead else: class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.manager_dead = False",
"only join a started process. w.start() self.index_queues.append(index_queue) self.workers.append(w) if self.pin_memory: self.data_queue = queue.Queue()",
"then just sharing the data queue # but signalling the end is tricky",
"return batch # check if the next sample has already been generated if",
"ExceptionWrapper, IS_WINDOWS if IS_WINDOWS: import ctypes from ctypes.wintypes import DWORD, BOOL, HANDLE #",
"loader.dataset self.collate_fn = loader.collate_fn self.batch_sampler = loader.batch_sampler self.num_workers = loader.num_workers self.pin_memory = loader.pin_memory",
"self.pin_memory_thread.is_alive(): success, data = self._try_get_batch() if success: return data else: # while condition",
"worker_init_fn=None): torch.utils.data.DataLoader.__init__(self, dataset=dataset, batch_size=batch_size, shuffle=shuffle, sampler=sampler, batch_sampler=batch_sampler, num_workers=num_workers, collate_fn=collate_fn, pin_memory=pin_memory, drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn)",
"of the manager and ask if the process status has changed. class ManagerWatchdog(object):",
"loader.worker_init_fn self.worker_queue_idx = 0 self.worker_result_queue = multiprocessing.Queue() self.batches_outstanding = 0 self.worker_pids_set = False",
"self.workers.append(w) if self.pin_memory: self.data_queue = queue.Queue() pin_memory_thread = threading.Thread( target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue, self.data_queue, torch.cuda.current_device(),",
"self.pin_memory: self.data_queue = queue.Queue() pin_memory_thread = threading.Thread( target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue, self.data_queue, torch.cuda.current_device(), self.done_event)) pin_memory_thread.daemon",
"have called `cancel_join_thread` on, weird things can # happen when a worker is",
"# # This raises a `RuntimeError` if any worker died expectedly. This error",
"any worker died expectedly. This error # can come from either the SIGCHLD",
"] data_queue.put((idx, ExceptionWrapper(sys.exc_info()))) else: data_queue.put((idx, samples)) del samples except KeyboardInterrupt: # Main process",
"self.rcvd_idx = 0 self.reorder_dict = {} self.done_event = multiprocessing.Event() self.index_queues = [] self.workers",
"when last reference is gone / iterator is depleted. # See (1) and",
"__init__(self, loader): self.dataset = loader.dataset self.collate_fn = loader.collate_fn self.batch_sampler = loader.batch_sampler self.num_workers =",
"True pin_memory_thread.start() # Similar to workers (see comment above), we only register #",
"# Done event is set. But I haven't received the final signal #",
"status as the loop condition. # # This raises a `RuntimeError` if any",
"on # the logic of this function. python_exit_status = _utils.python_exit_status if python_exit_status is",
"None: # See (2) of the note. If Python is shutting down, do",
"self def _put_indices(self): assert self.batches_outstanding < 2 * self.num_workers indices = next(self.sample_iter, None)",
"is_alive(self): if not self.manager_dead: self.manager_dead = os.getppid() != self.manager_pid return not self.manager_dead def",
"depleted. # See (1) and the second half of the note. if not",
"failures for # Windows. For other platforms, a SIGCHLD handler is also used",
"0 self.worker_result_queue = multiprocessing.Queue() self.batches_outstanding = 0 self.worker_pids_set = False self.shutdown = False",
"BOOL, HANDLE # On Windows, the parent ID of the worker process remains",
"as inner loop of fetching without timeout, with the # sender status as",
"as it # doesn't provide a SIGCHLD handler. if self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set =",
"True: success, data = self._try_get_batch() if success: return data def __next__(self): if self.num_workers",
"= next(self.sample_iter, None) if indices is None: return self.index_queues[self.worker_queue_idx].put((self.send_idx, indices)) self.worker_queue_idx = (self.worker_queue_idx",
"Probably the best way to do this is by moving the sample pushing",
"in `worker_result_queue` which `pin_memory_thread` # reads from. if hasattr(self, 'pin_memory_thread'): # Use hasattr",
"collate_fn=collate_fn, pin_memory=pin_memory, drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn) def __iter__(self): return HookDataloderIter(self) class HookDataset(object): def __before_hook__(self):",
"(only for non-Windows platforms), or the manual check below on errors # and",
"Reference Cycle Problem ] data_queue.put((idx, ExceptionWrapper(sys.exc_info()))) else: data_queue.put((idx, samples)) del samples except KeyboardInterrupt:",
"= 0 self.rcvd_idx = 0 self.reorder_dict = {} self.done_event = multiprocessing.Event() self.index_queues =",
"process status has changed. class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.kernel32 =",
"the # sender status as the loop condition. # # This raises a",
"# died at timeouts. if self.timeout > 0: success, data = self._try_get_batch(self.timeout) if",
"def __iter__(self): return self def _put_indices(self): assert self.batches_outstanding < 2 * self.num_workers indices",
"== KeyError and \"\\n\" in batch.exc_msg: raise Exception(\"KeyError:\" + batch.exc_msg) else: raise batch.exc_type(batch.exc_msg)",
"platforms), or the manual check below on errors # and timeouts. # #",
"processing steps. continue idx, batch_indices = r try: dataset.__before_hook__() samples = collate_fn([dataset[i] for",
"= loader.worker_init_fn self.worker_queue_idx = 0 self.worker_result_queue = multiprocessing.Queue() self.batches_outstanding = 0 self.worker_pids_set =",
"loader.num_workers self.pin_memory = loader.pin_memory and torch.cuda.is_available() self.timeout = loader.timeout self.sample_iter = iter(self.batch_sampler) base_seed",
"dataset=dataset, batch_size=batch_size, shuffle=shuffle, sampler=sampler, batch_sampler=batch_sampler, num_workers=num_workers, collate_fn=collate_fn, pin_memory=pin_memory, drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn) def __iter__(self):",
"num_workers=num_workers, collate_fn=collate_fn, pin_memory=pin_memory, drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn) def __iter__(self): return HookDataloderIter(self) class HookDataset(object): def",
"In this case, `self.data_queue` is a `queue.Queue`,. But we don't # need to",
"by the # current process. q.close() for w in self.workers: w.join() finally: #",
"check it through OS is to let the worker have a process handle",
"put on this queue by the # current process. This **must** be called",
"from torch.utils.data._utils import collate, signal_handling, MP_STATUS_CHECK_INTERVAL, \\ ExceptionWrapper, IS_WINDOWS if IS_WINDOWS: import ctypes",
"a worker # error detection mechanism here in this function, as it #",
"def __init__(self): self.manager_pid = os.getppid() self.kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) self.kernel32.OpenProcess.argtypes = (DWORD, BOOL,",
"this function, as it # doesn't provide a SIGCHLD handler. if self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self))",
"self.manager_pid return not self.manager_dead def _worker_loop(dataset, index_queue, data_queue, done_event, collate_fn, seed, init_fn, worker_id):",
"for w in self.workers if not w.is_alive()) raise RuntimeError('DataLoader worker (pid(s) {}) exited",
"started process. w.start() self.index_queues.append(index_queue) self.workers.append(w) if self.pin_memory: self.data_queue = queue.Queue() pin_memory_thread = threading.Thread(",
"in a loop. This is the only mechanism to detect worker failures for",
"success, data = self._try_get_batch() if success: return data def __next__(self): if self.num_workers ==",
"prefetch loop for _ in range(2 * self.num_workers): self._put_indices() def __len__(self): return len(self.batch_sampler)",
"this queue by the # current process. This **must** be called after #",
"= _utils.pin_memory.pin_memory_batch(batch) return batch # check if the next sample has already been",
"sender status as the loop condition. # # This raises a `RuntimeError` if",
"come from either the SIGCHLD handler in `_utils/signal_handling.py` # (only for non-Windows platforms),",
"can # also be used as inner loop of fetching without timeout, with",
"import torch.multiprocessing as multiprocessing import threading from torch.utils.data import _utils import torch import",
"samples)) del samples except KeyboardInterrupt: # Main process will raise KeyboardInterrupt anyways. pass",
"raise def _get_batch(self): # Fetches data from `self.data_queue`. # # We check workers'",
"`self.data_queue`. # # We check workers' status every `MP_STATUS_CHECK_INTERVAL` seconds, # which we",
"range(2 * self.num_workers): self._put_indices() def __len__(self): return len(self.batch_sampler) def _try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL): # Tries",
"only at the # end. # # FIXME: Unfortunately, for Windows, we are",
"current process. q.close() for w in self.workers: w.join() finally: # Even though all",
"join but will get: # AssertionError: can only join a started process. w.start()",
"_ in range(2 * self.num_workers): self._put_indices() def __len__(self): return len(self.batch_sampler) def _try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL):",
"data will be put on this queue by the # current process. q.close()",
"module's handlers are executed after Python returns from C low-level # handlers, likely",
"def _shutdown_workers(self): # See NOTE [ Data Loader Multiprocessing Shutdown Logic ] for",
"tuple(w.pid for w in self.workers)) _utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set = True # prime the prefetch",
"if program dies # before it starts, and __del__ tries to join but",
"loader class HookDataloderIter(object): def __init__(self, loader): self.dataset = loader.dataset self.collate_fn = loader.collate_fn self.batch_sampler",
"we achieve by running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` # in a loop. This is the only",
"time do `worker_result_queue.put` in this process. # `cancel_join_thread` in case that `pin_memory_thread` exited.",
"0: self.worker_init_fn = loader.worker_init_fn self.worker_queue_idx = 0 self.worker_result_queue = multiprocessing.Queue() self.batches_outstanding = 0",
"ctypes from ctypes.wintypes import DWORD, BOOL, HANDLE # On Windows, the parent ID",
"check whether any worker has # failed. Note that this is the only",
"if isinstance(batch, _utils.ExceptionWrapper): # make multiline KeyError msg readable by working around #",
"None) if indices is None: return self.index_queues[self.worker_queue_idx].put((self.send_idx, indices)) self.worker_queue_idx = (self.worker_queue_idx + 1)",
"Done event is set. But I haven't received the final signal # (None)",
"random.seed(seed) torch.manual_seed(seed) data_queue.cancel_join_thread() if init_fn is not None: init_fn(worker_id) watchdog = ManagerWatchdog() while",
"a worker is killed by a signal, e.g., hanging in # `Event.set()`. So",
"be put on this queue by the # current process. q.close() for w",
"self.index_queues[self.worker_queue_idx].put((self.send_idx, indices)) self.worker_queue_idx = (self.worker_queue_idx + 1) % self.num_workers self.batches_outstanding += 1 self.send_idx",
"Note that this is the only mechanism for Windows to detect # worker",
"But we don't # need to call `.task_done()` because we don't use `.join()`.",
"the same fatal signal had already happened # again. # https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers() torch.set_num_threads(1)",
"working around # a python bug https://bugs.python.org/issue2651 if batch.exc_type == KeyError and \"\\n\"",
"self.pin_memory_thread = pin_memory_thread else: self.data_queue = self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self), tuple(w.pid for w in self.workers))",
"self.manager_dead = os.getppid() != self.manager_pid return not self.manager_dead def _worker_loop(dataset, index_queue, data_queue, done_event,",
"status has changed. class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.kernel32 = ctypes.WinDLL('kernel32',",
"executed after Python returns from C low-level # handlers, likely when the same",
"get it, and skip the # processing steps. continue idx, batch_indices = r",
"last reference is gone / iterator is depleted. # See (1) and the",
"Intialize C side signal handlers for SIGBUS and SIGSEGV. Python signal # module's",
"Python Traceback Reference Cycle Problem ] data_queue.put((idx, ExceptionWrapper(sys.exc_info()))) else: data_queue.put((idx, samples)) del samples",
"next(self.sample_iter, None) if indices is None: return self.index_queues[self.worker_queue_idx].put((self.send_idx, indices)) self.worker_queue_idx = (self.worker_queue_idx +",
"are executed after Python returns from C low-level # handlers, likely when the",
"do `worker_result_queue.put` in this process. # `cancel_join_thread` in case that `pin_memory_thread` exited. self.worker_result_queue.cancel_join_thread()",
"# This raises a `RuntimeError` if any worker died expectedly. This error #",
"keep continuing until get it, and skip the # processing steps. continue idx,",
"the best way to do this is by moving the sample pushing #",
"class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.manager_dead = False def is_alive(self): if",
"check if `pin_memory_thread` had # died at timeouts. if self.timeout > 0: success,",
"# Therefore, we only add a worker to self.workers list after # it",
"pin_memory_thread = threading.Thread( target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue, self.data_queue, torch.cuda.current_device(), self.done_event)) pin_memory_thread.daemon = True pin_memory_thread.start() #",
"> 0: self.worker_init_fn = loader.worker_init_fn self.worker_queue_idx = 0 self.worker_result_queue = multiprocessing.Queue() self.batches_outstanding =",
"Multiprocessing Shutdown Logic ] for details on # the logic of this function.",
"isinstance(batch, _utils.ExceptionWrapper): # make multiline KeyError msg readable by working around # a",
"ask if the process status has changed. class ManagerWatchdog(object): def __init__(self): self.manager_pid =",
"return self._process_next_batch(batch) next = __next__ # Python 2 compatibility def __iter__(self): return self",
"= queue.Queue() pin_memory_thread = threading.Thread( target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue, self.data_queue, torch.cuda.current_device(), self.done_event)) pin_memory_thread.daemon = True",
"= multiprocessing.Queue() index_queue.cancel_join_thread() w = multiprocessing.Process( target=_worker_loop, args=(self.dataset, index_queue, self.worker_result_queue, self.done_event, self.collate_fn, base_seed",
"See (2) of the note. If Python is shutting down, do no-op. return",
"# Fetches data from `self.data_queue`. # # We check workers' status every `MP_STATUS_CHECK_INTERVAL`",
"os from torch.utils.data._utils import collate, signal_handling, MP_STATUS_CHECK_INTERVAL, \\ ExceptionWrapper, IS_WINDOWS if IS_WINDOWS: import",
"is set. But I haven't received the final signal # (None) yet. I",
"_utils.signal_handling._set_worker_pids(id(self), tuple(w.pid for w in self.workers)) _utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set = True # prime the",
"self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None) self.pin_memory_thread.join() # Indicate that no more data will be put on",
"AssertionError: can only join a started process. w.start() self.index_queues.append(index_queue) self.workers.append(w) if self.pin_memory: self.data_queue",
"data = self._try_get_batch() if success: return data def __next__(self): if self.num_workers == 0:",
"just sharing the data queue # but signalling the end is tricky without",
"base_seed + i, self.worker_init_fn, i)) w.daemon = True # NB: Process.start() actually take",
"into queues that # we have called `cancel_join_thread` on, weird things can #",
"self.manager_pid = os.getppid() self.manager_dead = False def is_alive(self): if not self.manager_dead: self.manager_dead =",
"self.kernel32.OpenProcess(SYNCHRONIZE, 0, self.manager_pid) if not self.manager_handle: raise ctypes.WinError(ctypes.get_last_error()) self.manager_dead = False def is_alive(self):",
"side signal handlers for SIGBUS and SIGSEGV. Python signal # module's handlers are",
"# It is important that we don't store exc_info in a variable, #",
"hasattr(self, 'pin_memory_thread'): # Use hasattr in case error happens before we set the",
"collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None): torch.utils.data.DataLoader.__init__(self, dataset=dataset, batch_size=batch_size, shuffle=shuffle, sampler=sampler, batch_sampler=batch_sampler, num_workers=num_workers, collate_fn=collate_fn,",
"self.pin_memory: batch = _utils.pin_memory.pin_memory_batch(batch) return batch # check if the next sample has",
"and self.batches_outstanding > 0) idx, batch = self._get_batch() self.batches_outstanding -= 1 if idx",
"else: # while condition is false, i.e., pin_memory_thread died. raise RuntimeError('Pin memory thread",
"index_queue = multiprocessing.Queue() index_queue.cancel_join_thread() w = multiprocessing.Process( target=_worker_loop, args=(self.dataset, index_queue, self.worker_result_queue, self.done_event, self.collate_fn,",
"Multiprocessing Shutdown Logic ] for details on the # logic of this function.",
"# Value obtained from https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE = 0x00100000 self.manager_handle = self.kernel32.OpenProcess(SYNCHRONIZE, 0, self.manager_pid)",
"(1) and the second half of the note. if not self.shutdown: self.shutdown =",
"by the # current process. This **must** be called after # `pin_memory_thread` is",
"the end is tricky without a non-blocking API raise NotImplementedError(\"_DataLoaderIter cannot be pickled\")",
".join() if program dies # before it starts, and __del__ tries to join",
"index_queue, data_queue, done_event, collate_fn, seed, init_fn, worker_id): # See NOTE [ Data Loader",
"If `pin_memory=True`, we also need check if `pin_memory_thread` had # died at timeouts.",
"self.manager_handle = self.kernel32.OpenProcess(SYNCHRONIZE, 0, self.manager_pid) if not self.manager_handle: raise ctypes.WinError(ctypes.get_last_error()) self.manager_dead = False",
"import sys from torch._six import queue import os from torch.utils.data._utils import collate, signal_handling,",
"# Main process will raise KeyboardInterrupt anyways. pass # Balanced batch sampler and",
"success: return data else: raise RuntimeError('DataLoader timed out after {} seconds'.format(self.timeout)) elif self.pin_memory:",
"0 return not self.manager_dead else: class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.manager_dead",
"\\ ExceptionWrapper, IS_WINDOWS if IS_WINDOWS: import ctypes from ctypes.wintypes import DWORD, BOOL, HANDLE",
"get: # AssertionError: can only join a started process. w.start() self.index_queues.append(index_queue) self.workers.append(w) if",
"idx, batch = self._get_batch() self.batches_outstanding -= 1 if idx != self.rcvd_idx: # store",
"torch.LongTensor(1).random_().item() if self.num_workers > 0: self.worker_init_fn = loader.worker_init_fn self.worker_queue_idx = 0 self.worker_result_queue =",
"in self.workers if not w.is_alive()) raise RuntimeError('DataLoader worker (pid(s) {}) exited unexpectedly'.format(pids_str)) if",
"False def __del__(self): if self.num_workers > 0: self._shutdown_workers() class HookDataLoader(torch.utils.data.DataLoader): def __init__(self, dataset,",
"drop_last=False, timeout=0, worker_init_fn=None): torch.utils.data.DataLoader.__init__(self, dataset=dataset, batch_size=batch_size, shuffle=shuffle, sampler=sampler, batch_sampler=batch_sampler, num_workers=num_workers, collate_fn=collate_fn, pin_memory=pin_memory, drop_last=drop_last,",
"fetching without timeout, with the # sender status as the loop condition. #",
"for Windows to detect # worker failures. if not all(w.is_alive() for w in",
"1 def _process_next_batch(self, batch): self.rcvd_idx += 1 self._put_indices() if isinstance(batch, _utils.ExceptionWrapper): # make",
"+ 1) % self.num_workers self.batches_outstanding += 1 self.send_idx += 1 def _process_next_batch(self, batch):",
"will get: # AssertionError: can only join a started process. w.start() self.index_queues.append(index_queue) self.workers.append(w)",
"signal_handling._set_worker_signal_handlers() torch.set_num_threads(1) random.seed(seed) torch.manual_seed(seed) data_queue.cancel_join_thread() if init_fn is not None: init_fn(worker_id) watchdog =",
"self.kernel32.OpenProcess.argtypes = (DWORD, BOOL, DWORD) self.kernel32.OpenProcess.restype = HANDLE self.kernel32.WaitForSingleObject.argtypes = (HANDLE, DWORD) self.kernel32.WaitForSingleObject.restype",
"online train loader class HookDataloderIter(object): def __init__(self, loader): self.dataset = loader.dataset self.collate_fn =",
"case error happens before we set the attribute. # First time do `worker_result_queue.put`",
"and ask if the process status has changed. class ManagerWatchdog(object): def __init__(self): self.manager_pid",
"elif done_event.is_set(): # Done event is set. But I haven't received the final",
"# need to call `.task_done()` because we don't use `.join()`. else: while True:",
"pin_memory_thread once it is started. self.pin_memory_thread = pin_memory_thread else: self.data_queue = self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self),",
"except Exception as e: # At timeout and error, we manually check whether",
"self.worker_result_queue, self.done_event, self.collate_fn, base_seed + i, self.worker_init_fn, i)) w.daemon = True # NB:",
"has # failed. Note that this is the only mechanism for Windows to",
"multiprocessing.Event() self.index_queues = [] self.workers = [] for i in range(self.num_workers): index_queue =",
"failed. Note that this is the only mechanism for Windows to detect #",
"IS_WINDOWS if IS_WINDOWS: import ctypes from ctypes.wintypes import DWORD, BOOL, HANDLE # On",
"import ctypes from ctypes.wintypes import DWORD, BOOL, HANDLE # On Windows, the parent",
"is a `queue.Queue`,. But we don't # need to call `.task_done()` because we",
"# to a separate thread and then just sharing the data queue #",
"batch.exc_type(batch.exc_msg) return batch def __getstate__(self): # TODO: add limited pickling support for sharing",
"self.batches_outstanding += 1 self.send_idx += 1 def _process_next_batch(self, batch): self.rcvd_idx += 1 self._put_indices()",
"worker has # failed. Note that this is the only mechanism for Windows",
"anyways. pass # Balanced batch sampler and online train loader class HookDataloderIter(object): def",
"data if successful else None) try: data = self.data_queue.get(timeout=timeout) return (True, data) except",
"Therefore, we only add a worker to self.workers list after # it started,",
"we don't # need to call `.task_done()` because we don't use `.join()`. else:",
"= False def is_alive(self): if not self.manager_dead: # Value obtained from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead",
"is false, i.e., pin_memory_thread died. raise RuntimeError('Pin memory thread exited unexpectedly') # In",
"memory thread exited unexpectedly') # In this case, `self.data_queue` is a `queue.Queue`,. But",
"dataset, batch_size=1, shuffle=False, sampler=None, batch_sampler=None, num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None): torch.utils.data.DataLoader.__init__(self, dataset=dataset,",
"is depleted. # See (1) and the second half of the note. if",
"DWORD, BOOL, HANDLE # On Windows, the parent ID of the worker process",
"self.kernel32.WaitForSingleObject.argtypes = (HANDLE, DWORD) self.kernel32.WaitForSingleObject.restype = DWORD # Value obtained from https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE",
"in the queue. self.worker_result_queue.close() # Exit workers now. for q in self.index_queues: q.put(None)",
"[] self.workers = [] for i in range(self.num_workers): index_queue = multiprocessing.Queue() index_queue.cancel_join_thread() w",
"that # we have called `cancel_join_thread` on, weird things can # happen when",
"return elif done_event.is_set(): # Done event is set. But I haven't received the",
"SIGCHLD handler, # and remove pids from the C side data structure only",
"here in this function, as it # doesn't provide a SIGCHLD handler. if",
"class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) self.kernel32.OpenProcess.argtypes =",
"is by moving the sample pushing # to a separate thread and then",
"# Value obtained from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead = self.kernel32.WaitForSingleObject(self.manager_handle, 0) == 0 return not",
"function. python_exit_status = _utils.python_exit_status if python_exit_status is True or python_exit_status is None: #",
"done_event.is_set(): # Done event is set. But I haven't received the final signal",
"def _put_indices(self): assert self.batches_outstanding < 2 * self.num_workers indices = next(self.sample_iter, None) if",
"= next(self.sample_iter) # may raise StopIteration batch = self.collate_fn([self.dataset[i] for i in indices])",
"Exception as e: # At timeout and error, we manually check whether any",
"mechanism here in this function, as it # doesn't provide a SIGCHLD handler.",
"from `data_queue` for a given timeout. This can # also be used as",
"in indices]) if self.pin_memory: batch = _utils.pin_memory.pin_memory_batch(batch) return batch # check if the",
"function does is putting into queues that # we have called `cancel_join_thread` on,",
"because that thread shares the # same pipe handles with this loader thread.",
"worker failure detection. # # If `pin_memory=True`, we also need check if `pin_memory_thread`",
"+ i, self.worker_init_fn, i)) w.daemon = True # NB: Process.start() actually take some",
"self.manager_dead = self.kernel32.WaitForSingleObject(self.manager_handle, 0) == 0 return not self.manager_dead else: class ManagerWatchdog(object): def",
"return self def _put_indices(self): assert self.batches_outstanding < 2 * self.num_workers indices = next(self.sample_iter,",
"pass # Balanced batch sampler and online train loader class HookDataloderIter(object): def __init__(self,",
"readable by working around # a python bug https://bugs.python.org/issue2651 if batch.exc_type == KeyError",
"in range(2 * self.num_workers): self._put_indices() def __len__(self): return len(self.batch_sampler) def _try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL): #",
"pushing # to a separate thread and then just sharing the data queue",
"batch_indices = r try: dataset.__before_hook__() samples = collate_fn([dataset[i] for i in batch_indices]) dataset.__after_hook__()",
"all this function does is putting into queues that # we have called",
"= (HANDLE, DWORD) self.kernel32.WaitForSingleObject.restype = DWORD # Value obtained from https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE =",
"threads for HOGWILD. # Probably the best way to do this is by",
"# it started, so that we do not call .join() if program dies",
"self.workers = [] for i in range(self.num_workers): index_queue = multiprocessing.Queue() index_queue.cancel_join_thread() w =",
"process. w.start() self.index_queues.append(index_queue) self.workers.append(w) if self.pin_memory: self.data_queue = queue.Queue() pin_memory_thread = threading.Thread( target=_utils.pin_memory._pin_memory_loop,",
"elif self.pin_memory: while self.pin_memory_thread.is_alive(): success, data = self._try_get_batch() if success: return data else:",
"indices is None: return self.index_queues[self.worker_queue_idx].put((self.send_idx, indices)) self.worker_queue_idx = (self.worker_queue_idx + 1) % self.num_workers",
"self.kernel32.OpenProcess.restype = HANDLE self.kernel32.WaitForSingleObject.argtypes = (HANDLE, DWORD) self.kernel32.WaitForSingleObject.restype = DWORD # Value obtained",
"self.send_idx += 1 def _process_next_batch(self, batch): self.rcvd_idx += 1 self._put_indices() if isinstance(batch, _utils.ExceptionWrapper):",
"manual check below on errors # and timeouts. # # Returns a 2-tuple:",
"call .join() if program dies # before it starts, and __del__ tries to",
"end is tricky without a non-blocking API raise NotImplementedError(\"_DataLoaderIter cannot be pickled\") def",
"i in indices]) if self.pin_memory: batch = _utils.pin_memory.pin_memory_batch(batch) return batch # check if",
"self.num_workers == 0: # same-process loading indices = next(self.sample_iter) # may raise StopIteration",
"self.worker_result_queue = multiprocessing.Queue() self.batches_outstanding = 0 self.worker_pids_set = False self.shutdown = False self.send_idx",
"it # doesn't provide a SIGCHLD handler. if self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set = False",
"self.workers)) _utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set = True # prime the prefetch loop for _ in",
"tries to join but will get: # AssertionError: can only join a started",
"error happens before we set the attribute. # First time do `worker_result_queue.put` in",
"We check workers' status every `MP_STATUS_CHECK_INTERVAL` seconds, # which we achieve by running",
"for a given timeout. This can # also be used as inner loop",
"If Python is shutting down, do no-op. return # Normal exit when last",
"set. But I haven't received the final signal # (None) yet. I will",
"{} self.done_event = multiprocessing.Event() self.index_queues = [] self.workers = [] for i in",
"threading.Thread( target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue, self.data_queue, torch.cuda.current_device(), self.done_event)) pin_memory_thread.daemon = True pin_memory_thread.start() # Similar to",
"`pin_memory_thread` first because exiting workers may leave # corrupted data in `worker_result_queue` which",
"a SIGCHLD handler. if self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set = False def __del__(self): if self.num_workers",
"finally: # Even though all this function does is putting into queues that",
"[ Python Traceback Reference Cycle Problem ] data_queue.put((idx, ExceptionWrapper(sys.exc_info()))) else: data_queue.put((idx, samples)) del",
"# same-process loading indices = next(self.sample_iter) # may raise StopIteration batch = self.collate_fn([self.dataset[i]",
"(HANDLE, DWORD) self.kernel32.WaitForSingleObject.restype = DWORD # Value obtained from https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE = 0x00100000",
"data structure only at the # end. # # FIXME: Unfortunately, for Windows,",
"`MP_STATUS_CHECK_INTERVAL` seconds, # which we achieve by running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` # in a loop.",
"if hasattr(self, 'pin_memory_thread'): # Use hasattr in case error happens before we set",
"from ctypes.wintypes import DWORD, BOOL, HANDLE # On Windows, the parent ID of",
"handlers are executed after Python returns from C low-level # handlers, likely when",
"obtained from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead = self.kernel32.WaitForSingleObject(self.manager_handle, 0) == 0 return not self.manager_dead else:",
"timeouts. if self.timeout > 0: success, data = self._try_get_batch(self.timeout) if success: return data",
"continuing until get it, and skip the # processing steps. continue idx, batch_indices",
"watchdog = ManagerWatchdog() while watchdog.is_alive(): try: r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except queue.Empty: continue if",
"the second half of the note. if not self.shutdown: self.shutdown = True try:",
"seconds'.format(self.timeout)) elif self.pin_memory: while self.pin_memory_thread.is_alive(): success, data = self._try_get_batch() if success: return data",
"for w in self.workers: w.join() finally: # Even though all this function does",
"# Even though all this function does is putting into queues that #",
"= False self.send_idx = 0 self.rcvd_idx = 0 self.reorder_dict = {} self.done_event =",
"collate_fn([dataset[i] for i in batch_indices]) dataset.__after_hook__() except Exception: # It is important that",
"ctypes.wintypes import DWORD, BOOL, HANDLE # On Windows, the parent ID of the",
"is not None: init_fn(worker_id) watchdog = ManagerWatchdog() while watchdog.is_alive(): try: r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL)",
"signal_handling, MP_STATUS_CHECK_INTERVAL, \\ ExceptionWrapper, IS_WINDOWS if IS_WINDOWS: import ctypes from ctypes.wintypes import DWORD,",
"is shutting down, do no-op. return # Normal exit when last reference is",
"torch.manual_seed(seed) data_queue.cancel_join_thread() if init_fn is not None: init_fn(worker_id) watchdog = ManagerWatchdog() while watchdog.is_alive():",
"died at timeouts. if self.timeout > 0: success, data = self._try_get_batch(self.timeout) if success:",
"the only way to check it through OS is to let the worker",
"# # Returns a 2-tuple: # (bool: whether successfully get data, any: data",
"# can come from either the SIGCHLD handler in `_utils/signal_handling.py` # (only for",
"and __del__ tries to join but will get: # AssertionError: can only join",
"except queue.Empty: continue if r is None: # Received the final signal assert",
"q in self.index_queues: q.put(None) # Indicate that no more data will be put",
"happens before we set the attribute. # First time do `worker_result_queue.put` in this",
"the parent ID of the worker process remains unchanged when the manager process",
"best way to do this is by moving the sample pushing # to",
"loop condition. # # This raises a `RuntimeError` if any worker died expectedly.",
"< 2 * self.num_workers indices = next(self.sample_iter, None) if indices is None: return",
"= True # prime the prefetch loop for _ in range(2 * self.num_workers):",
"this with SIGCHLD handler, # and remove pids from the C side data",
"try: dataset.__before_hook__() samples = collate_fn([dataset[i] for i in batch_indices]) dataset.__after_hook__() except Exception: #",
"samples except KeyboardInterrupt: # Main process will raise KeyboardInterrupt anyways. pass # Balanced",
"from either the SIGCHLD handler in `_utils/signal_handling.py` # (only for non-Windows platforms), or",
"queue # but signalling the end is tricky without a non-blocking API raise",
"# is gone, and the only way to check it through OS is",
"exited unexpectedly'.format(pids_str)) if isinstance(e, queue.Empty): return (False, None) raise def _get_batch(self): # Fetches",
"is the only mechanism for Windows to detect # worker failures. if not",
"Use hasattr in case error happens before we set the attribute. # First",
"# make multiline KeyError msg readable by working around # a python bug",
"self.sample_iter = iter(self.batch_sampler) base_seed = torch.LongTensor(1).random_().item() if self.num_workers > 0: self.worker_init_fn = loader.worker_init_fn",
"return batch def __getstate__(self): # TODO: add limited pickling support for sharing an",
"in case error happens before we set the attribute. # First time do",
"raise Exception(\"KeyError:\" + batch.exc_msg) else: raise batch.exc_type(batch.exc_msg) return batch def __getstate__(self): # TODO:",
"if self.timeout > 0: success, data = self._try_get_batch(self.timeout) if success: return data else:",
"multiple threads for HOGWILD. # Probably the best way to do this is",
"idx, batch_indices = r try: dataset.__before_hook__() samples = collate_fn([dataset[i] for i in batch_indices])",
"`pin_memory_thread` had # died at timeouts. if self.timeout > 0: success, data =",
"# handlers, likely when the same fatal signal had already happened # again.",
"after Python returns from C low-level # handlers, likely when the same fatal",
"sample pushing # to a separate thread and then just sharing the data",
"_utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set = True # prime the prefetch loop for _ in range(2",
"__del__ tries to join but will get: # AssertionError: can only join a",
"KeyboardInterrupt: # Main process will raise KeyboardInterrupt anyways. pass # Balanced batch sampler",
"0 self.worker_pids_set = False self.shutdown = False self.send_idx = 0 self.rcvd_idx = 0",
"non-Windows platforms), or the manual check below on errors # and timeouts. #",
"RuntimeError('Pin memory thread exited unexpectedly') # In this case, `self.data_queue` is a `queue.Queue`,.",
"skip the # processing steps. continue idx, batch_indices = r try: dataset.__before_hook__() samples",
"self.data_queue = self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self), tuple(w.pid for w in self.workers)) _utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set = True",
"# and timeouts. # # Returns a 2-tuple: # (bool: whether successfully get",
"the only mechanism to detect worker failures for # Windows. For other platforms,",
"# worker failures. if not all(w.is_alive() for w in self.workers): pids_str = ',",
"w.start() self.index_queues.append(index_queue) self.workers.append(w) if self.pin_memory: self.data_queue = queue.Queue() pin_memory_thread = threading.Thread( target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue,",
"# Windows. For other platforms, a SIGCHLD handler is also used for #",
"# worker failure detection. # # If `pin_memory=True`, we also need check if",
"I haven't received the final signal # (None) yet. I will keep continuing",
"self.data_queue, torch.cuda.current_device(), self.done_event)) pin_memory_thread.daemon = True pin_memory_thread.start() # Similar to workers (see comment",
"self.data_queue.get(timeout=timeout) return (True, data) except Exception as e: # At timeout and error,",
"self.index_queues: q.put(None) # Indicate that no more data will be put on this",
"dataset.__after_hook__() except Exception: # It is important that we don't store exc_info in",
"until get it, and skip the # processing steps. continue idx, batch_indices =",
"a process and pass the arguments over via a pipe. # Therefore, we",
"died. raise RuntimeError('Pin memory thread exited unexpectedly') # In this case, `self.data_queue` is",
"function. try: collate._use_shared_memory = True # Intialize C side signal handlers for SIGBUS",
"loader.pin_memory and torch.cuda.is_available() self.timeout = loader.timeout self.sample_iter = iter(self.batch_sampler) base_seed = torch.LongTensor(1).random_().item() if",
"'.join(str(w.pid) for w in self.workers if not w.is_alive()) raise RuntimeError('DataLoader worker (pid(s) {})",
"the note. if not self.shutdown: self.shutdown = True try: self.done_event.set() # Exit `pin_memory_thread`",
"# start a process and pass the arguments over via a pipe. #",
"on errors # and timeouts. # # Returns a 2-tuple: # (bool: whether",
"case, but Py2 will just time # out even if there is data",
"batch_sampler=None, num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None): torch.utils.data.DataLoader.__init__(self, dataset=dataset, batch_size=batch_size, shuffle=shuffle, sampler=sampler, batch_sampler=batch_sampler,",
"if self.num_workers == 0: # same-process loading indices = next(self.sample_iter) # may raise",
"not self.manager_handle: raise ctypes.WinError(ctypes.get_last_error()) self.manager_dead = False def is_alive(self): if not self.manager_dead: #",
"before it starts, and __del__ tries to join but will get: # AssertionError:",
"indices = next(self.sample_iter, None) if indices is None: return self.index_queues[self.worker_queue_idx].put((self.send_idx, indices)) self.worker_queue_idx =",
"will just time # out even if there is data in the queue.",
"0 self.reorder_dict = {} self.done_event = multiprocessing.Event() self.index_queues = [] self.workers = []",
"needs to # start a process and pass the arguments over via a",
"handle is # closed, Py3 will error in this case, but Py2 will",
"is None: # See (2) of the note. If Python is shutting down,",
"a loop. This is the only mechanism to detect worker failures for #",
"args=(self.worker_result_queue, self.data_queue, torch.cuda.current_device(), self.done_event)) pin_memory_thread.daemon = True pin_memory_thread.start() # Similar to workers (see",
"1 if idx != self.rcvd_idx: # store out-of-order samples self.reorder_dict[idx] = batch continue",
"# If `pin_memory=True`, we also need check if `pin_memory_thread` had # died at",
"self._try_get_batch() if success: return data def __next__(self): if self.num_workers == 0: # same-process",
"will raise KeyboardInterrupt anyways. pass # Balanced batch sampler and online train loader",
"we only register # pin_memory_thread once it is started. self.pin_memory_thread = pin_memory_thread else:",
"# Python 2 compatibility def __iter__(self): return self def _put_indices(self): assert self.batches_outstanding <",
"[ Data Loader Multiprocessing Shutdown Logic ] for details on the # logic",
"self.data_queue = queue.Queue() pin_memory_thread = threading.Thread( target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue, self.data_queue, torch.cuda.current_device(), self.done_event)) pin_memory_thread.daemon =",
"in `_utils/signal_handling.py` # (only for non-Windows platforms), or the manual check below on",
"(False, None) raise def _get_batch(self): # Fetches data from `self.data_queue`. # # We",
"This raises a `RuntimeError` if any worker died expectedly. This error # can",
"before we set the attribute. # First time do `worker_result_queue.put` in this process.",
"multiprocessing.Process( target=_worker_loop, args=(self.dataset, index_queue, self.worker_result_queue, self.done_event, self.collate_fn, base_seed + i, self.worker_init_fn, i)) w.daemon",
"if python_exit_status is True or python_exit_status is None: # See (2) of the",
"continue if r is None: # Received the final signal assert done_event.is_set() return",
"and pass the arguments over via a pipe. # Therefore, we only add",
"of the note. If Python is shutting down, do no-op. return # Normal",
"self.pin_memory = loader.pin_memory and torch.cuda.is_available() self.timeout = loader.timeout self.sample_iter = iter(self.batch_sampler) base_seed =",
"of fetching without timeout, with the # sender status as the loop condition.",
"died expectedly. This error # can come from either the SIGCHLD handler in",
"# happen when a worker is killed by a signal, e.g., hanging in",
"self._shutdown_workers() raise StopIteration while True: assert (not self.shutdown and self.batches_outstanding > 0) idx,",
"collate_fn, seed, init_fn, worker_id): # See NOTE [ Data Loader Multiprocessing Shutdown Logic",
"below on errors # and timeouts. # # Returns a 2-tuple: # (bool:",
"whether any worker has # failed. Note that this is the only mechanism",
"worker died expectedly. This error # can come from either the SIGCHLD handler",
"(self.worker_queue_idx + 1) % self.num_workers self.batches_outstanding += 1 self.send_idx += 1 def _process_next_batch(self,",
"only mechanism for Windows to detect # worker failures. if not all(w.is_alive() for",
"def __init__(self): self.manager_pid = os.getppid() self.manager_dead = False def is_alive(self): if not self.manager_dead:",
"= self._try_get_batch() if success: return data else: # while condition is false, i.e.,",
"`_utils/signal_handling.py` # (only for non-Windows platforms), or the manual check below on errors",
"pipe handles with this loader thread. If the handle is # closed, Py3",
"HookDataloderIter(object): def __init__(self, loader): self.dataset = loader.dataset self.collate_fn = loader.collate_fn self.batch_sampler = loader.batch_sampler",
"indices)) self.worker_queue_idx = (self.worker_queue_idx + 1) % self.num_workers self.batches_outstanding += 1 self.send_idx +=",
"i in batch_indices]) dataset.__after_hook__() except Exception: # It is important that we don't",
"r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except queue.Empty: continue if r is None: # Received the",
"is True or python_exit_status is None: # See (2) of the note. If",
"# in a loop. This is the only mechanism to detect worker failures",
"== 0 return not self.manager_dead else: class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid()",
"on this queue by the # current process. This **must** be called after",
"ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) self.kernel32.OpenProcess.argtypes = (DWORD,",
"self.manager_pid = os.getppid() self.kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) self.kernel32.OpenProcess.argtypes = (DWORD, BOOL, DWORD) self.kernel32.OpenProcess.restype",
"1) % self.num_workers self.batches_outstanding += 1 self.send_idx += 1 def _process_next_batch(self, batch): self.rcvd_idx",
"data_queue.put((idx, samples)) del samples except KeyboardInterrupt: # Main process will raise KeyboardInterrupt anyways.",
"_get_batch(self): # Fetches data from `self.data_queue`. # # We check workers' status every",
"False self.send_idx = 0 self.rcvd_idx = 0 self.reorder_dict = {} self.done_event = multiprocessing.Event()",
"loader thread. If the handle is # closed, Py3 will error in this",
"need check if `pin_memory_thread` had # died at timeouts. if self.timeout > 0:",
"ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.manager_dead = False def is_alive(self): if not",
"timeout=_utils.MP_STATUS_CHECK_INTERVAL): # Tries to fetch data from `data_queue` for a given timeout. This",
"when the same fatal signal had already happened # again. # https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers()",
"and error, we manually check whether any worker has # failed. Note that",
"around # a python bug https://bugs.python.org/issue2651 if batch.exc_type == KeyError and \"\\n\" in",
"(2) of the note. If Python is shutting down, do no-op. return #",
"# doesn't provide a SIGCHLD handler. if self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set = False def",
"if init_fn is not None: init_fn(worker_id) watchdog = ManagerWatchdog() while watchdog.is_alive(): try: r",
"some time as it needs to # start a process and pass the",
"# corrupted data in `worker_result_queue` which `pin_memory_thread` # reads from. if hasattr(self, 'pin_memory_thread'):",
"need to call `.task_done()` because we don't use `.join()`. else: while True: success,",
"and timeouts. # # Returns a 2-tuple: # (bool: whether successfully get data,",
"= self.kernel32.WaitForSingleObject(self.manager_handle, 0) == 0 return not self.manager_dead else: class ManagerWatchdog(object): def __init__(self):",
"len(self.batch_sampler) def _try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL): # Tries to fetch data from `data_queue` for a",
"or python_exit_status is None: # See (2) of the note. If Python is",
"data_queue, done_event, collate_fn, seed, init_fn, worker_id): # See NOTE [ Data Loader Multiprocessing",
"self.manager_dead else: class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.manager_dead = False def",
"def __del__(self): if self.num_workers > 0: self._shutdown_workers() class HookDataLoader(torch.utils.data.DataLoader): def __init__(self, dataset, batch_size=1,",
"data) except Exception as e: # At timeout and error, we manually check",
"data from `data_queue` for a given timeout. This can # also be used",
"= threading.Thread( target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue, self.data_queue, torch.cuda.current_device(), self.done_event)) pin_memory_thread.daemon = True pin_memory_thread.start() # Similar",
"will error in this case, but Py2 will just time # out even",
"final signal assert done_event.is_set() return elif done_event.is_set(): # Done event is set. But",
"Normal exit when last reference is gone / iterator is depleted. # See",
"mechanism to detect worker failures for # Windows. For other platforms, a SIGCHLD",
"note. If Python is shutting down, do no-op. return # Normal exit when",
"for q in self.index_queues: q.put(None) # Indicate that no more data will be",
"of the worker process remains unchanged when the manager process # is gone,",
"if success: return data def __next__(self): if self.num_workers == 0: # same-process loading",
"and torch.cuda.is_available() self.timeout = loader.timeout self.sample_iter = iter(self.batch_sampler) base_seed = torch.LongTensor(1).random_().item() if self.num_workers",
"queue by the # current process. q.close() for w in self.workers: w.join() finally:",
"return data else: # while condition is false, i.e., pin_memory_thread died. raise RuntimeError('Pin",
"None) raise def _get_batch(self): # Fetches data from `self.data_queue`. # # We check",
"self.timeout > 0: success, data = self._try_get_batch(self.timeout) if success: return data else: raise",
"# prime the prefetch loop for _ in range(2 * self.num_workers): self._put_indices() def",
"cannot be pickled\") def _shutdown_workers(self): # See NOTE [ Data Loader Multiprocessing Shutdown",
"# `Event.set()`. So we need to guard this with SIGCHLD handler, # and",
"if self.num_workers > 0: self._shutdown_workers() class HookDataLoader(torch.utils.data.DataLoader): def __init__(self, dataset, batch_size=1, shuffle=False, sampler=None,",
"else: class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.manager_dead = False def is_alive(self):",
"used for # worker failure detection. # # If `pin_memory=True`, we also need",
"if success: return data else: # while condition is false, i.e., pin_memory_thread died.",
"way to check it through OS is to let the worker have a",
"worker have a process handle # of the manager and ask if the",
"if not w.is_alive()) raise RuntimeError('DataLoader worker (pid(s) {}) exited unexpectedly'.format(pids_str)) if isinstance(e, queue.Empty):",
"threading from torch.utils.data import _utils import torch import random import sys from torch._six",
"self.index_queues = [] self.workers = [] for i in range(self.num_workers): index_queue = multiprocessing.Queue()",
"started. self.pin_memory_thread = pin_memory_thread else: self.data_queue = self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self), tuple(w.pid for w in",
"try: data = self.data_queue.get(timeout=timeout) return (True, data) except Exception as e: # At",
"we need to guard this with SIGCHLD handler, # and remove pids from",
"self.done_event = multiprocessing.Event() self.index_queues = [] self.workers = [] for i in range(self.num_workers):",
"try: collate._use_shared_memory = True # Intialize C side signal handlers for SIGBUS and",
"# also be used as inner loop of fetching without timeout, with the",
"python_exit_status = _utils.python_exit_status if python_exit_status is True or python_exit_status is None: # See",
"HANDLE # On Windows, the parent ID of the worker process remains unchanged",
"we don't store exc_info in a variable, # see NOTE [ Python Traceback",
"so that we do not call .join() if program dies # before it",
"+ batch.exc_msg) else: raise batch.exc_type(batch.exc_msg) return batch def __getstate__(self): # TODO: add limited",
"multiline KeyError msg readable by working around # a python bug https://bugs.python.org/issue2651 if",
"out even if there is data in the queue. self.worker_result_queue.close() # Exit workers",
"On Windows, the parent ID of the worker process remains unchanged when the",
"self._put_indices() if isinstance(batch, _utils.ExceptionWrapper): # make multiline KeyError msg readable by working around",
"non-blocking API raise NotImplementedError(\"_DataLoaderIter cannot be pickled\") def _shutdown_workers(self): # See NOTE [",
"Indicate that no more data will be put on this queue by the",
"that thread shares the # same pipe handles with this loader thread. If",
"True # prime the prefetch loop for _ in range(2 * self.num_workers): self._put_indices()",
"torch.utils.data._utils import collate, signal_handling, MP_STATUS_CHECK_INTERVAL, \\ ExceptionWrapper, IS_WINDOWS if IS_WINDOWS: import ctypes from",
"arguments over via a pipe. # Therefore, we only add a worker to",
"that `pin_memory_thread` exited. self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None) self.pin_memory_thread.join() # Indicate that no more data will",
"C low-level # handlers, likely when the same fatal signal had already happened",
"`pin_memory_thread` # reads from. if hasattr(self, 'pin_memory_thread'): # Use hasattr in case error",
"_utils import torch import random import sys from torch._six import queue import os",
"self._get_batch() self.batches_outstanding -= 1 if idx != self.rcvd_idx: # store out-of-order samples self.reorder_dict[idx]",
"But I haven't received the final signal # (None) yet. I will keep",
"Python returns from C low-level # handlers, likely when the same fatal signal",
"data in `worker_result_queue` which `pin_memory_thread` # reads from. if hasattr(self, 'pin_memory_thread'): # Use",
"worker to self.workers list after # it started, so that we do not",
"True: assert (not self.shutdown and self.batches_outstanding > 0) idx, batch = self._get_batch() self.batches_outstanding",
"worker_init_fn=worker_init_fn) def __iter__(self): return HookDataloderIter(self) class HookDataset(object): def __before_hook__(self): pass def __after_hook__(self): pass",
"self.shutdown = False self.send_idx = 0 self.rcvd_idx = 0 self.reorder_dict = {} self.done_event",
"RuntimeError('DataLoader timed out after {} seconds'.format(self.timeout)) elif self.pin_memory: while self.pin_memory_thread.is_alive(): success, data =",
"thread. If the handle is # closed, Py3 will error in this case,",
"**must** be called after # `pin_memory_thread` is joined because that thread shares the",
"index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except queue.Empty: continue if r is None: # Received the final signal",
"+= 1 def _process_next_batch(self, batch): self.rcvd_idx += 1 self._put_indices() if isinstance(batch, _utils.ExceptionWrapper): #",
"# (only for non-Windows platforms), or the manual check below on errors #",
"# closed, Py3 will error in this case, but Py2 will just time",
"[] for i in range(self.num_workers): index_queue = multiprocessing.Queue() index_queue.cancel_join_thread() w = multiprocessing.Process( target=_worker_loop,",
"is important that we don't store exc_info in a variable, # see NOTE",
"pin_memory=pin_memory, drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn) def __iter__(self): return HookDataloderIter(self) class HookDataset(object): def __before_hook__(self): pass",
"now. for q in self.index_queues: q.put(None) # Indicate that no more data will",
"returns from C low-level # handlers, likely when the same fatal signal had",
"= self.data_queue.get(timeout=timeout) return (True, data) except Exception as e: # At timeout and",
"a SIGCHLD handler is also used for # worker failure detection. # #",
"# See (1) and the second half of the note. if not self.shutdown:",
"to a separate thread and then just sharing the data queue # but",
"C side data structure only at the # end. # # FIXME: Unfortunately,",
"0x00100000 self.manager_handle = self.kernel32.OpenProcess(SYNCHRONIZE, 0, self.manager_pid) if not self.manager_handle: raise ctypes.WinError(ctypes.get_last_error()) self.manager_dead =",
"Value obtained from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead = self.kernel32.WaitForSingleObject(self.manager_handle, 0) == 0 return not self.manager_dead",
"by running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` # in a loop. This is the only mechanism to",
"Exit workers now. for q in self.index_queues: q.put(None) # Indicate that no more",
"this case, `self.data_queue` is a `queue.Queue`,. But we don't # need to call",
"self.batches_outstanding == 0: self._shutdown_workers() raise StopIteration while True: assert (not self.shutdown and self.batches_outstanding",
"self.worker_init_fn, i)) w.daemon = True # NB: Process.start() actually take some time as",
"set the attribute. # First time do `worker_result_queue.put` in this process. # `cancel_join_thread`",
"don't # need to call `.task_done()` because we don't use `.join()`. else: while",
"w.join() finally: # Even though all this function does is putting into queues",
"will keep continuing until get it, and skip the # processing steps. continue",
"important that we don't store exc_info in a variable, # see NOTE [",
"this case, but Py2 will just time # out even if there is",
"data_queue.cancel_join_thread() if init_fn is not None: init_fn(worker_id) watchdog = ManagerWatchdog() while watchdog.is_alive(): try:",
"= os.getppid() != self.manager_pid return not self.manager_dead def _worker_loop(dataset, index_queue, data_queue, done_event, collate_fn,",
"a worker to self.workers list after # it started, so that we do",
"with the # sender status as the loop condition. # # This raises",
"attribute. # First time do `worker_result_queue.put` in this process. # `cancel_join_thread` in case",
"# # We check workers' status every `MP_STATUS_CHECK_INTERVAL` seconds, # which we achieve",
"data = self.data_queue.get(timeout=timeout) return (True, data) except Exception as e: # At timeout",
"def _get_batch(self): # Fetches data from `self.data_queue`. # # We check workers' status",
"if `pin_memory_thread` had # died at timeouts. if self.timeout > 0: success, data",
"false, i.e., pin_memory_thread died. raise RuntimeError('Pin memory thread exited unexpectedly') # In this",
"a separate thread and then just sharing the data queue # but signalling",
"if idx != self.rcvd_idx: # store out-of-order samples self.reorder_dict[idx] = batch continue return",
"# Normal exit when last reference is gone / iterator is depleted. #",
"= loader.pin_memory and torch.cuda.is_available() self.timeout = loader.timeout self.sample_iter = iter(self.batch_sampler) base_seed = torch.LongTensor(1).random_().item()",
"all(w.is_alive() for w in self.workers): pids_str = ', '.join(str(w.pid) for w in self.workers",
"for sharing an iterator # across multiple threads for HOGWILD. # Probably the",
"StopIteration while True: assert (not self.shutdown and self.batches_outstanding > 0) idx, batch =",
"queue.Empty): return (False, None) raise def _get_batch(self): # Fetches data from `self.data_queue`. #",
"self.done_event, self.collate_fn, base_seed + i, self.worker_init_fn, i)) w.daemon = True # NB: Process.start()",
"yet. I will keep continuing until get it, and skip the # processing",
"thread and then just sharing the data queue # but signalling the end",
"when a worker is killed by a signal, e.g., hanging in # `Event.set()`.",
"an iterator # across multiple threads for HOGWILD. # Probably the best way",
"# check if the next sample has already been generated if self.rcvd_idx in",
"can come from either the SIGCHLD handler in `_utils/signal_handling.py` # (only for non-Windows",
"# logic of this function. try: collate._use_shared_memory = True # Intialize C side",
"manually check whether any worker has # failed. Note that this is the",
"done_event, collate_fn, seed, init_fn, worker_id): # See NOTE [ Data Loader Multiprocessing Shutdown",
"pin_memory_thread.daemon = True pin_memory_thread.start() # Similar to workers (see comment above), we only",
"timeout and error, we manually check whether any worker has # failed. Note",
"self.collate_fn = loader.collate_fn self.batch_sampler = loader.batch_sampler self.num_workers = loader.num_workers self.pin_memory = loader.pin_memory and",
"watchdog.is_alive(): try: r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except queue.Empty: continue if r is None: #",
"w in self.workers: w.join() finally: # Even though all this function does is",
"this process. # `cancel_join_thread` in case that `pin_memory_thread` exited. self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None) self.pin_memory_thread.join() #",
"is gone, and the only way to check it through OS is to",
"self.workers list after # it started, so that we do not call .join()",
"index_queue.cancel_join_thread() w = multiprocessing.Process( target=_worker_loop, args=(self.dataset, index_queue, self.worker_result_queue, self.done_event, self.collate_fn, base_seed + i,",
"of this function. python_exit_status = _utils.python_exit_status if python_exit_status is True or python_exit_status is",
"C side signal handlers for SIGBUS and SIGSEGV. Python signal # module's handlers",
"actually take some time as it needs to # start a process and",
"if there is data in the queue. self.worker_result_queue.close() # Exit workers now. for",
"the final signal # (None) yet. I will keep continuing until get it,",
"python bug https://bugs.python.org/issue2651 if batch.exc_type == KeyError and \"\\n\" in batch.exc_msg: raise Exception(\"KeyError:\"",
"False def is_alive(self): if not self.manager_dead: self.manager_dead = os.getppid() != self.manager_pid return not",
"return data else: raise RuntimeError('DataLoader timed out after {} seconds'.format(self.timeout)) elif self.pin_memory: while",
"the queue. self.worker_result_queue.close() # Exit workers now. for q in self.index_queues: q.put(None) #",
"unexpectedly'.format(pids_str)) if isinstance(e, queue.Empty): return (False, None) raise def _get_batch(self): # Fetches data",
"# AssertionError: can only join a started process. w.start() self.index_queues.append(index_queue) self.workers.append(w) if self.pin_memory:",
"if the process status has changed. class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid()",
"target=_worker_loop, args=(self.dataset, index_queue, self.worker_result_queue, self.done_event, self.collate_fn, base_seed + i, self.worker_init_fn, i)) w.daemon =",
"had # died at timeouts. if self.timeout > 0: success, data = self._try_get_batch(self.timeout)",
"data def __next__(self): if self.num_workers == 0: # same-process loading indices = next(self.sample_iter)",
"first because exiting workers may leave # corrupted data in `worker_result_queue` which `pin_memory_thread`",
"i in range(self.num_workers): index_queue = multiprocessing.Queue() index_queue.cancel_join_thread() w = multiprocessing.Process( target=_worker_loop, args=(self.dataset, index_queue,",
"the worker process remains unchanged when the manager process # is gone, and",
"self.send_idx = 0 self.rcvd_idx = 0 self.reorder_dict = {} self.done_event = multiprocessing.Event() self.index_queues",
"handle # of the manager and ask if the process status has changed.",
"if self.rcvd_idx in self.reorder_dict: batch = self.reorder_dict.pop(self.rcvd_idx) return self._process_next_batch(batch) if self.batches_outstanding == 0:",
"the # end. # # FIXME: Unfortunately, for Windows, we are missing a",
"return (True, data) except Exception as e: # At timeout and error, we",
"not call .join() if program dies # before it starts, and __del__ tries",
"it starts, and __del__ tries to join but will get: # AssertionError: can",
"pass the arguments over via a pipe. # Therefore, we only add a",
"condition. # # This raises a `RuntimeError` if any worker died expectedly. This",
"# See (2) of the note. If Python is shutting down, do no-op.",
"we only add a worker to self.workers list after # it started, so",
"self.shutdown: self.shutdown = True try: self.done_event.set() # Exit `pin_memory_thread` first because exiting workers",
"for # Windows. For other platforms, a SIGCHLD handler is also used for",
"sample has already been generated if self.rcvd_idx in self.reorder_dict: batch = self.reorder_dict.pop(self.rcvd_idx) return",
"0, self.manager_pid) if not self.manager_handle: raise ctypes.WinError(ctypes.get_last_error()) self.manager_dead = False def is_alive(self): if",
"assert (not self.shutdown and self.batches_outstanding > 0) idx, batch = self._get_batch() self.batches_outstanding -=",
"ctypes.WinError(ctypes.get_last_error()) self.manager_dead = False def is_alive(self): if not self.manager_dead: # Value obtained from",
"add a worker to self.workers list after # it started, so that we",
"self.kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) self.kernel32.OpenProcess.argtypes = (DWORD, BOOL, DWORD) self.kernel32.OpenProcess.restype = HANDLE self.kernel32.WaitForSingleObject.argtypes",
"return not self.manager_dead def _worker_loop(dataset, index_queue, data_queue, done_event, collate_fn, seed, init_fn, worker_id): #",
"SIGCHLD handler in `_utils/signal_handling.py` # (only for non-Windows platforms), or the manual check",
"OS is to let the worker have a process handle # of the",
"See NOTE [ Data Loader Multiprocessing Shutdown Logic ] for details on #",
"torch.cuda.current_device(), self.done_event)) pin_memory_thread.daemon = True pin_memory_thread.start() # Similar to workers (see comment above),",
"self.kernel32.WaitForSingleObject(self.manager_handle, 0) == 0 return not self.manager_dead else: class ManagerWatchdog(object): def __init__(self): self.manager_pid",
"data queue # but signalling the end is tricky without a non-blocking API",
"and \"\\n\" in batch.exc_msg: raise Exception(\"KeyError:\" + batch.exc_msg) else: raise batch.exc_type(batch.exc_msg) return batch",
"self._put_indices() def __len__(self): return len(self.batch_sampler) def _try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL): # Tries to fetch data",
"random import sys from torch._six import queue import os from torch.utils.data._utils import collate,",
"torch.utils.data.DataLoader.__init__(self, dataset=dataset, batch_size=batch_size, shuffle=shuffle, sampler=sampler, batch_sampler=batch_sampler, num_workers=num_workers, collate_fn=collate_fn, pin_memory=pin_memory, drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn) def",
"torch._six import queue import os from torch.utils.data._utils import collate, signal_handling, MP_STATUS_CHECK_INTERVAL, \\ ExceptionWrapper,",
"closed, Py3 will error in this case, but Py2 will just time #",
"if not all(w.is_alive() for w in self.workers): pids_str = ', '.join(str(w.pid) for w",
"time as it needs to # start a process and pass the arguments",
"end. # # FIXME: Unfortunately, for Windows, we are missing a worker #",
"self.worker_result_queue.close() # Exit workers now. for q in self.index_queues: q.put(None) # Indicate that",
"though all this function does is putting into queues that # we have",
"= self._get_batch() self.batches_outstanding -= 1 if idx != self.rcvd_idx: # store out-of-order samples",
"def _worker_loop(dataset, index_queue, data_queue, done_event, collate_fn, seed, init_fn, worker_id): # See NOTE [",
"self.manager_dead = False def is_alive(self): if not self.manager_dead: self.manager_dead = os.getppid() != self.manager_pid",
"q.close() for w in self.workers: w.join() finally: # Even though all this function",
"-= 1 if idx != self.rcvd_idx: # store out-of-order samples self.reorder_dict[idx] = batch",
"timed out after {} seconds'.format(self.timeout)) elif self.pin_memory: while self.pin_memory_thread.is_alive(): success, data = self._try_get_batch()",
"which `pin_memory_thread` # reads from. if hasattr(self, 'pin_memory_thread'): # Use hasattr in case",
"/ iterator is depleted. # See (1) and the second half of the",
"this is by moving the sample pushing # to a separate thread and",
"__init__(self, dataset, batch_size=1, shuffle=False, sampler=None, batch_sampler=None, num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None): torch.utils.data.DataLoader.__init__(self,",
"this loader thread. If the handle is # closed, Py3 will error in",
"collate, signal_handling, MP_STATUS_CHECK_INTERVAL, \\ ExceptionWrapper, IS_WINDOWS if IS_WINDOWS: import ctypes from ctypes.wintypes import",
"else: self.data_queue = self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self), tuple(w.pid for w in self.workers)) _utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set =",
"self.worker_queue_idx = 0 self.worker_result_queue = multiprocessing.Queue() self.batches_outstanding = 0 self.worker_pids_set = False self.shutdown",
"= self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self), tuple(w.pid for w in self.workers)) _utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set = True #",
"# In this case, `self.data_queue` is a `queue.Queue`,. But we don't # need",
"_utils.ExceptionWrapper): # make multiline KeyError msg readable by working around # a python",
"Shutdown Logic ] for details on # the logic of this function. python_exit_status",
"batch.exc_msg) else: raise batch.exc_type(batch.exc_msg) return batch def __getstate__(self): # TODO: add limited pickling",
"signal # (None) yet. I will keep continuing until get it, and skip",
"data_queue.put((idx, ExceptionWrapper(sys.exc_info()))) else: data_queue.put((idx, samples)) del samples except KeyboardInterrupt: # Main process will",
"self.batch_sampler = loader.batch_sampler self.num_workers = loader.num_workers self.pin_memory = loader.pin_memory and torch.cuda.is_available() self.timeout =",
"while condition is false, i.e., pin_memory_thread died. raise RuntimeError('Pin memory thread exited unexpectedly')",
"if self.batches_outstanding == 0: self._shutdown_workers() raise StopIteration while True: assert (not self.shutdown and",
"# Use hasattr in case error happens before we set the attribute. #",
"can # happen when a worker is killed by a signal, e.g., hanging",
"batch.exc_type == KeyError and \"\\n\" in batch.exc_msg: raise Exception(\"KeyError:\" + batch.exc_msg) else: raise",
"self.pin_memory_thread.join() # Indicate that no more data will be put on this queue",
"target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue, self.data_queue, torch.cuda.current_device(), self.done_event)) pin_memory_thread.daemon = True pin_memory_thread.start() # Similar to workers",
"self.num_workers indices = next(self.sample_iter, None) if indices is None: return self.index_queues[self.worker_queue_idx].put((self.send_idx, indices)) self.worker_queue_idx",
"received the final signal # (None) yet. I will keep continuing until get",
"queue. self.worker_result_queue.close() # Exit workers now. for q in self.index_queues: q.put(None) # Indicate",
"at timeouts. if self.timeout > 0: success, data = self._try_get_batch(self.timeout) if success: return",
"Shutdown Logic ] for details on the # logic of this function. try:",
"return (False, None) raise def _get_batch(self): # Fetches data from `self.data_queue`. # #",
"sys from torch._six import queue import os from torch.utils.data._utils import collate, signal_handling, MP_STATUS_CHECK_INTERVAL,",
"errors # and timeouts. # # Returns a 2-tuple: # (bool: whether successfully",
"= loader.dataset self.collate_fn = loader.collate_fn self.batch_sampler = loader.batch_sampler self.num_workers = loader.num_workers self.pin_memory =",
"self.num_workers > 0: self._shutdown_workers() class HookDataLoader(torch.utils.data.DataLoader): def __init__(self, dataset, batch_size=1, shuffle=False, sampler=None, batch_sampler=None,",
"Problem ] data_queue.put((idx, ExceptionWrapper(sys.exc_info()))) else: data_queue.put((idx, samples)) del samples except KeyboardInterrupt: # Main",
"on the # logic of this function. try: collate._use_shared_memory = True # Intialize",
"gone, and the only way to check it through OS is to let",
"None: # Received the final signal assert done_event.is_set() return elif done_event.is_set(): # Done",
"to do this is by moving the sample pushing # to a separate",
"logic of this function. try: collate._use_shared_memory = True # Intialize C side signal",
"= ', '.join(str(w.pid) for w in self.workers if not w.is_alive()) raise RuntimeError('DataLoader worker",
"multiprocessing.Queue() self.batches_outstanding = 0 self.worker_pids_set = False self.shutdown = False self.send_idx = 0",
"we don't use `.join()`. else: while True: success, data = self._try_get_batch() if success:",
"it, and skip the # processing steps. continue idx, batch_indices = r try:",
"1 self.send_idx += 1 def _process_next_batch(self, batch): self.rcvd_idx += 1 self._put_indices() if isinstance(batch,",
"Data Loader Multiprocessing Shutdown Logic ] for details on the # logic of",
"e.g., hanging in # `Event.set()`. So we need to guard this with SIGCHLD",
"across multiple threads for HOGWILD. # Probably the best way to do this",
"ExceptionWrapper(sys.exc_info()))) else: data_queue.put((idx, samples)) del samples except KeyboardInterrupt: # Main process will raise",
"# while condition is false, i.e., pin_memory_thread died. raise RuntimeError('Pin memory thread exited",
"# Intialize C side signal handlers for SIGBUS and SIGSEGV. Python signal #",
"store out-of-order samples self.reorder_dict[idx] = batch continue return self._process_next_batch(batch) next = __next__ #",
"weird things can # happen when a worker is killed by a signal,",
"leave # corrupted data in `worker_result_queue` which `pin_memory_thread` # reads from. if hasattr(self,",
"Data Loader Multiprocessing Shutdown Logic ] for details on # the logic of",
"are missing a worker # error detection mechanism here in this function, as",
"'pin_memory_thread'): # Use hasattr in case error happens before we set the attribute.",
"above), we only register # pin_memory_thread once it is started. self.pin_memory_thread = pin_memory_thread",
"(not self.shutdown and self.batches_outstanding > 0) idx, batch = self._get_batch() self.batches_outstanding -= 1",
"# a python bug https://bugs.python.org/issue2651 if batch.exc_type == KeyError and \"\\n\" in batch.exc_msg:",
"for non-Windows platforms), or the manual check below on errors # and timeouts.",
"self.pin_memory: while self.pin_memory_thread.is_alive(): success, data = self._try_get_batch() if success: return data else: #",
"else: data_queue.put((idx, samples)) del samples except KeyboardInterrupt: # Main process will raise KeyboardInterrupt",
"take some time as it needs to # start a process and pass",
"multiprocessing.Queue() index_queue.cancel_join_thread() w = multiprocessing.Process( target=_worker_loop, args=(self.dataset, index_queue, self.worker_result_queue, self.done_event, self.collate_fn, base_seed +",
"handles with this loader thread. If the handle is # closed, Py3 will",
"_utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set = False def __del__(self): if self.num_workers > 0: self._shutdown_workers() class HookDataLoader(torch.utils.data.DataLoader):",
"sampler and online train loader class HookDataloderIter(object): def __init__(self, loader): self.dataset = loader.dataset",
"we are missing a worker # error detection mechanism here in this function,",
"things can # happen when a worker is killed by a signal, e.g.,",
"[ Data Loader Multiprocessing Shutdown Logic ] for details on # the logic",
"= [] self.workers = [] for i in range(self.num_workers): index_queue = multiprocessing.Queue() index_queue.cancel_join_thread()",
"= ctypes.WinDLL('kernel32', use_last_error=True) self.kernel32.OpenProcess.argtypes = (DWORD, BOOL, DWORD) self.kernel32.OpenProcess.restype = HANDLE self.kernel32.WaitForSingleObject.argtypes =",
"same fatal signal had already happened # again. # https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers() torch.set_num_threads(1) random.seed(seed)",
"samples self.reorder_dict[idx] = batch continue return self._process_next_batch(batch) next = __next__ # Python 2",
"only mechanism to detect worker failures for # Windows. For other platforms, a",
"_worker_loop(dataset, index_queue, data_queue, done_event, collate_fn, seed, init_fn, worker_id): # See NOTE [ Data",
"exit when last reference is gone / iterator is depleted. # See (1)",
"handler is also used for # worker failure detection. # # If `pin_memory=True`,",
"while True: assert (not self.shutdown and self.batches_outstanding > 0) idx, batch = self._get_batch()",
"else: while True: success, data = self._try_get_batch() if success: return data def __next__(self):",
"= True try: self.done_event.set() # Exit `pin_memory_thread` first because exiting workers may leave",
"= loader.num_workers self.pin_memory = loader.pin_memory and torch.cuda.is_available() self.timeout = loader.timeout self.sample_iter = iter(self.batch_sampler)",
"this queue by the # current process. q.close() for w in self.workers: w.join()",
"is # closed, Py3 will error in this case, but Py2 will just",
"to fetch data from `data_queue` for a given timeout. This can # also",
"`data_queue` for a given timeout. This can # also be used as inner",
"`cancel_join_thread` in case that `pin_memory_thread` exited. self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None) self.pin_memory_thread.join() # Indicate that no",
"DWORD) self.kernel32.OpenProcess.restype = HANDLE self.kernel32.WaitForSingleObject.argtypes = (HANDLE, DWORD) self.kernel32.WaitForSingleObject.restype = DWORD # Value",
"`self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` # in a loop. This is the only mechanism to detect worker",
"python_exit_status is None: # See (2) of the note. If Python is shutting",
"import queue import os from torch.utils.data._utils import collate, signal_handling, MP_STATUS_CHECK_INTERVAL, \\ ExceptionWrapper, IS_WINDOWS",
"init_fn(worker_id) watchdog = ManagerWatchdog() while watchdog.is_alive(): try: r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except queue.Empty: continue",
"generated if self.rcvd_idx in self.reorder_dict: batch = self.reorder_dict.pop(self.rcvd_idx) return self._process_next_batch(batch) if self.batches_outstanding ==",
"shuffle=False, sampler=None, batch_sampler=None, num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None): torch.utils.data.DataLoader.__init__(self, dataset=dataset, batch_size=batch_size, shuffle=shuffle,",
"try: self.done_event.set() # Exit `pin_memory_thread` first because exiting workers may leave # corrupted",
"API raise NotImplementedError(\"_DataLoaderIter cannot be pickled\") def _shutdown_workers(self): # See NOTE [ Data",
"haven't received the final signal # (None) yet. I will keep continuing until",
"# Received the final signal assert done_event.is_set() return elif done_event.is_set(): # Done event",
"of the note. if not self.shutdown: self.shutdown = True try: self.done_event.set() # Exit",
"= multiprocessing.Process( target=_worker_loop, args=(self.dataset, index_queue, self.worker_result_queue, self.done_event, self.collate_fn, base_seed + i, self.worker_init_fn, i))",
"BOOL, DWORD) self.kernel32.OpenProcess.restype = HANDLE self.kernel32.WaitForSingleObject.argtypes = (HANDLE, DWORD) self.kernel32.WaitForSingleObject.restype = DWORD #",
"is None: # Received the final signal assert done_event.is_set() return elif done_event.is_set(): #",
"after # `pin_memory_thread` is joined because that thread shares the # same pipe",
"or the manual check below on errors # and timeouts. # # Returns",
"= multiprocessing.Queue() self.batches_outstanding = 0 self.worker_pids_set = False self.shutdown = False self.send_idx =",
"At timeout and error, we manually check whether any worker has # failed.",
"{} seconds'.format(self.timeout)) elif self.pin_memory: while self.pin_memory_thread.is_alive(): success, data = self._try_get_batch() if success: return",
"dataset.__before_hook__() samples = collate_fn([dataset[i] for i in batch_indices]) dataset.__after_hook__() except Exception: # It",
"self.worker_pids_set = True # prime the prefetch loop for _ in range(2 *",
"loader.batch_sampler self.num_workers = loader.num_workers self.pin_memory = loader.pin_memory and torch.cuda.is_available() self.timeout = loader.timeout self.sample_iter",
"Process.start() actually take some time as it needs to # start a process",
"data else: # while condition is false, i.e., pin_memory_thread died. raise RuntimeError('Pin memory",
"logic of this function. python_exit_status = _utils.python_exit_status if python_exit_status is True or python_exit_status",
"SIGSEGV. Python signal # module's handlers are executed after Python returns from C",
"next = __next__ # Python 2 compatibility def __iter__(self): return self def _put_indices(self):",
"Traceback Reference Cycle Problem ] data_queue.put((idx, ExceptionWrapper(sys.exc_info()))) else: data_queue.put((idx, samples)) del samples except",
"Loader Multiprocessing Shutdown Logic ] for details on the # logic of this",
"second half of the note. if not self.shutdown: self.shutdown = True try: self.done_event.set()",
"self.dataset = loader.dataset self.collate_fn = loader.collate_fn self.batch_sampler = loader.batch_sampler self.num_workers = loader.num_workers self.pin_memory",
"manager process # is gone, and the only way to check it through",
"shares the # same pipe handles with this loader thread. If the handle",
"from torch._six import queue import os from torch.utils.data._utils import collate, signal_handling, MP_STATUS_CHECK_INTERVAL, \\",
"which we achieve by running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` # in a loop. This is the",
"in self.workers): pids_str = ', '.join(str(w.pid) for w in self.workers if not w.is_alive())",
"# processing steps. continue idx, batch_indices = r try: dataset.__before_hook__() samples = collate_fn([dataset[i]",
"only add a worker to self.workers list after # it started, so that",
"continue return self._process_next_batch(batch) next = __next__ # Python 2 compatibility def __iter__(self): return",
"# of the manager and ask if the process status has changed. class",
"timeout. This can # also be used as inner loop of fetching without",
"!= self.rcvd_idx: # store out-of-order samples self.reorder_dict[idx] = batch continue return self._process_next_batch(batch) next",
"no-op. return # Normal exit when last reference is gone / iterator is",
"Logic ] for details on # the logic of this function. python_exit_status =",
"after # it started, so that we do not call .join() if program",
"w.is_alive()) raise RuntimeError('DataLoader worker (pid(s) {}) exited unexpectedly'.format(pids_str)) if isinstance(e, queue.Empty): return (False,",
"# FIXME: Unfortunately, for Windows, we are missing a worker # error detection",
"self.batches_outstanding < 2 * self.num_workers indices = next(self.sample_iter, None) if indices is None:",
"Windows to detect # worker failures. if not all(w.is_alive() for w in self.workers):",
"index_queue, self.worker_result_queue, self.done_event, self.collate_fn, base_seed + i, self.worker_init_fn, i)) w.daemon = True #",
"= True # NB: Process.start() actually take some time as it needs to",
"successfully get data, any: data if successful else None) try: data = self.data_queue.get(timeout=timeout)",
"# Similar to workers (see comment above), we only register # pin_memory_thread once",
"self._process_next_batch(batch) if self.batches_outstanding == 0: self._shutdown_workers() raise StopIteration while True: assert (not self.shutdown",
"True try: self.done_event.set() # Exit `pin_memory_thread` first because exiting workers may leave #",
"is None: return self.index_queues[self.worker_queue_idx].put((self.send_idx, indices)) self.worker_queue_idx = (self.worker_queue_idx + 1) % self.num_workers self.batches_outstanding",
"again. # https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers() torch.set_num_threads(1) random.seed(seed) torch.manual_seed(seed) data_queue.cancel_join_thread() if init_fn is not None:",
"the final signal assert done_event.is_set() return elif done_event.is_set(): # Done event is set.",
"__next__(self): if self.num_workers == 0: # same-process loading indices = next(self.sample_iter) # may",
"is_alive(self): if not self.manager_dead: # Value obtained from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead = self.kernel32.WaitForSingleObject(self.manager_handle, 0)",
"error detection mechanism here in this function, as it # doesn't provide a",
"the attribute. # First time do `worker_result_queue.put` in this process. # `cancel_join_thread` in",
"queue.Empty: continue if r is None: # Received the final signal assert done_event.is_set()",
"indices]) if self.pin_memory: batch = _utils.pin_memory.pin_memory_batch(batch) return batch # check if the next",
"Even though all this function does is putting into queues that # we",
"putting into queues that # we have called `cancel_join_thread` on, weird things can",
"without timeout, with the # sender status as the loop condition. # #",
"from the C side data structure only at the # end. # #",
"Similar to workers (see comment above), we only register # pin_memory_thread once it",
"https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers() torch.set_num_threads(1) random.seed(seed) torch.manual_seed(seed) data_queue.cancel_join_thread() if init_fn is not None: init_fn(worker_id) watchdog",
"next(self.sample_iter) # may raise StopIteration batch = self.collate_fn([self.dataset[i] for i in indices]) if",
"worker (pid(s) {}) exited unexpectedly'.format(pids_str)) if isinstance(e, queue.Empty): return (False, None) raise def",
"!= self.manager_pid return not self.manager_dead def _worker_loop(dataset, index_queue, data_queue, done_event, collate_fn, seed, init_fn,",
"for i in range(self.num_workers): index_queue = multiprocessing.Queue() index_queue.cancel_join_thread() w = multiprocessing.Process( target=_worker_loop, args=(self.dataset,",
"add limited pickling support for sharing an iterator # across multiple threads for",
"is killed by a signal, e.g., hanging in # `Event.set()`. So we need",
"__getstate__(self): # TODO: add limited pickling support for sharing an iterator # across",
"and online train loader class HookDataloderIter(object): def __init__(self, loader): self.dataset = loader.dataset self.collate_fn",
"assert self.batches_outstanding < 2 * self.num_workers indices = next(self.sample_iter, None) if indices is",
"+= 1 self._put_indices() if isinstance(batch, _utils.ExceptionWrapper): # make multiline KeyError msg readable by",
"w.daemon = True # NB: Process.start() actually take some time as it needs",
"over via a pipe. # Therefore, we only add a worker to self.workers",
"exiting workers may leave # corrupted data in `worker_result_queue` which `pin_memory_thread` # reads",
"from https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE = 0x00100000 self.manager_handle = self.kernel32.OpenProcess(SYNCHRONIZE, 0, self.manager_pid) if not self.manager_handle:",
"idx != self.rcvd_idx: # store out-of-order samples self.reorder_dict[idx] = batch continue return self._process_next_batch(batch)",
"data else: raise RuntimeError('DataLoader timed out after {} seconds'.format(self.timeout)) elif self.pin_memory: while self.pin_memory_thread.is_alive():",
"loading indices = next(self.sample_iter) # may raise StopIteration batch = self.collate_fn([self.dataset[i] for i",
"ctypes.WinDLL('kernel32', use_last_error=True) self.kernel32.OpenProcess.argtypes = (DWORD, BOOL, DWORD) self.kernel32.OpenProcess.restype = HANDLE self.kernel32.WaitForSingleObject.argtypes = (HANDLE,",
"check workers' status every `MP_STATUS_CHECK_INTERVAL` seconds, # which we achieve by running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)`",
"dies # before it starts, and __del__ tries to join but will get:",
"Windows. For other platforms, a SIGCHLD handler is also used for # worker",
"can only join a started process. w.start() self.index_queues.append(index_queue) self.workers.append(w) if self.pin_memory: self.data_queue =",
"and skip the # processing steps. continue idx, batch_indices = r try: dataset.__before_hook__()",
"in self.reorder_dict: batch = self.reorder_dict.pop(self.rcvd_idx) return self._process_next_batch(batch) if self.batches_outstanding == 0: self._shutdown_workers() raise",
"current process. This **must** be called after # `pin_memory_thread` is joined because that",
"killed by a signal, e.g., hanging in # `Event.set()`. So we need to",
"(pid(s) {}) exited unexpectedly'.format(pids_str)) if isinstance(e, queue.Empty): return (False, None) raise def _get_batch(self):",
"let the worker have a process handle # of the manager and ask",
"do not call .join() if program dies # before it starts, and __del__",
"None) try: data = self.data_queue.get(timeout=timeout) return (True, data) except Exception as e: #",
"by working around # a python bug https://bugs.python.org/issue2651 if batch.exc_type == KeyError and",
"and the second half of the note. if not self.shutdown: self.shutdown = True",
"For other platforms, a SIGCHLD handler is also used for # worker failure",
"ManagerWatchdog() while watchdog.is_alive(): try: r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except queue.Empty: continue if r is",
"and then just sharing the data queue # but signalling the end is",
"process. q.close() for w in self.workers: w.join() finally: # Even though all this",
"be called after # `pin_memory_thread` is joined because that thread shares the #",
"self.shutdown and self.batches_outstanding > 0) idx, batch = self._get_batch() self.batches_outstanding -= 1 if",
"self.manager_dead def _worker_loop(dataset, index_queue, data_queue, done_event, collate_fn, seed, init_fn, worker_id): # See NOTE",
"raise RuntimeError('DataLoader worker (pid(s) {}) exited unexpectedly'.format(pids_str)) if isinstance(e, queue.Empty): return (False, None)",
"# reads from. if hasattr(self, 'pin_memory_thread'): # Use hasattr in case error happens",
"self.batches_outstanding -= 1 if idx != self.rcvd_idx: # store out-of-order samples self.reorder_dict[idx] =",
"timeout, with the # sender status as the loop condition. # # This",
"torch.multiprocessing as multiprocessing import threading from torch.utils.data import _utils import torch import random",
"manager and ask if the process status has changed. class ManagerWatchdog(object): def __init__(self):",
"__len__(self): return len(self.batch_sampler) def _try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL): # Tries to fetch data from `data_queue`",
"= False def is_alive(self): if not self.manager_dead: self.manager_dead = os.getppid() != self.manager_pid return",
"the worker have a process handle # of the manager and ask if",
"in this case, but Py2 will just time # out even if there",
"in batch_indices]) dataset.__after_hook__() except Exception: # It is important that we don't store",
"signal, e.g., hanging in # `Event.set()`. So we need to guard this with",
"even if there is data in the queue. self.worker_result_queue.close() # Exit workers now.",
"reads from. if hasattr(self, 'pin_memory_thread'): # Use hasattr in case error happens before",
"', '.join(str(w.pid) for w in self.workers if not w.is_alive()) raise RuntimeError('DataLoader worker (pid(s)",
"# On Windows, the parent ID of the worker process remains unchanged when",
"`Event.set()`. So we need to guard this with SIGCHLD handler, # and remove",
"Python signal # module's handlers are executed after Python returns from C low-level",
"signal # module's handlers are executed after Python returns from C low-level #",
"# Returns a 2-tuple: # (bool: whether successfully get data, any: data if",
"be pickled\") def _shutdown_workers(self): # See NOTE [ Data Loader Multiprocessing Shutdown Logic",
"worker is killed by a signal, e.g., hanging in # `Event.set()`. So we",
"the # current process. This **must** be called after # `pin_memory_thread` is joined",
"the process status has changed. class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.kernel32",
"_utils.pin_memory.pin_memory_batch(batch) return batch # check if the next sample has already been generated",
"`worker_result_queue` which `pin_memory_thread` # reads from. if hasattr(self, 'pin_memory_thread'): # Use hasattr in",
"joined because that thread shares the # same pipe handles with this loader",
"process and pass the arguments over via a pipe. # Therefore, we only",
"fatal signal had already happened # again. # https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers() torch.set_num_threads(1) random.seed(seed) torch.manual_seed(seed)",
"for _ in range(2 * self.num_workers): self._put_indices() def __len__(self): return len(self.batch_sampler) def _try_get_batch(self,",
"HOGWILD. # Probably the best way to do this is by moving the",
"del samples except KeyboardInterrupt: # Main process will raise KeyboardInterrupt anyways. pass #",
"check below on errors # and timeouts. # # Returns a 2-tuple: #",
"see NOTE [ Python Traceback Reference Cycle Problem ] data_queue.put((idx, ExceptionWrapper(sys.exc_info()))) else: data_queue.put((idx,",
"workers now. for q in self.index_queues: q.put(None) # Indicate that no more data",
"Value obtained from https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE = 0x00100000 self.manager_handle = self.kernel32.OpenProcess(SYNCHRONIZE, 0, self.manager_pid) if",
"by a signal, e.g., hanging in # `Event.set()`. So we need to guard",
"self.num_workers = loader.num_workers self.pin_memory = loader.pin_memory and torch.cuda.is_available() self.timeout = loader.timeout self.sample_iter =",
"had already happened # again. # https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers() torch.set_num_threads(1) random.seed(seed) torch.manual_seed(seed) data_queue.cancel_join_thread() if",
"this function. try: collate._use_shared_memory = True # Intialize C side signal handlers for",
"more data will be put on this queue by the # current process.",
"Py3 will error in this case, but Py2 will just time # out",
"on this queue by the # current process. q.close() for w in self.workers:",
"pids from the C side data structure only at the # end. #",
"to # start a process and pass the arguments over via a pipe.",
"queues that # we have called `cancel_join_thread` on, weird things can # happen",
"MP_STATUS_CHECK_INTERVAL, \\ ExceptionWrapper, IS_WINDOWS if IS_WINDOWS: import ctypes from ctypes.wintypes import DWORD, BOOL,",
"timeouts. # # Returns a 2-tuple: # (bool: whether successfully get data, any:",
"that we don't store exc_info in a variable, # see NOTE [ Python",
"process. # `cancel_join_thread` in case that `pin_memory_thread` exited. self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None) self.pin_memory_thread.join() # Indicate",
"changed. class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) self.kernel32.OpenProcess.argtypes",
"self.workers): pids_str = ', '.join(str(w.pid) for w in self.workers if not w.is_alive()) raise",
"# current process. q.close() for w in self.workers: w.join() finally: # Even though",
"iter(self.batch_sampler) base_seed = torch.LongTensor(1).random_().item() if self.num_workers > 0: self.worker_init_fn = loader.worker_init_fn self.worker_queue_idx =",
"if not self.shutdown: self.shutdown = True try: self.done_event.set() # Exit `pin_memory_thread` first because",
"there is data in the queue. self.worker_result_queue.close() # Exit workers now. for q",
"if batch.exc_type == KeyError and \"\\n\" in batch.exc_msg: raise Exception(\"KeyError:\" + batch.exc_msg) else:",
"likely when the same fatal signal had already happened # again. # https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers",
"self.timeout = loader.timeout self.sample_iter = iter(self.batch_sampler) base_seed = torch.LongTensor(1).random_().item() if self.num_workers > 0:",
"handlers, likely when the same fatal signal had already happened # again. #",
"# Tries to fetch data from `data_queue` for a given timeout. This can",
"# At timeout and error, we manually check whether any worker has #",
"= HANDLE self.kernel32.WaitForSingleObject.argtypes = (HANDLE, DWORD) self.kernel32.WaitForSingleObject.restype = DWORD # Value obtained from",
"FIXME: Unfortunately, for Windows, we are missing a worker # error detection mechanism",
"a `RuntimeError` if any worker died expectedly. This error # can come from",
"mechanism for Windows to detect # worker failures. if not all(w.is_alive() for w",
"Windows, the parent ID of the worker process remains unchanged when the manager",
"for w in self.workers)) _utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set = True # prime the prefetch loop",
"= collate_fn([dataset[i] for i in batch_indices]) dataset.__after_hook__() except Exception: # It is important",
"torch.set_num_threads(1) random.seed(seed) torch.manual_seed(seed) data_queue.cancel_join_thread() if init_fn is not None: init_fn(worker_id) watchdog = ManagerWatchdog()",
"loop. This is the only mechanism to detect worker failures for # Windows.",
"need to guard this with SIGCHLD handler, # and remove pids from the",
"details on # the logic of this function. python_exit_status = _utils.python_exit_status if python_exit_status",
"batch): self.rcvd_idx += 1 self._put_indices() if isinstance(batch, _utils.ExceptionWrapper): # make multiline KeyError msg",
"multiprocessing import threading from torch.utils.data import _utils import torch import random import sys",
"self.num_workers self.batches_outstanding += 1 self.send_idx += 1 def _process_next_batch(self, batch): self.rcvd_idx += 1",
"q.put(None) # Indicate that no more data will be put on this queue",
"batch = _utils.pin_memory.pin_memory_batch(batch) return batch # check if the next sample has already",
"KeyError and \"\\n\" in batch.exc_msg: raise Exception(\"KeyError:\" + batch.exc_msg) else: raise batch.exc_type(batch.exc_msg) return",
"from `self.data_queue`. # # We check workers' status every `MP_STATUS_CHECK_INTERVAL` seconds, # which",
"2 * self.num_workers indices = next(self.sample_iter, None) if indices is None: return self.index_queues[self.worker_queue_idx].put((self.send_idx,",
"self.done_event)) pin_memory_thread.daemon = True pin_memory_thread.start() # Similar to workers (see comment above), we",
"because exiting workers may leave # corrupted data in `worker_result_queue` which `pin_memory_thread` #",
"# Balanced batch sampler and online train loader class HookDataloderIter(object): def __init__(self, loader):",
"in self.workers)) _utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set = True # prime the prefetch loop for _",
"either the SIGCHLD handler in `_utils/signal_handling.py` # (only for non-Windows platforms), or the",
"iterator is depleted. # See (1) and the second half of the note.",
"= self.reorder_dict.pop(self.rcvd_idx) return self._process_next_batch(batch) if self.batches_outstanding == 0: self._shutdown_workers() raise StopIteration while True:",
"program dies # before it starts, and __del__ tries to join but will",
"https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead = self.kernel32.WaitForSingleObject(self.manager_handle, 0) == 0 return not self.manager_dead else: class ManagerWatchdog(object):",
"already happened # again. # https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers() torch.set_num_threads(1) random.seed(seed) torch.manual_seed(seed) data_queue.cancel_join_thread() if init_fn",
"__iter__(self): return self def _put_indices(self): assert self.batches_outstanding < 2 * self.num_workers indices =",
"separate thread and then just sharing the data queue # but signalling the",
"= 0 self.worker_result_queue = multiprocessing.Queue() self.batches_outstanding = 0 self.worker_pids_set = False self.shutdown =",
"First time do `worker_result_queue.put` in this process. # `cancel_join_thread` in case that `pin_memory_thread`",
"_process_next_batch(self, batch): self.rcvd_idx += 1 self._put_indices() if isinstance(batch, _utils.ExceptionWrapper): # make multiline KeyError",
"collate._use_shared_memory = True # Intialize C side signal handlers for SIGBUS and SIGSEGV.",
"% self.num_workers self.batches_outstanding += 1 self.send_idx += 1 def _process_next_batch(self, batch): self.rcvd_idx +=",
"also be used as inner loop of fetching without timeout, with the #",
"achieve by running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` # in a loop. This is the only mechanism",
"success: return data def __next__(self): if self.num_workers == 0: # same-process loading indices",
"the SIGCHLD handler in `_utils/signal_handling.py` # (only for non-Windows platforms), or the manual",
"0: self._shutdown_workers() class HookDataLoader(torch.utils.data.DataLoader): def __init__(self, dataset, batch_size=1, shuffle=False, sampler=None, batch_sampler=None, num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate,",
"# we have called `cancel_join_thread` on, weird things can # happen when a",
"moving the sample pushing # to a separate thread and then just sharing",
"next sample has already been generated if self.rcvd_idx in self.reorder_dict: batch = self.reorder_dict.pop(self.rcvd_idx)",
"w in self.workers): pids_str = ', '.join(str(w.pid) for w in self.workers if not",
"fetch data from `data_queue` for a given timeout. This can # also be",
"= self._try_get_batch(self.timeout) if success: return data else: raise RuntimeError('DataLoader timed out after {}",
"check if the next sample has already been generated if self.rcvd_idx in self.reorder_dict:",
"Windows, we are missing a worker # error detection mechanism here in this",
"in self.index_queues: q.put(None) # Indicate that no more data will be put on",
"gone / iterator is depleted. # See (1) and the second half of",
"It is important that we don't store exc_info in a variable, # see",
"= self.kernel32.OpenProcess(SYNCHRONIZE, 0, self.manager_pid) if not self.manager_handle: raise ctypes.WinError(ctypes.get_last_error()) self.manager_dead = False def",
"down, do no-op. return # Normal exit when last reference is gone /",
"also need check if `pin_memory_thread` had # died at timeouts. if self.timeout >",
"# `cancel_join_thread` in case that `pin_memory_thread` exited. self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None) self.pin_memory_thread.join() # Indicate that",
"any: data if successful else None) try: data = self.data_queue.get(timeout=timeout) return (True, data)",
"in this process. # `cancel_join_thread` in case that `pin_memory_thread` exited. self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None) self.pin_memory_thread.join()",
"This error # can come from either the SIGCHLD handler in `_utils/signal_handling.py` #",
"while watchdog.is_alive(): try: r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except queue.Empty: continue if r is None:",
"as it needs to # start a process and pass the arguments over",
"= ManagerWatchdog() while watchdog.is_alive(): try: r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except queue.Empty: continue if r",
"steps. continue idx, batch_indices = r try: dataset.__before_hook__() samples = collate_fn([dataset[i] for i",
"self.workers: w.join() finally: # Even though all this function does is putting into",
"try: r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except queue.Empty: continue if r is None: # Received",
"in this function, as it # doesn't provide a SIGCHLD handler. if self.worker_pids_set:",
"remains unchanged when the manager process # is gone, and the only way",
"False self.shutdown = False self.send_idx = 0 self.rcvd_idx = 0 self.reorder_dict = {}",
"self.num_workers > 0: self.worker_init_fn = loader.worker_init_fn self.worker_queue_idx = 0 self.worker_result_queue = multiprocessing.Queue() self.batches_outstanding",
"== 0: # same-process loading indices = next(self.sample_iter) # may raise StopIteration batch",
"worker_id): # See NOTE [ Data Loader Multiprocessing Shutdown Logic ] for details",
"signal assert done_event.is_set() return elif done_event.is_set(): # Done event is set. But I",
"process # is gone, and the only way to check it through OS",
"for Windows, we are missing a worker # error detection mechanism here in",
"no more data will be put on this queue by the # current",
"process handle # of the manager and ask if the process status has",
"(DWORD, BOOL, DWORD) self.kernel32.OpenProcess.restype = HANDLE self.kernel32.WaitForSingleObject.argtypes = (HANDLE, DWORD) self.kernel32.WaitForSingleObject.restype = DWORD",
"from torch.utils.data import _utils import torch import random import sys from torch._six import",
"import _utils import torch import random import sys from torch._six import queue import",
"shutting down, do no-op. return # Normal exit when last reference is gone",
"pickling support for sharing an iterator # across multiple threads for HOGWILD. #",
"torch.cuda.is_available() self.timeout = loader.timeout self.sample_iter = iter(self.batch_sampler) base_seed = torch.LongTensor(1).random_().item() if self.num_workers >",
"# see NOTE [ Python Traceback Reference Cycle Problem ] data_queue.put((idx, ExceptionWrapper(sys.exc_info()))) else:",
"_shutdown_workers(self): # See NOTE [ Data Loader Multiprocessing Shutdown Logic ] for details",
"queue by the # current process. This **must** be called after # `pin_memory_thread`",
"ID of the worker process remains unchanged when the manager process # is",
"same pipe handles with this loader thread. If the handle is # closed,",
"= multiprocessing.Event() self.index_queues = [] self.workers = [] for i in range(self.num_workers): index_queue",
"= 0x00100000 self.manager_handle = self.kernel32.OpenProcess(SYNCHRONIZE, 0, self.manager_pid) if not self.manager_handle: raise ctypes.WinError(ctypes.get_last_error()) self.manager_dead",
"unchanged when the manager process # is gone, and the only way to",
"call `.task_done()` because we don't use `.join()`. else: while True: success, data =",
"to join but will get: # AssertionError: can only join a started process.",
"pickled\") def _shutdown_workers(self): # See NOTE [ Data Loader Multiprocessing Shutdown Logic ]",
"provide a SIGCHLD handler. if self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set = False def __del__(self): if",
"= torch.LongTensor(1).random_().item() if self.num_workers > 0: self.worker_init_fn = loader.worker_init_fn self.worker_queue_idx = 0 self.worker_result_queue",
"handler in `_utils/signal_handling.py` # (only for non-Windows platforms), or the manual check below",
"timeout=0, worker_init_fn=None): torch.utils.data.DataLoader.__init__(self, dataset=dataset, batch_size=batch_size, shuffle=shuffle, sampler=sampler, batch_sampler=batch_sampler, num_workers=num_workers, collate_fn=collate_fn, pin_memory=pin_memory, drop_last=drop_last, timeout=timeout,",
"# pin_memory_thread once it is started. self.pin_memory_thread = pin_memory_thread else: self.data_queue = self.worker_result_queue",
"None: return self.index_queues[self.worker_queue_idx].put((self.send_idx, indices)) self.worker_queue_idx = (self.worker_queue_idx + 1) % self.num_workers self.batches_outstanding +=",
"import threading from torch.utils.data import _utils import torch import random import sys from",
"import random import sys from torch._six import queue import os from torch.utils.data._utils import",
"# See NOTE [ Data Loader Multiprocessing Shutdown Logic ] for details on",
"i.e., pin_memory_thread died. raise RuntimeError('Pin memory thread exited unexpectedly') # In this case,",
"SIGBUS and SIGSEGV. Python signal # module's handlers are executed after Python returns",
"the loop condition. # # This raises a `RuntimeError` if any worker died",
"not all(w.is_alive() for w in self.workers): pids_str = ', '.join(str(w.pid) for w in",
"indices = next(self.sample_iter) # may raise StopIteration batch = self.collate_fn([self.dataset[i] for i in",
"already been generated if self.rcvd_idx in self.reorder_dict: batch = self.reorder_dict.pop(self.rcvd_idx) return self._process_next_batch(batch) if",
"SYNCHRONIZE = 0x00100000 self.manager_handle = self.kernel32.OpenProcess(SYNCHRONIZE, 0, self.manager_pid) if not self.manager_handle: raise ctypes.WinError(ctypes.get_last_error())",
"# module's handlers are executed after Python returns from C low-level # handlers,",
"# store out-of-order samples self.reorder_dict[idx] = batch continue return self._process_next_batch(batch) next = __next__",
"This **must** be called after # `pin_memory_thread` is joined because that thread shares",
"queue.Queue() pin_memory_thread = threading.Thread( target=_utils.pin_memory._pin_memory_loop, args=(self.worker_result_queue, self.data_queue, torch.cuda.current_device(), self.done_event)) pin_memory_thread.daemon = True pin_memory_thread.start()",
"0) == 0 return not self.manager_dead else: class ManagerWatchdog(object): def __init__(self): self.manager_pid =",
"error in this case, but Py2 will just time # out even if",
"and the only way to check it through OS is to let the",
"as the loop condition. # # This raises a `RuntimeError` if any worker",
"def __getstate__(self): # TODO: add limited pickling support for sharing an iterator #",
"+= 1 self.send_idx += 1 def _process_next_batch(self, batch): self.rcvd_idx += 1 self._put_indices() if",
"for details on the # logic of this function. try: collate._use_shared_memory = True",
"it started, so that we do not call .join() if program dies #",
"`pin_memory_thread` exited. self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None) self.pin_memory_thread.join() # Indicate that no more data will be",
"Unfortunately, for Windows, we are missing a worker # error detection mechanism here",
"to workers (see comment above), we only register # pin_memory_thread once it is",
"import DWORD, BOOL, HANDLE # On Windows, the parent ID of the worker",
"is tricky without a non-blocking API raise NotImplementedError(\"_DataLoaderIter cannot be pickled\") def _shutdown_workers(self):",
"final signal # (None) yet. I will keep continuing until get it, and",
"data, any: data if successful else None) try: data = self.data_queue.get(timeout=timeout) return (True,",
"failure detection. # # If `pin_memory=True`, we also need check if `pin_memory_thread` had",
"seed, init_fn, worker_id): # See NOTE [ Data Loader Multiprocessing Shutdown Logic ]",
"assert done_event.is_set() return elif done_event.is_set(): # Done event is set. But I haven't",
"may leave # corrupted data in `worker_result_queue` which `pin_memory_thread` # reads from. if",
"] for details on the # logic of this function. try: collate._use_shared_memory =",
"return len(self.batch_sampler) def _try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL): # Tries to fetch data from `data_queue` for",
"a signal, e.g., hanging in # `Event.set()`. So we need to guard this",
"See NOTE [ Data Loader Multiprocessing Shutdown Logic ] for details on the",
"process. This **must** be called after # `pin_memory_thread` is joined because that thread",
"HANDLE self.kernel32.WaitForSingleObject.argtypes = (HANDLE, DWORD) self.kernel32.WaitForSingleObject.restype = DWORD # Value obtained from https://msdn.microsoft.com/en-us/library/ms684880.aspx",
"in batch.exc_msg: raise Exception(\"KeyError:\" + batch.exc_msg) else: raise batch.exc_type(batch.exc_msg) return batch def __getstate__(self):",
"list after # it started, so that we do not call .join() if",
"w in self.workers)) _utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set = True # prime the prefetch loop for",
"Received the final signal assert done_event.is_set() return elif done_event.is_set(): # Done event is",
"if IS_WINDOWS: import ctypes from ctypes.wintypes import DWORD, BOOL, HANDLE # On Windows,",
"= loader.timeout self.sample_iter = iter(self.batch_sampler) base_seed = torch.LongTensor(1).random_().item() if self.num_workers > 0: self.worker_init_fn",
"is gone / iterator is depleted. # See (1) and the second half",
"it needs to # start a process and pass the arguments over via",
"success, data = self._try_get_batch() if success: return data else: # while condition is",
"for HOGWILD. # Probably the best way to do this is by moving",
"self.collate_fn, base_seed + i, self.worker_init_fn, i)) w.daemon = True # NB: Process.start() actually",
"> 0: self._shutdown_workers() class HookDataLoader(torch.utils.data.DataLoader): def __init__(self, dataset, batch_size=1, shuffle=False, sampler=None, batch_sampler=None, num_workers=0,",
"True # NB: Process.start() actually take some time as it needs to #",
"This can # also be used as inner loop of fetching without timeout,",
"raise ctypes.WinError(ctypes.get_last_error()) self.manager_dead = False def is_alive(self): if not self.manager_dead: # Value obtained",
"= self._try_get_batch() if success: return data def __next__(self): if self.num_workers == 0: #",
"once it is started. self.pin_memory_thread = pin_memory_thread else: self.data_queue = self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self), tuple(w.pid",
"Python 2 compatibility def __iter__(self): return self def _put_indices(self): assert self.batches_outstanding < 2",
"# (None) yet. I will keep continuing until get it, and skip the",
"init_fn is not None: init_fn(worker_id) watchdog = ManagerWatchdog() while watchdog.is_alive(): try: r =",
"batch continue return self._process_next_batch(batch) next = __next__ # Python 2 compatibility def __iter__(self):",
"data = self._try_get_batch(self.timeout) if success: return data else: raise RuntimeError('DataLoader timed out after",
"a non-blocking API raise NotImplementedError(\"_DataLoaderIter cannot be pickled\") def _shutdown_workers(self): # See NOTE",
"= _utils.python_exit_status if python_exit_status is True or python_exit_status is None: # See (2)",
"if self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set = False def __del__(self): if self.num_workers > 0: self._shutdown_workers()",
"(None) yet. I will keep continuing until get it, and skip the #",
"data will be put on this queue by the # current process. This",
"If the handle is # closed, Py3 will error in this case, but",
"batch_indices]) dataset.__after_hook__() except Exception: # It is important that we don't store exc_info",
"KeyboardInterrupt anyways. pass # Balanced batch sampler and online train loader class HookDataloderIter(object):",
"data from `self.data_queue`. # # We check workers' status every `MP_STATUS_CHECK_INTERVAL` seconds, #",
"__del__(self): if self.num_workers > 0: self._shutdown_workers() class HookDataLoader(torch.utils.data.DataLoader): def __init__(self, dataset, batch_size=1, shuffle=False,",
"# Exit workers now. for q in self.index_queues: q.put(None) # Indicate that no",
"inner loop of fetching without timeout, with the # sender status as the",
"`worker_result_queue.put` in this process. # `cancel_join_thread` in case that `pin_memory_thread` exited. self.worker_result_queue.cancel_join_thread() self.worker_result_queue.put(None)",
"`.task_done()` because we don't use `.join()`. else: while True: success, data = self._try_get_batch()",
"and SIGSEGV. Python signal # module's handlers are executed after Python returns from",
"self.workers if not w.is_alive()) raise RuntimeError('DataLoader worker (pid(s) {}) exited unexpectedly'.format(pids_str)) if isinstance(e,",
"that no more data will be put on this queue by the #",
"batch_sampler=batch_sampler, num_workers=num_workers, collate_fn=collate_fn, pin_memory=pin_memory, drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn) def __iter__(self): return HookDataloderIter(self) class HookDataset(object):",
"batch sampler and online train loader class HookDataloderIter(object): def __init__(self, loader): self.dataset =",
"store exc_info in a variable, # see NOTE [ Python Traceback Reference Cycle",
"# failed. Note that this is the only mechanism for Windows to detect",
"the manager and ask if the process status has changed. class ManagerWatchdog(object): def",
"= False self.shutdown = False self.send_idx = 0 self.rcvd_idx = 0 self.reorder_dict =",
"= r try: dataset.__before_hook__() samples = collate_fn([dataset[i] for i in batch_indices]) dataset.__after_hook__() except",
"sampler=sampler, batch_sampler=batch_sampler, num_workers=num_workers, collate_fn=collate_fn, pin_memory=pin_memory, drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn) def __iter__(self): return HookDataloderIter(self) class",
"] for details on # the logic of this function. python_exit_status = _utils.python_exit_status",
"batch = self.reorder_dict.pop(self.rcvd_idx) return self._process_next_batch(batch) if self.batches_outstanding == 0: self._shutdown_workers() raise StopIteration while",
"shuffle=shuffle, sampler=sampler, batch_sampler=batch_sampler, num_workers=num_workers, collate_fn=collate_fn, pin_memory=pin_memory, drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn) def __iter__(self): return HookDataloderIter(self)",
"failures. if not all(w.is_alive() for w in self.workers): pids_str = ', '.join(str(w.pid) for",
"also used for # worker failure detection. # # If `pin_memory=True`, we also",
"detection mechanism here in this function, as it # doesn't provide a SIGCHLD",
"if success: return data else: raise RuntimeError('DataLoader timed out after {} seconds'.format(self.timeout)) elif",
"while True: success, data = self._try_get_batch() if success: return data def __next__(self): if",
"process will raise KeyboardInterrupt anyways. pass # Balanced batch sampler and online train",
"in self.workers: w.join() finally: # Even though all this function does is putting",
"= os.getppid() self.kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) self.kernel32.OpenProcess.argtypes = (DWORD, BOOL, DWORD) self.kernel32.OpenProcess.restype =",
"signalling the end is tricky without a non-blocking API raise NotImplementedError(\"_DataLoaderIter cannot be",
"0: self._shutdown_workers() raise StopIteration while True: assert (not self.shutdown and self.batches_outstanding > 0)",
"1 self._put_indices() if isinstance(batch, _utils.ExceptionWrapper): # make multiline KeyError msg readable by working",
"This is the only mechanism to detect worker failures for # Windows. For",
"process remains unchanged when the manager process # is gone, and the only",
"to let the worker have a process handle # of the manager and",
"# and remove pids from the C side data structure only at the",
"# # FIXME: Unfortunately, for Windows, we are missing a worker # error",
"for # worker failure detection. # # If `pin_memory=True`, we also need check",
"is also used for # worker failure detection. # # If `pin_memory=True`, we",
"is started. self.pin_memory_thread = pin_memory_thread else: self.data_queue = self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self), tuple(w.pid for w",
"put on this queue by the # current process. q.close() for w in",
"NOTE [ Data Loader Multiprocessing Shutdown Logic ] for details on # the",
"base_seed = torch.LongTensor(1).random_().item() if self.num_workers > 0: self.worker_init_fn = loader.worker_init_fn self.worker_queue_idx = 0",
"case, `self.data_queue` is a `queue.Queue`,. But we don't # need to call `.task_done()`",
"drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn) def __iter__(self): return HookDataloderIter(self) class HookDataset(object): def __before_hook__(self): pass def",
"msg readable by working around # a python bug https://bugs.python.org/issue2651 if batch.exc_type ==",
"os.getppid() != self.manager_pid return not self.manager_dead def _worker_loop(dataset, index_queue, data_queue, done_event, collate_fn, seed,",
"loop of fetching without timeout, with the # sender status as the loop",
"but Py2 will just time # out even if there is data in",
"* self.num_workers): self._put_indices() def __len__(self): return len(self.batch_sampler) def _try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL): # Tries to",
"without a non-blocking API raise NotImplementedError(\"_DataLoaderIter cannot be pickled\") def _shutdown_workers(self): # See",
"prime the prefetch loop for _ in range(2 * self.num_workers): self._put_indices() def __len__(self):",
"self.kernel32.WaitForSingleObject.restype = DWORD # Value obtained from https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE = 0x00100000 self.manager_handle =",
"i, self.worker_init_fn, i)) w.daemon = True # NB: Process.start() actually take some time",
"`self.data_queue` is a `queue.Queue`,. But we don't # need to call `.task_done()` because",
"NOTE [ Python Traceback Reference Cycle Problem ] data_queue.put((idx, ExceptionWrapper(sys.exc_info()))) else: data_queue.put((idx, samples))",
"self.manager_dead: # Value obtained from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead = self.kernel32.WaitForSingleObject(self.manager_handle, 0) == 0 return",
"self.done_event.set() # Exit `pin_memory_thread` first because exiting workers may leave # corrupted data",
"if not self.manager_dead: self.manager_dead = os.getppid() != self.manager_pid return not self.manager_dead def _worker_loop(dataset,",
"is the only mechanism to detect worker failures for # Windows. For other",
"\"\\n\" in batch.exc_msg: raise Exception(\"KeyError:\" + batch.exc_msg) else: raise batch.exc_type(batch.exc_msg) return batch def",
"except Exception: # It is important that we don't store exc_info in a",
"start a process and pass the arguments over via a pipe. # Therefore,",
"as e: # At timeout and error, we manually check whether any worker",
"self.worker_pids_set = False self.shutdown = False self.send_idx = 0 self.rcvd_idx = 0 self.reorder_dict",
"detect worker failures for # Windows. For other platforms, a SIGCHLD handler is",
"platforms, a SIGCHLD handler is also used for # worker failure detection. #",
"the # same pipe handles with this loader thread. If the handle is",
"a python bug https://bugs.python.org/issue2651 if batch.exc_type == KeyError and \"\\n\" in batch.exc_msg: raise",
"pin_memory_thread.start() # Similar to workers (see comment above), we only register # pin_memory_thread",
"else: raise batch.exc_type(batch.exc_msg) return batch def __getstate__(self): # TODO: add limited pickling support",
"half of the note. if not self.shutdown: self.shutdown = True try: self.done_event.set() #",
"self.manager_pid) if not self.manager_handle: raise ctypes.WinError(ctypes.get_last_error()) self.manager_dead = False def is_alive(self): if not",
"be used as inner loop of fetching without timeout, with the # sender",
"for w in self.workers): pids_str = ', '.join(str(w.pid) for w in self.workers if",
"_try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL): # Tries to fetch data from `data_queue` for a given timeout.",
"workers (see comment above), we only register # pin_memory_thread once it is started.",
"reference is gone / iterator is depleted. # See (1) and the second",
"handlers for SIGBUS and SIGSEGV. Python signal # module's handlers are executed after",
"Logic ] for details on the # logic of this function. try: collate._use_shared_memory",
"= {} self.done_event = multiprocessing.Event() self.index_queues = [] self.workers = [] for i",
"be put on this queue by the # current process. This **must** be",
"on, weird things can # happen when a worker is killed by a",
"Main process will raise KeyboardInterrupt anyways. pass # Balanced batch sampler and online",
"self.rcvd_idx in self.reorder_dict: batch = self.reorder_dict.pop(self.rcvd_idx) return self._process_next_batch(batch) if self.batches_outstanding == 0: self._shutdown_workers()",
"`.join()`. else: while True: success, data = self._try_get_batch() if success: return data def",
"to guard this with SIGCHLD handler, # and remove pids from the C",
"the handle is # closed, Py3 will error in this case, but Py2",
"data in the queue. self.worker_result_queue.close() # Exit workers now. for q in self.index_queues:",
"worker failures. if not all(w.is_alive() for w in self.workers): pids_str = ', '.join(str(w.pid)",
"SIGCHLD handler. if self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set = False def __del__(self): if self.num_workers >",
"Py2 will just time # out even if there is data in the",
"raise NotImplementedError(\"_DataLoaderIter cannot be pickled\") def _shutdown_workers(self): # See NOTE [ Data Loader",
"0) idx, batch = self._get_batch() self.batches_outstanding -= 1 if idx != self.rcvd_idx: #",
"batch_size=batch_size, shuffle=shuffle, sampler=sampler, batch_sampler=batch_sampler, num_workers=num_workers, collate_fn=collate_fn, pin_memory=pin_memory, drop_last=drop_last, timeout=timeout, worker_init_fn=worker_init_fn) def __iter__(self): return",
"register # pin_memory_thread once it is started. self.pin_memory_thread = pin_memory_thread else: self.data_queue =",
"do this is by moving the sample pushing # to a separate thread",
"self.collate_fn([self.dataset[i] for i in indices]) if self.pin_memory: batch = _utils.pin_memory.pin_memory_batch(batch) return batch #",
"function, as it # doesn't provide a SIGCHLD handler. if self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set",
"KeyError msg readable by working around # a python bug https://bugs.python.org/issue2651 if batch.exc_type",
"if self.num_workers > 0: self.worker_init_fn = loader.worker_init_fn self.worker_queue_idx = 0 self.worker_result_queue = multiprocessing.Queue()",
"# which we achieve by running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` # in a loop. This is",
"Python is shutting down, do no-op. return # Normal exit when last reference",
"raise StopIteration while True: assert (not self.shutdown and self.batches_outstanding > 0) idx, batch",
"i)) w.daemon = True # NB: Process.start() actually take some time as it",
"via a pipe. # Therefore, we only add a worker to self.workers list",
"batch.exc_msg: raise Exception(\"KeyError:\" + batch.exc_msg) else: raise batch.exc_type(batch.exc_msg) return batch def __getstate__(self): #",
"the manual check below on errors # and timeouts. # # Returns a",
"only way to check it through OS is to let the worker have",
"(bool: whether successfully get data, any: data if successful else None) try: data",
"from. if hasattr(self, 'pin_memory_thread'): # Use hasattr in case error happens before we",
"raise StopIteration batch = self.collate_fn([self.dataset[i] for i in indices]) if self.pin_memory: batch =",
"return self._process_next_batch(batch) if self.batches_outstanding == 0: self._shutdown_workers() raise StopIteration while True: assert (not",
"only register # pin_memory_thread once it is started. self.pin_memory_thread = pin_memory_thread else: self.data_queue",
"worker failures for # Windows. For other platforms, a SIGCHLD handler is also",
"= __next__ # Python 2 compatibility def __iter__(self): return self def _put_indices(self): assert",
"unexpectedly') # In this case, `self.data_queue` is a `queue.Queue`,. But we don't #",
"self.reorder_dict: batch = self.reorder_dict.pop(self.rcvd_idx) return self._process_next_batch(batch) if self.batches_outstanding == 0: self._shutdown_workers() raise StopIteration",
"to detect # worker failures. if not all(w.is_alive() for w in self.workers): pids_str",
"condition is false, i.e., pin_memory_thread died. raise RuntimeError('Pin memory thread exited unexpectedly') #",
"doesn't provide a SIGCHLD handler. if self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set = False def __del__(self):",
"# out even if there is data in the queue. self.worker_result_queue.close() # Exit",
"train loader class HookDataloderIter(object): def __init__(self, loader): self.dataset = loader.dataset self.collate_fn = loader.collate_fn",
"loader.collate_fn self.batch_sampler = loader.batch_sampler self.num_workers = loader.num_workers self.pin_memory = loader.pin_memory and torch.cuda.is_available() self.timeout",
"# `pin_memory_thread` is joined because that thread shares the # same pipe handles",
"have a process handle # of the manager and ask if the process",
"the arguments over via a pipe. # Therefore, we only add a worker",
"is joined because that thread shares the # same pipe handles with this",
"self.rcvd_idx += 1 self._put_indices() if isinstance(batch, _utils.ExceptionWrapper): # make multiline KeyError msg readable",
"True or python_exit_status is None: # See (2) of the note. If Python",
"DWORD # Value obtained from https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE = 0x00100000 self.manager_handle = self.kernel32.OpenProcess(SYNCHRONIZE, 0,",
"running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` # in a loop. This is the only mechanism to detect",
"we also need check if `pin_memory_thread` had # died at timeouts. if self.timeout",
"way to do this is by moving the sample pushing # to a",
"does is putting into queues that # we have called `cancel_join_thread` on, weird",
"is putting into queues that # we have called `cancel_join_thread` on, weird things",
"# again. # https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers() torch.set_num_threads(1) random.seed(seed) torch.manual_seed(seed) data_queue.cancel_join_thread() if init_fn is not",
"pipe. # Therefore, we only add a worker to self.workers list after #",
"= iter(self.batch_sampler) base_seed = torch.LongTensor(1).random_().item() if self.num_workers > 0: self.worker_init_fn = loader.worker_init_fn self.worker_queue_idx",
"that this is the only mechanism for Windows to detect # worker failures.",
"to call `.task_done()` because we don't use `.join()`. else: while True: success, data",
"whether successfully get data, any: data if successful else None) try: data =",
"0 self.rcvd_idx = 0 self.reorder_dict = {} self.done_event = multiprocessing.Event() self.index_queues = []",
"sampler=None, batch_sampler=None, num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None): torch.utils.data.DataLoader.__init__(self, dataset=dataset, batch_size=batch_size, shuffle=shuffle, sampler=sampler,",
"the manager process # is gone, and the only way to check it",
"pids_str = ', '.join(str(w.pid) for w in self.workers if not w.is_alive()) raise RuntimeError('DataLoader",
"python_exit_status is True or python_exit_status is None: # See (2) of the note.",
"will be put on this queue by the # current process. q.close() for",
"if any worker died expectedly. This error # can come from either the",
"__init__(self): self.manager_pid = os.getppid() self.manager_dead = False def is_alive(self): if not self.manager_dead: self.manager_dead",
"# Indicate that no more data will be put on this queue by",
"not self.shutdown: self.shutdown = True try: self.done_event.set() # Exit `pin_memory_thread` first because exiting",
"self.manager_handle: raise ctypes.WinError(ctypes.get_last_error()) self.manager_dead = False def is_alive(self): if not self.manager_dead: # Value",
"def _try_get_batch(self, timeout=_utils.MP_STATUS_CHECK_INTERVAL): # Tries to fetch data from `data_queue` for a given",
"get data, any: data if successful else None) try: data = self.data_queue.get(timeout=timeout) return",
"pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None): torch.utils.data.DataLoader.__init__(self, dataset=dataset, batch_size=batch_size, shuffle=shuffle, sampler=sampler, batch_sampler=batch_sampler, num_workers=num_workers, collate_fn=collate_fn, pin_memory=pin_memory,",
"def is_alive(self): if not self.manager_dead: # Value obtained from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead = self.kernel32.WaitForSingleObject(self.manager_handle,",
"not None: init_fn(worker_id) watchdog = ManagerWatchdog() while watchdog.is_alive(): try: r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except",
"`queue.Queue`,. But we don't # need to call `.task_done()` because we don't use",
"# Probably the best way to do this is by moving the sample",
"don't use `.join()`. else: while True: success, data = self._try_get_batch() if success: return",
"DWORD) self.kernel32.WaitForSingleObject.restype = DWORD # Value obtained from https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE = 0x00100000 self.manager_handle",
"# We check workers' status every `MP_STATUS_CHECK_INTERVAL` seconds, # which we achieve by",
"# end. # # FIXME: Unfortunately, for Windows, we are missing a worker",
"def __next__(self): if self.num_workers == 0: # same-process loading indices = next(self.sample_iter) #",
"batch # check if the next sample has already been generated if self.rcvd_idx",
"success, data = self._try_get_batch(self.timeout) if success: return data else: raise RuntimeError('DataLoader timed out",
"import os from torch.utils.data._utils import collate, signal_handling, MP_STATUS_CHECK_INTERVAL, \\ ExceptionWrapper, IS_WINDOWS if IS_WINDOWS:",
"> 0) idx, batch = self._get_batch() self.batches_outstanding -= 1 if idx != self.rcvd_idx:",
"has changed. class ManagerWatchdog(object): def __init__(self): self.manager_pid = os.getppid() self.kernel32 = ctypes.WinDLL('kernel32', use_last_error=True)",
"self._shutdown_workers() class HookDataLoader(torch.utils.data.DataLoader): def __init__(self, dataset, batch_size=1, shuffle=False, sampler=None, batch_sampler=None, num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False,",
"self._process_next_batch(batch) next = __next__ # Python 2 compatibility def __iter__(self): return self def",
"https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE = 0x00100000 self.manager_handle = self.kernel32.OpenProcess(SYNCHRONIZE, 0, self.manager_pid) if not self.manager_handle: raise",
"self.worker_queue_idx = (self.worker_queue_idx + 1) % self.num_workers self.batches_outstanding += 1 self.send_idx += 1",
"iterator # across multiple threads for HOGWILD. # Probably the best way to",
"workers' status every `MP_STATUS_CHECK_INTERVAL` seconds, # which we achieve by running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` #",
"we have called `cancel_join_thread` on, weird things can # happen when a worker",
"it is started. self.pin_memory_thread = pin_memory_thread else: self.data_queue = self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self), tuple(w.pid for",
"# (bool: whether successfully get data, any: data if successful else None) try:",
"def _process_next_batch(self, batch): self.rcvd_idx += 1 self._put_indices() if isinstance(batch, _utils.ExceptionWrapper): # make multiline",
"the sample pushing # to a separate thread and then just sharing the",
"a 2-tuple: # (bool: whether successfully get data, any: data if successful else",
"support for sharing an iterator # across multiple threads for HOGWILD. # Probably",
"num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None): torch.utils.data.DataLoader.__init__(self, dataset=dataset, batch_size=batch_size, shuffle=shuffle, sampler=sampler, batch_sampler=batch_sampler, num_workers=num_workers,",
"limited pickling support for sharing an iterator # across multiple threads for HOGWILD.",
"guard this with SIGCHLD handler, # and remove pids from the C side",
"parent ID of the worker process remains unchanged when the manager process #",
"__next__ # Python 2 compatibility def __iter__(self): return self def _put_indices(self): assert self.batches_outstanding",
"= batch continue return self._process_next_batch(batch) next = __next__ # Python 2 compatibility def",
"Cycle Problem ] data_queue.put((idx, ExceptionWrapper(sys.exc_info()))) else: data_queue.put((idx, samples)) del samples except KeyboardInterrupt: #",
"don't store exc_info in a variable, # see NOTE [ Python Traceback Reference",
"detection. # # If `pin_memory=True`, we also need check if `pin_memory_thread` had #",
"See (1) and the second half of the note. if not self.shutdown: self.shutdown",
"use_last_error=True) self.kernel32.OpenProcess.argtypes = (DWORD, BOOL, DWORD) self.kernel32.OpenProcess.restype = HANDLE self.kernel32.WaitForSingleObject.argtypes = (HANDLE, DWORD)",
"with SIGCHLD handler, # and remove pids from the C side data structure",
"but signalling the end is tricky without a non-blocking API raise NotImplementedError(\"_DataLoaderIter cannot",
"this is the only mechanism for Windows to detect # worker failures. if",
"signal had already happened # again. # https://docs.python.org/3/library/signal.html#execution-of-python-signal-handlers signal_handling._set_worker_signal_handlers() torch.set_num_threads(1) random.seed(seed) torch.manual_seed(seed) data_queue.cancel_join_thread()",
"if r is None: # Received the final signal assert done_event.is_set() return elif",
"corrupted data in `worker_result_queue` which `pin_memory_thread` # reads from. if hasattr(self, 'pin_memory_thread'): #",
"when the manager process # is gone, and the only way to check",
"class HookDataloderIter(object): def __init__(self, loader): self.dataset = loader.dataset self.collate_fn = loader.collate_fn self.batch_sampler =",
"https://bugs.python.org/issue2651 if batch.exc_type == KeyError and \"\\n\" in batch.exc_msg: raise Exception(\"KeyError:\" + batch.exc_msg)",
"r try: dataset.__before_hook__() samples = collate_fn([dataset[i] for i in batch_indices]) dataset.__after_hook__() except Exception:",
"a given timeout. This can # also be used as inner loop of",
"2 compatibility def __iter__(self): return self def _put_indices(self): assert self.batches_outstanding < 2 *",
"the C side data structure only at the # end. # # FIXME:",
"NotImplementedError(\"_DataLoaderIter cannot be pickled\") def _shutdown_workers(self): # See NOTE [ Data Loader Multiprocessing",
"the only mechanism for Windows to detect # worker failures. if not all(w.is_alive()",
"Exception(\"KeyError:\" + batch.exc_msg) else: raise batch.exc_type(batch.exc_msg) return batch def __getstate__(self): # TODO: add",
"I will keep continuing until get it, and skip the # processing steps.",
"with this loader thread. If the handle is # closed, Py3 will error",
"called after # `pin_memory_thread` is joined because that thread shares the # same",
"to check it through OS is to let the worker have a process",
"workers may leave # corrupted data in `worker_result_queue` which `pin_memory_thread` # reads from.",
"self.reorder_dict = {} self.done_event = multiprocessing.Event() self.index_queues = [] self.workers = [] for",
"worker # error detection mechanism here in this function, as it # doesn't",
"status every `MP_STATUS_CHECK_INTERVAL` seconds, # which we achieve by running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` # in",
"continue idx, batch_indices = r try: dataset.__before_hook__() samples = collate_fn([dataset[i] for i in",
"return data def __next__(self): if self.num_workers == 0: # same-process loading indices =",
"queue import os from torch.utils.data._utils import collate, signal_handling, MP_STATUS_CHECK_INTERVAL, \\ ExceptionWrapper, IS_WINDOWS if",
"error # can come from either the SIGCHLD handler in `_utils/signal_handling.py` # (only",
"handler, # and remove pids from the C side data structure only at",
"at the # end. # # FIXME: Unfortunately, for Windows, we are missing",
"isinstance(e, queue.Empty): return (False, None) raise def _get_batch(self): # Fetches data from `self.data_queue`.",
"expectedly. This error # can come from either the SIGCHLD handler in `_utils/signal_handling.py`",
"return # Normal exit when last reference is gone / iterator is depleted.",
"from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead = self.kernel32.WaitForSingleObject(self.manager_handle, 0) == 0 return not self.manager_dead else: class",
"handler. if self.worker_pids_set: _utils.signal_handling._remove_worker_pids(id(self)) self.worker_pids_set = False def __del__(self): if self.num_workers > 0:",
"every `MP_STATUS_CHECK_INTERVAL` seconds, # which we achieve by running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` # in a",
"# NB: Process.start() actually take some time as it needs to # start",
"timeout=timeout, worker_init_fn=worker_init_fn) def __iter__(self): return HookDataloderIter(self) class HookDataset(object): def __before_hook__(self): pass def __after_hook__(self):",
"out after {} seconds'.format(self.timeout)) elif self.pin_memory: while self.pin_memory_thread.is_alive(): success, data = self._try_get_batch() if",
"any worker has # failed. Note that this is the only mechanism for",
"the # current process. q.close() for w in self.workers: w.join() finally: # Even",
"import collate, signal_handling, MP_STATUS_CHECK_INTERVAL, \\ ExceptionWrapper, IS_WINDOWS if IS_WINDOWS: import ctypes from ctypes.wintypes",
"w = multiprocessing.Process( target=_worker_loop, args=(self.dataset, index_queue, self.worker_result_queue, self.done_event, self.collate_fn, base_seed + i, self.worker_init_fn,",
"variable, # see NOTE [ Python Traceback Reference Cycle Problem ] data_queue.put((idx, ExceptionWrapper(sys.exc_info())))",
"Exception: # It is important that we don't store exc_info in a variable,",
"Tries to fetch data from `data_queue` for a given timeout. This can #",
"TODO: add limited pickling support for sharing an iterator # across multiple threads",
"# current process. This **must** be called after # `pin_memory_thread` is joined because",
"the note. If Python is shutting down, do no-op. return # Normal exit",
"started, so that we do not call .join() if program dies # before",
"loader.timeout self.sample_iter = iter(self.batch_sampler) base_seed = torch.LongTensor(1).random_().item() if self.num_workers > 0: self.worker_init_fn =",
"range(self.num_workers): index_queue = multiprocessing.Queue() index_queue.cancel_join_thread() w = multiprocessing.Process( target=_worker_loop, args=(self.dataset, index_queue, self.worker_result_queue, self.done_event,",
"= 0 self.worker_pids_set = False self.shutdown = False self.send_idx = 0 self.rcvd_idx =",
"sharing an iterator # across multiple threads for HOGWILD. # Probably the best",
"# the logic of this function. python_exit_status = _utils.python_exit_status if python_exit_status is True",
"self.worker_pids_set = False def __del__(self): if self.num_workers > 0: self._shutdown_workers() class HookDataLoader(torch.utils.data.DataLoader): def",
"os.getppid() self.manager_dead = False def is_alive(self): if not self.manager_dead: self.manager_dead = os.getppid() !=",
"pin_memory_thread else: self.data_queue = self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self), tuple(w.pid for w in self.workers)) _utils.signal_handling._set_SIGCHLD_handler() self.worker_pids_set",
"worker process remains unchanged when the manager process # is gone, and the",
"== 0: self._shutdown_workers() raise StopIteration while True: assert (not self.shutdown and self.batches_outstanding >",
"raise KeyboardInterrupt anyways. pass # Balanced batch sampler and online train loader class",
"False def is_alive(self): if not self.manager_dead: # Value obtained from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx self.manager_dead =",
"the # processing steps. continue idx, batch_indices = r try: dataset.__before_hook__() samples =",
"batch def __getstate__(self): # TODO: add limited pickling support for sharing an iterator",
"= pin_memory_thread else: self.data_queue = self.worker_result_queue _utils.signal_handling._set_worker_pids(id(self), tuple(w.pid for w in self.workers)) _utils.signal_handling._set_SIGCHLD_handler()",
"# # If `pin_memory=True`, we also need check if `pin_memory_thread` had # died",
"raise RuntimeError('DataLoader timed out after {} seconds'.format(self.timeout)) elif self.pin_memory: while self.pin_memory_thread.is_alive(): success, data",
"for i in batch_indices]) dataset.__after_hook__() except Exception: # It is important that we",
"a started process. w.start() self.index_queues.append(index_queue) self.workers.append(w) if self.pin_memory: self.data_queue = queue.Queue() pin_memory_thread =",
"given timeout. This can # also be used as inner loop of fetching",
"self._try_get_batch(self.timeout) if success: return data else: raise RuntimeError('DataLoader timed out after {} seconds'.format(self.timeout))",
"# but signalling the end is tricky without a non-blocking API raise NotImplementedError(\"_DataLoaderIter",
"data = self._try_get_batch() if success: return data else: # while condition is false,",
"= False def __del__(self): if self.num_workers > 0: self._shutdown_workers() class HookDataLoader(torch.utils.data.DataLoader): def __init__(self,",
"= True pin_memory_thread.start() # Similar to workers (see comment above), we only register",
"HookDataLoader(torch.utils.data.DataLoader): def __init__(self, dataset, batch_size=1, shuffle=False, sampler=None, batch_sampler=None, num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False, drop_last=False, timeout=0,",
"batch = self.collate_fn([self.dataset[i] for i in indices]) if self.pin_memory: batch = _utils.pin_memory.pin_memory_batch(batch) return",
"= (DWORD, BOOL, DWORD) self.kernel32.OpenProcess.restype = HANDLE self.kernel32.WaitForSingleObject.argtypes = (HANDLE, DWORD) self.kernel32.WaitForSingleObject.restype =",
"loader): self.dataset = loader.dataset self.collate_fn = loader.collate_fn self.batch_sampler = loader.batch_sampler self.num_workers = loader.num_workers",
"error, we manually check whether any worker has # failed. Note that this",
"StopIteration batch = self.collate_fn([self.dataset[i] for i in indices]) if self.pin_memory: batch = _utils.pin_memory.pin_memory_batch(batch)",
"# before it starts, and __del__ tries to join but will get: #",
"if self.pin_memory: batch = _utils.pin_memory.pin_memory_batch(batch) return batch # check if the next sample",
"Fetches data from `self.data_queue`. # # We check workers' status every `MP_STATUS_CHECK_INTERVAL` seconds,",
"and remove pids from the C side data structure only at the #",
"been generated if self.rcvd_idx in self.reorder_dict: batch = self.reorder_dict.pop(self.rcvd_idx) return self._process_next_batch(batch) if self.batches_outstanding",
"= [] for i in range(self.num_workers): index_queue = multiprocessing.Queue() index_queue.cancel_join_thread() w = multiprocessing.Process(",
"not self.manager_dead: self.manager_dead = os.getppid() != self.manager_pid return not self.manager_dead def _worker_loop(dataset, index_queue,",
"__init__(self): self.manager_pid = os.getppid() self.kernel32 = ctypes.WinDLL('kernel32', use_last_error=True) self.kernel32.OpenProcess.argtypes = (DWORD, BOOL, DWORD)",
"else None) try: data = self.data_queue.get(timeout=timeout) return (True, data) except Exception as e:",
"init_fn, worker_id): # See NOTE [ Data Loader Multiprocessing Shutdown Logic ] for",
"if the next sample has already been generated if self.rcvd_idx in self.reorder_dict: batch",
"used as inner loop of fetching without timeout, with the # sender status",
"samples = collate_fn([dataset[i] for i in batch_indices]) dataset.__after_hook__() except Exception: # It is",
"def is_alive(self): if not self.manager_dead: self.manager_dead = os.getppid() != self.manager_pid return not self.manager_dead",
"raise batch.exc_type(batch.exc_msg) return batch def __getstate__(self): # TODO: add limited pickling support for",
"hasattr in case error happens before we set the attribute. # First time",
"0: # same-process loading indices = next(self.sample_iter) # may raise StopIteration batch =",
"self.worker_init_fn = loader.worker_init_fn self.worker_queue_idx = 0 self.worker_result_queue = multiprocessing.Queue() self.batches_outstanding = 0 self.worker_pids_set",
"Balanced batch sampler and online train loader class HookDataloderIter(object): def __init__(self, loader): self.dataset",
"of this function. try: collate._use_shared_memory = True # Intialize C side signal handlers",
"just time # out even if there is data in the queue. self.worker_result_queue.close()",
"`RuntimeError` if any worker died expectedly. This error # can come from either",
"# error detection mechanism here in this function, as it # doesn't provide",
"= DWORD # Value obtained from https://msdn.microsoft.com/en-us/library/ms684880.aspx SYNCHRONIZE = 0x00100000 self.manager_handle = self.kernel32.OpenProcess(SYNCHRONIZE,",
"tricky without a non-blocking API raise NotImplementedError(\"_DataLoaderIter cannot be pickled\") def _shutdown_workers(self): #",
"NB: Process.start() actually take some time as it needs to # start a",
"note. if not self.shutdown: self.shutdown = True try: self.done_event.set() # Exit `pin_memory_thread` first",
"exited unexpectedly') # In this case, `self.data_queue` is a `queue.Queue`,. But we don't",
"RuntimeError('DataLoader worker (pid(s) {}) exited unexpectedly'.format(pids_str)) if isinstance(e, queue.Empty): return (False, None) raise",
"NOTE [ Data Loader Multiprocessing Shutdown Logic ] for details on the #",
"in a variable, # see NOTE [ Python Traceback Reference Cycle Problem ]",
"self.batches_outstanding = 0 self.worker_pids_set = False self.shutdown = False self.send_idx = 0 self.rcvd_idx",
"def __init__(self, loader): self.dataset = loader.dataset self.collate_fn = loader.collate_fn self.batch_sampler = loader.batch_sampler self.num_workers",
"_put_indices(self): assert self.batches_outstanding < 2 * self.num_workers indices = next(self.sample_iter, None) if indices",
"self.manager_dead = False def is_alive(self): if not self.manager_dead: # Value obtained from https://msdn.microsoft.com/en-us/library/windows/desktop/ms687032.aspx",
"comment above), we only register # pin_memory_thread once it is started. self.pin_memory_thread =",
"other platforms, a SIGCHLD handler is also used for # worker failure detection.",
"seconds, # which we achieve by running `self._try_get_batch(timeout=MP_STATUS_CHECK_INTERVAL)` # in a loop. This",
"* self.num_workers indices = next(self.sample_iter, None) if indices is None: return self.index_queues[self.worker_queue_idx].put((self.send_idx, indices))",
"So we need to guard this with SIGCHLD handler, # and remove pids",
"def __init__(self, dataset, batch_size=1, shuffle=False, sampler=None, batch_sampler=None, num_workers=0, collate_fn=torch.utils.data.dataloader.default_collate, pin_memory=False, drop_last=False, timeout=0, worker_init_fn=None):",
"pin_memory_thread died. raise RuntimeError('Pin memory thread exited unexpectedly') # In this case, `self.data_queue`",
"None: init_fn(worker_id) watchdog = ManagerWatchdog() while watchdog.is_alive(): try: r = index_queue.get(timeout=MP_STATUS_CHECK_INTERVAL) except queue.Empty:",
"may raise StopIteration batch = self.collate_fn([self.dataset[i] for i in indices]) if self.pin_memory: batch",
"after {} seconds'.format(self.timeout)) elif self.pin_memory: while self.pin_memory_thread.is_alive(): success, data = self._try_get_batch() if success:"
]
[
"ncclCommInitRank(self): ''' Use partial AllReduce to change here. self.nRanks is the number of",
"self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId() self.ncclCommInitRank() def ncclFinish(self): self.MPI_Finalize() def mpi_nccl_communicator(): ''' ''' return MPI_NCCL_Communicator() #",
"self.localRank) def ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def ncclSetDevice(self, device_id): self.device_id.value = device_id lib_mpi_nccl.setDevice(self.device_id.value) def ncclInit(self):",
"ndarray.array(arr, ctx = ndarray.gpu(t.device_id.value)) output_arr = np.zeros(16 * t.nRanks.value) output_arr = ndarray.array(output_arr, ctx",
"128))] class MPI_NCCL_Communicator(): def __init__(self, stream = None): ''' mpicomm: the MPI communicator,",
"1] ''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks, ctypes.byref(self.ncclId), self.myRank, self.localRank) def ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def ncclSetDevice(self, device_id):",
"= 7 ncclFloat64 = 8 ncclDouble = 8 ncclNumTypes = 9 class ncclRedOp_t(Enum):",
"number of threads to use ncclallreduce self.myRank is the rank among these threads.",
"to use in MPI_Bcast, MPI_Reduce, MPI_Scatter, etc ncclcomm: the NCCL communicator, to use",
"else self.stream.handle) def dlarrayAllGather(self, input_arr, output_arr, datatype, executor_stream = None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle, c_int(datatype.value),",
"self.nRanks = c_int32(0) self.myRank = c_int32(0) self.localRank = c_int32(-1) self.ncclId = ncclUniqueId() self.device_id",
"''' return MPI_NCCL_Communicator() # NCCL_DEBUG=INFO mpirun --allow-run-as-root -np 4 python mpi_nccl_comm.py if __name__",
"def _load_nccl_lib(): \"\"\"Load libary in build/lib.\"\"\" curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path = os.path.join(curr_path, '../../../build/lib/')",
"= 8 ncclDouble = 8 ncclNumTypes = 9 class ncclRedOp_t(Enum): ncclSum = 0",
"is the number of threads to use ncclallreduce self.myRank is the rank among",
"MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank)) def MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks)) def getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm),",
"= c_int32(-1) self.ncclId = ncclUniqueId() self.device_id = c_int(0) self.MPI_Init() self.MPIGetComm() self.MPI_Comm_rank() self.MPI_Comm_size() self.getLocalRank()",
"MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks)) def getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks, self.myRank, ctypes.byref(self.localRank)) def ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm,",
"= np.ones(16)*t.localRank.value print(\"before: = \", arr) arr = ndarray.array(arr, ctx = ndarray.gpu(t.device_id.value)) output_arr",
"arr, datatype, src, executor_stream = None): lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value), c_int(src), self.ncclcomm, executor_stream.handle if executor_stream",
"= 0 ncclChar = 0 ncclUint8 = 1 ncclInt32 = 2 ncclInt =",
"self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayRecv(self, arr, datatype, src, executor_stream =",
"dlarrayNcclAllReduce(self, dlarray, datatype, reduceop, executor_stream = None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value), c_int(reduceop.value), self.ncclcomm, executor_stream.handle if",
"self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayBroadcast(self, dlarray, datatype, root, executor_stream =",
"self.nRanks, self.myRank, ctypes.byref(self.localRank)) def ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm, self.localRank) def dlarrayNcclAllReduce(self, dlarray, datatype, reduceop,",
"= 8 ncclNumTypes = 9 class ncclRedOp_t(Enum): ncclSum = 0 ncclProd = 1",
"the number of threads to use ncclallreduce self.myRank is the rank among these",
"RTLD_GLOBAL) return lib lib_mpi_nccl = _load_nccl_lib() # lib_mpi_nccl = CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL) class ncclDataType_t(Enum):",
"stream = None): ''' mpicomm: the MPI communicator, to use in MPI_Bcast, MPI_Reduce,",
"def MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks)) def getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks, self.myRank, ctypes.byref(self.localRank)) def ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId),",
"print(\"before: = \", arr) arr = ndarray.array(arr, ctx = ndarray.gpu(t.device_id.value)) output_arr = np.zeros(16",
"= 3 ncclNumOps = 4 class ncclUniqueId(Structure): _fields_=[(\"internal\", (c_int8 * 128))] class MPI_NCCL_Communicator():",
"- 1] ''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks, ctypes.byref(self.ncclId), self.myRank, self.localRank) def ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def ncclSetDevice(self,",
"mpicomm: the MPI communicator, to use in MPI_Bcast, MPI_Reduce, MPI_Scatter, etc ncclcomm: the",
"lib lib_mpi_nccl = _load_nccl_lib() # lib_mpi_nccl = CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL) class ncclDataType_t(Enum): ncclInt8 =",
"c_int(datatype.value), c_int(target), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayRecv(self, arr, datatype, src,",
"= None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value), c_int(reduceop.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayBroadcast(self,",
"0 ncclUint8 = 1 ncclInt32 = 2 ncclInt = 2 ncclUint32 = 3",
"stream def MPI_Init(self): lib_mpi_nccl.MPIInit() def MPI_Finalize(self): lib_mpi_nccl.MPIFinalize() def MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm),",
"= c_int32(0) self.localRank = c_int32(-1) self.ncclId = ncclUniqueId() self.device_id = c_int(0) self.MPI_Init() self.MPIGetComm()",
"else self.stream.handle) def dlarrayRecv(self, arr, datatype, src, executor_stream = None): lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value), c_int(src),",
"t.nRanks.value) output_arr = ndarray.array(output_arr, ctx = ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum) # t.dlarrayBroadcast(arr, ncclDataType_t.ncclFloat32,",
"\"\"\"Load libary in build/lib.\"\"\" curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path = os.path.join(curr_path, '../../../build/lib/') path_to_so_file =",
"c_int(target), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayRecv(self, arr, datatype, src, executor_stream",
"if executor_stream else self.stream.handle) def dlarrayBroadcast(self, dlarray, datatype, root, executor_stream = None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle,",
"= os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\") lib = CDLL(path_to_so_file, RTLD_GLOBAL) return lib lib_mpi_nccl = _load_nccl_lib() #",
"= 4 class ncclUniqueId(Structure): _fields_=[(\"internal\", (c_int8 * 128))] class MPI_NCCL_Communicator(): def __init__(self, stream",
"executor_stream.handle if executor_stream else self.stream.handle) def dlarrayAllGather(self, input_arr, output_arr, datatype, executor_stream = None):",
"if executor_stream else self.stream.handle) def dlarraySend(self, arr, datatype, target, executor_stream = None): lib_mpi_nccl.dlarraySend(arr.handle,",
"myRanks: the rank in all MPI threads localRank: the rank among the MPI",
"executor_stream else self.stream.handle) def dlarrayAllGather(self, input_arr, output_arr, datatype, executor_stream = None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle,",
"ncclNumOps = 4 class ncclUniqueId(Structure): _fields_=[(\"internal\", (c_int8 * 128))] class MPI_NCCL_Communicator(): def __init__(self,",
"reduceop, executor_stream = None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value), c_int(reduceop.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle)",
"be distributed to all ranks in the communicator before calling ncclCommInitRank. stream: the",
"the total number of MPI threads myRanks: the rank in all MPI threads",
"executor_stream else self.stream.handle) def ncclCommInitRank(self): ''' Use partial AllReduce to change here. self.nRanks",
"the value must in [0, self.nRank - 1] ''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks, ctypes.byref(self.ncclId), self.myRank,",
"in ncclAllReduce ... nRanks: the total number of MPI threads myRanks: the rank",
"in [0, self.nRank - 1] ''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks, ctypes.byref(self.ncclId), self.myRank, self.localRank) def ncclCommDestroy(self):",
"ncclFloat32 = 7 ncclFloat = 7 ncclFloat64 = 8 ncclDouble = 8 ncclNumTypes",
"= c_int64(0) self.nRanks = c_int32(0) self.myRank = c_int32(0) self.localRank = c_int32(-1) self.ncclId =",
"dlarray, datatype, root, executor_stream = None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value), c_int(root), self.ncclcomm, executor_stream.handle if executor_stream",
"''' ''' return MPI_NCCL_Communicator() # NCCL_DEBUG=INFO mpirun --allow-run-as-root -np 4 python mpi_nccl_comm.py if",
"MPI_NCCL_Communicator(): def __init__(self, stream = None): ''' mpicomm: the MPI communicator, to use",
"calling ncclCommInitRank. stream: the stream for NCCL communication ''' self.mpicomm = c_int64(0) self.ncclcomm",
"output_arr, datatype, executor_stream = None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle, c_int(datatype.value), self.ncclcomm, executor_stream.handle if executor_stream else",
"= np.zeros(16 * t.nRanks.value) output_arr = ndarray.array(output_arr, ctx = ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum)",
"= ndarray.array(arr, ctx = ndarray.gpu(t.device_id.value)) output_arr = np.zeros(16 * t.nRanks.value) output_arr = ndarray.array(output_arr,",
"lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks)) def getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks, self.myRank, ctypes.byref(self.localRank)) def ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm, self.localRank)",
"os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\") lib = CDLL(path_to_so_file, RTLD_GLOBAL) return lib lib_mpi_nccl = _load_nccl_lib() # lib_mpi_nccl",
"* import numpy as np from enum import Enum import os def _load_nccl_lib():",
"t.ncclInit() arr = np.ones(16)*t.localRank.value print(\"before: = \", arr) arr = ndarray.array(arr, ctx =",
"Use partial AllReduce to change here. self.nRanks is the number of threads to",
"self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayAllGather(self, input_arr, output_arr, datatype, executor_stream =",
"executor_stream = None): lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value), c_int(src), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def",
"target, executor_stream = None): lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value), c_int(target), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle)",
"c_int(datatype.value), c_int(reduceop.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayBroadcast(self, dlarray, datatype, root,",
"ndarray.gpu(t.device_id.value)) output_arr = np.zeros(16 * t.nRanks.value) output_arr = ndarray.array(output_arr, ctx = ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr,",
"lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value), c_int(reduceop.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayBroadcast(self, dlarray, datatype,",
"use in ncclAllReduce ... nRanks: the total number of MPI threads myRanks: the",
"the communicator before calling ncclCommInitRank. stream: the stream for NCCL communication ''' self.mpicomm",
"''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks, ctypes.byref(self.ncclId), self.myRank, self.localRank) def ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def ncclSetDevice(self, device_id): self.device_id.value",
"MPI_Init(self): lib_mpi_nccl.MPIInit() def MPI_Finalize(self): lib_mpi_nccl.MPIFinalize() def MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank)) def",
"ncclSum = 0 ncclProd = 1 ncclMax = 2 ncclMin = 3 ncclNumOps",
"return lib lib_mpi_nccl = _load_nccl_lib() # lib_mpi_nccl = CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL) class ncclDataType_t(Enum): ncclInt8",
"= 6 ncclFloat32 = 7 ncclFloat = 7 ncclFloat64 = 8 ncclDouble =",
"== \"__main__\": t = mpi_nccl_communicator() t.ncclInit() arr = np.ones(16)*t.localRank.value print(\"before: = \", arr)",
"ncclFloat = 7 ncclFloat64 = 8 ncclDouble = 8 ncclNumTypes = 9 class",
"MPI threads myRanks: the rank in all MPI threads localRank: the rank among",
"nRanks: the total number of MPI threads myRanks: the rank in all MPI",
"c_int(0) self.MPI_Init() self.MPIGetComm() self.MPI_Comm_rank() self.MPI_Comm_size() self.getLocalRank() self.device_id.value = self.localRank.value if stream == None:",
"threads myRanks: the rank in all MPI threads localRank: the rank among the",
"the stream for NCCL communication ''' self.mpicomm = c_int64(0) self.ncclcomm = c_int64(0) self.nRanks",
"self.stream = stream def MPI_Init(self): lib_mpi_nccl.MPIInit() def MPI_Finalize(self): lib_mpi_nccl.MPIFinalize() def MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def",
"should be distributed to all ranks in the communicator before calling ncclCommInitRank. stream:",
"for NCCL communication ''' self.mpicomm = c_int64(0) self.ncclcomm = c_int64(0) self.nRanks = c_int32(0)",
"NCCL communicator, to use in ncclAllReduce ... nRanks: the total number of MPI",
"= self.localRank.value if stream == None: self.stream = create_stream_handle(ndarray.gpu(self.device_id.value)) else: self.stream = stream",
"dlarrayBroadcast(self, dlarray, datatype, root, executor_stream = None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value), c_int(root), self.ncclcomm, executor_stream.handle if",
"self.stream.handle) def dlarrayRecv(self, arr, datatype, src, executor_stream = None): lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value), c_int(src), self.ncclcomm,",
"MPI communicator, to use in MPI_Bcast, MPI_Reduce, MPI_Scatter, etc ncclcomm: the NCCL communicator,",
"stream == None: self.stream = create_stream_handle(ndarray.gpu(self.device_id.value)) else: self.stream = stream def MPI_Init(self): lib_mpi_nccl.MPIInit()",
"(c_int8 * 128))] class MPI_NCCL_Communicator(): def __init__(self, stream = None): ''' mpicomm: the",
"MPI_Bcast, MPI_Reduce, MPI_Scatter, etc ncclcomm: the NCCL communicator, to use in ncclAllReduce ...",
"2 ncclMin = 3 ncclNumOps = 4 class ncclUniqueId(Structure): _fields_=[(\"internal\", (c_int8 * 128))]",
"self.stream.handle) def dlarrayAllGather(self, input_arr, output_arr, datatype, executor_stream = None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle, c_int(datatype.value), self.ncclcomm,",
"lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle, c_int(datatype.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarraySend(self, arr, datatype,",
"= stream def MPI_Init(self): lib_mpi_nccl.MPIInit() def MPI_Finalize(self): lib_mpi_nccl.MPIFinalize() def MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def MPI_Comm_rank(self):",
"None): lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value), c_int(src), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def ncclCommInitRank(self): '''",
"numpy as np from enum import Enum import os def _load_nccl_lib(): \"\"\"Load libary",
"self.getLocalRank() self.device_id.value = self.localRank.value if stream == None: self.stream = create_stream_handle(ndarray.gpu(self.device_id.value)) else: self.stream",
"must in [0, self.nRank - 1] ''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks, ctypes.byref(self.ncclId), self.myRank, self.localRank) def",
"self.localRank = c_int32(-1) self.ncclId = ncclUniqueId() self.device_id = c_int(0) self.MPI_Init() self.MPIGetComm() self.MPI_Comm_rank() self.MPI_Comm_size()",
"_load_nccl_lib(): \"\"\"Load libary in build/lib.\"\"\" curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path = os.path.join(curr_path, '../../../build/lib/') path_to_so_file",
"self.MPI_Finalize() def mpi_nccl_communicator(): ''' ''' return MPI_NCCL_Communicator() # NCCL_DEBUG=INFO mpirun --allow-run-as-root -np 4",
"def dlarrayBroadcast(self, dlarray, datatype, root, executor_stream = None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value), c_int(root), self.ncclcomm, executor_stream.handle",
"MPI threads in this device ncclId: ncclGetUniqueId should be called once when creating",
"= 2 ncclInt = 2 ncclUint32 = 3 ncclInt64 = 4 ncclUint64 =",
"4 python mpi_nccl_comm.py if __name__ == \"__main__\": t = mpi_nccl_communicator() t.ncclInit() arr =",
"to use in ncclAllReduce ... nRanks: the total number of MPI threads myRanks:",
"= 4 ncclUint64 = 5 ncclFloat16 = 6 ncclHalf = 6 ncclFloat32 =",
"create_stream_handle(ndarray.gpu(self.device_id.value)) else: self.stream = stream def MPI_Init(self): lib_mpi_nccl.MPIInit() def MPI_Finalize(self): lib_mpi_nccl.MPIFinalize() def MPIGetComm(self):",
"device ncclId: ncclGetUniqueId should be called once when creating a communicator and the",
"= None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value), c_int(root), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayAllGather(self,",
"ncclcomm: the NCCL communicator, to use in ncclAllReduce ... nRanks: the total number",
"= 1 ncclInt32 = 2 ncclInt = 2 ncclUint32 = 3 ncclInt64 =",
"executor_stream = None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value), c_int(root), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def",
"# NCCL_DEBUG=INFO mpirun --allow-run-as-root -np 4 python mpi_nccl_comm.py if __name__ == \"__main__\": t",
"self.ncclCommInitRank() def ncclFinish(self): self.MPI_Finalize() def mpi_nccl_communicator(): ''' ''' return MPI_NCCL_Communicator() # NCCL_DEBUG=INFO mpirun",
"the MPI communicator, to use in MPI_Bcast, MPI_Reduce, MPI_Scatter, etc ncclcomm: the NCCL",
"= os.path.join(curr_path, '../../../build/lib/') path_to_so_file = os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\") lib = CDLL(path_to_so_file, RTLD_GLOBAL) return lib",
"4 ncclUint64 = 5 ncclFloat16 = 6 ncclHalf = 6 ncclFloat32 = 7",
"MPI_NCCL_Communicator() # NCCL_DEBUG=INFO mpirun --allow-run-as-root -np 4 python mpi_nccl_comm.py if __name__ == \"__main__\":",
"self.stream = create_stream_handle(ndarray.gpu(self.device_id.value)) else: self.stream = stream def MPI_Init(self): lib_mpi_nccl.MPIInit() def MPI_Finalize(self): lib_mpi_nccl.MPIFinalize()",
"def dlarrayAllGather(self, input_arr, output_arr, datatype, executor_stream = None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle, c_int(datatype.value), self.ncclcomm, executor_stream.handle",
"self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def ncclCommInitRank(self): ''' Use partial AllReduce to",
"import ndarray from athena.stream import * import numpy as np from enum import",
"ctypes import * from athena import ndarray from athena.stream import * import numpy",
"def ncclFinish(self): self.MPI_Finalize() def mpi_nccl_communicator(): ''' ''' return MPI_NCCL_Communicator() # NCCL_DEBUG=INFO mpirun --allow-run-as-root",
"output_arr.handle, c_int(datatype.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarraySend(self, arr, datatype, target,",
"self.myRank, self.localRank) def ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def ncclSetDevice(self, device_id): self.device_id.value = device_id lib_mpi_nccl.setDevice(self.device_id.value) def",
"-np 4 python mpi_nccl_comm.py if __name__ == \"__main__\": t = mpi_nccl_communicator() t.ncclInit() arr",
"mpirun --allow-run-as-root -np 4 python mpi_nccl_comm.py if __name__ == \"__main__\": t = mpi_nccl_communicator()",
"= 3 ncclInt64 = 4 ncclUint64 = 5 ncclFloat16 = 6 ncclHalf =",
"def __init__(self, stream = None): ''' mpicomm: the MPI communicator, to use in",
"= _load_nccl_lib() # lib_mpi_nccl = CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL) class ncclDataType_t(Enum): ncclInt8 = 0 ncclChar",
"self.nRank - 1] ''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks, ctypes.byref(self.ncclId), self.myRank, self.localRank) def ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def",
"ncclInt32 = 2 ncclInt = 2 ncclUint32 = 3 ncclInt64 = 4 ncclUint64",
"communicator, to use in ncclAllReduce ... nRanks: the total number of MPI threads",
"class MPI_NCCL_Communicator(): def __init__(self, stream = None): ''' mpicomm: the MPI communicator, to",
"from athena.stream import * import numpy as np from enum import Enum import",
"= mpi_nccl_communicator() t.ncclInit() arr = np.ones(16)*t.localRank.value print(\"before: = \", arr) arr = ndarray.array(arr,",
"ncclFloat16 = 6 ncclHalf = 6 ncclFloat32 = 7 ncclFloat = 7 ncclFloat64",
"* t.nRanks.value) output_arr = ndarray.array(output_arr, ctx = ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum) # t.dlarrayBroadcast(arr,",
"communication ''' self.mpicomm = c_int64(0) self.ncclcomm = c_int64(0) self.nRanks = c_int32(0) self.myRank =",
"t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum) # t.dlarrayBroadcast(arr, ncclDataType_t.ncclFloat32, 0) # t.dlarrayAllGather(arr, output_arr, ncclDataType_t.ncclFloat32) print(\"after: =",
"lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank)) def MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks)) def getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks,",
"threads localRank: the rank among the MPI threads in this device ncclId: ncclGetUniqueId",
"NCCL_DEBUG=INFO mpirun --allow-run-as-root -np 4 python mpi_nccl_comm.py if __name__ == \"__main__\": t =",
"self.localRank.value if stream == None: self.stream = create_stream_handle(ndarray.gpu(self.device_id.value)) else: self.stream = stream def",
"def dlarraySend(self, arr, datatype, target, executor_stream = None): lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value), c_int(target), self.ncclcomm, executor_stream.handle",
"class ncclRedOp_t(Enum): ncclSum = 0 ncclProd = 1 ncclMax = 2 ncclMin =",
"ncclRedOp_t(Enum): ncclSum = 0 ncclProd = 1 ncclMax = 2 ncclMin = 3",
"= 9 class ncclRedOp_t(Enum): ncclSum = 0 ncclProd = 1 ncclMax = 2",
"def ncclCommInitRank(self): ''' Use partial AllReduce to change here. self.nRanks is the number",
"localRank: the rank among the MPI threads in this device ncclId: ncclGetUniqueId should",
"MPI threads localRank: the rank among the MPI threads in this device ncclId:",
"the rank among the MPI threads in this device ncclId: ncclGetUniqueId should be",
"def dlarrayNcclAllReduce(self, dlarray, datatype, reduceop, executor_stream = None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value), c_int(reduceop.value), self.ncclcomm, executor_stream.handle",
"= 7 ncclFloat = 7 ncclFloat64 = 8 ncclDouble = 8 ncclNumTypes =",
"def getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks, self.myRank, ctypes.byref(self.localRank)) def ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm, self.localRank) def dlarrayNcclAllReduce(self,",
"''' self.mpicomm = c_int64(0) self.ncclcomm = c_int64(0) self.nRanks = c_int32(0) self.myRank = c_int32(0)",
"np.ones(16)*t.localRank.value print(\"before: = \", arr) arr = ndarray.array(arr, ctx = ndarray.gpu(t.device_id.value)) output_arr =",
"def ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def ncclSetDevice(self, device_id): self.device_id.value = device_id lib_mpi_nccl.setDevice(self.device_id.value) def ncclInit(self): self.ncclSetDevice(self.device_id.value)",
"ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum) # t.dlarrayBroadcast(arr, ncclDataType_t.ncclFloat32, 0) # t.dlarrayAllGather(arr, output_arr, ncclDataType_t.ncclFloat32) print(\"after: = \",",
"= c_int64(0) self.ncclcomm = c_int64(0) self.nRanks = c_int32(0) self.myRank = c_int32(0) self.localRank =",
"c_int(datatype.value), c_int(root), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayAllGather(self, input_arr, output_arr, datatype,",
"python mpi_nccl_comm.py if __name__ == \"__main__\": t = mpi_nccl_communicator() t.ncclInit() arr = np.ones(16)*t.localRank.value",
"Enum import os def _load_nccl_lib(): \"\"\"Load libary in build/lib.\"\"\" curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path",
"= 1 ncclMax = 2 ncclMin = 3 ncclNumOps = 4 class ncclUniqueId(Structure):",
"= 0 ncclProd = 1 ncclMax = 2 ncclMin = 3 ncclNumOps =",
"ncclInit(self): self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId() self.ncclCommInitRank() def ncclFinish(self): self.MPI_Finalize() def mpi_nccl_communicator(): ''' ''' return MPI_NCCL_Communicator()",
"4 class ncclUniqueId(Structure): _fields_=[(\"internal\", (c_int8 * 128))] class MPI_NCCL_Communicator(): def __init__(self, stream =",
"7 ncclFloat64 = 8 ncclDouble = 8 ncclNumTypes = 9 class ncclRedOp_t(Enum): ncclSum",
"lib_mpi_nccl.MPIInit() def MPI_Finalize(self): lib_mpi_nccl.MPIFinalize() def MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank)) def MPI_Comm_size(self):",
"output_arr = ndarray.array(output_arr, ctx = ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum) # t.dlarrayBroadcast(arr, ncclDataType_t.ncclFloat32, 0)",
"* from athena import ndarray from athena.stream import * import numpy as np",
"mpi_nccl_communicator() t.ncclInit() arr = np.ones(16)*t.localRank.value print(\"before: = \", arr) arr = ndarray.array(arr, ctx",
"ncclSetDevice(self, device_id): self.device_id.value = device_id lib_mpi_nccl.setDevice(self.device_id.value) def ncclInit(self): self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId() self.ncclCommInitRank() def ncclFinish(self):",
"AllReduce to change here. self.nRanks is the number of threads to use ncclallreduce",
"curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path = os.path.join(curr_path, '../../../build/lib/') path_to_so_file = os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\") lib =",
"= None): ''' mpicomm: the MPI communicator, to use in MPI_Bcast, MPI_Reduce, MPI_Scatter,",
"lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks, ctypes.byref(self.ncclId), self.myRank, self.localRank) def ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def ncclSetDevice(self, device_id): self.device_id.value =",
"8 ncclNumTypes = 9 class ncclRedOp_t(Enum): ncclSum = 0 ncclProd = 1 ncclMax",
"athena.stream import * import numpy as np from enum import Enum import os",
"def ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm, self.localRank) def dlarrayNcclAllReduce(self, dlarray, datatype, reduceop, executor_stream = None):",
"class ncclDataType_t(Enum): ncclInt8 = 0 ncclChar = 0 ncclUint8 = 1 ncclInt32 =",
"2 ncclInt = 2 ncclUint32 = 3 ncclInt64 = 4 ncclUint64 = 5",
"ncclMin = 3 ncclNumOps = 4 class ncclUniqueId(Structure): _fields_=[(\"internal\", (c_int8 * 128))] class",
"called once when creating a communicator and the Id should be distributed to",
"self.stream.handle) def dlarrayBroadcast(self, dlarray, datatype, root, executor_stream = None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value), c_int(root), self.ncclcomm,",
"os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path = os.path.join(curr_path, '../../../build/lib/') path_to_so_file = os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\") lib = CDLL(path_to_so_file, RTLD_GLOBAL)",
"stream: the stream for NCCL communication ''' self.mpicomm = c_int64(0) self.ncclcomm = c_int64(0)",
"input_arr, output_arr, datatype, executor_stream = None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle, c_int(datatype.value), self.ncclcomm, executor_stream.handle if executor_stream",
"7 ncclFloat = 7 ncclFloat64 = 8 ncclDouble = 8 ncclNumTypes = 9",
"if stream == None: self.stream = create_stream_handle(ndarray.gpu(self.device_id.value)) else: self.stream = stream def MPI_Init(self):",
"to all ranks in the communicator before calling ncclCommInitRank. stream: the stream for",
"= device_id lib_mpi_nccl.setDevice(self.device_id.value) def ncclInit(self): self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId() self.ncclCommInitRank() def ncclFinish(self): self.MPI_Finalize() def mpi_nccl_communicator():",
"once when creating a communicator and the Id should be distributed to all",
"datatype, reduceop, executor_stream = None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value), c_int(reduceop.value), self.ncclcomm, executor_stream.handle if executor_stream else",
"and the Id should be distributed to all ranks in the communicator before",
"NCCL communication ''' self.mpicomm = c_int64(0) self.ncclcomm = c_int64(0) self.nRanks = c_int32(0) self.myRank",
"ctx = ndarray.gpu(t.device_id.value)) output_arr = np.zeros(16 * t.nRanks.value) output_arr = ndarray.array(output_arr, ctx =",
"= 6 ncclHalf = 6 ncclFloat32 = 7 ncclFloat = 7 ncclFloat64 =",
"else self.stream.handle) def dlarraySend(self, arr, datatype, target, executor_stream = None): lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value), c_int(target),",
"= 2 ncclUint32 = 3 ncclInt64 = 4 ncclUint64 = 5 ncclFloat16 =",
"ctx = ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum) # t.dlarrayBroadcast(arr, ncclDataType_t.ncclFloat32, 0) # t.dlarrayAllGather(arr, output_arr,",
"= ndarray.array(output_arr, ctx = ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum) # t.dlarrayBroadcast(arr, ncclDataType_t.ncclFloat32, 0) #",
"communicator before calling ncclCommInitRank. stream: the stream for NCCL communication ''' self.mpicomm =",
"ncclUint64 = 5 ncclFloat16 = 6 ncclHalf = 6 ncclFloat32 = 7 ncclFloat",
"among the MPI threads in this device ncclId: ncclGetUniqueId should be called once",
"= c_int32(0) self.myRank = c_int32(0) self.localRank = c_int32(-1) self.ncclId = ncclUniqueId() self.device_id =",
"def dlarrayRecv(self, arr, datatype, src, executor_stream = None): lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value), c_int(src), self.ncclcomm, executor_stream.handle",
"ctypes.byref(self.nRanks)) def getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks, self.myRank, ctypes.byref(self.localRank)) def ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm, self.localRank) def",
"CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL) class ncclDataType_t(Enum): ncclInt8 = 0 ncclChar = 0 ncclUint8 = 1",
"if executor_stream else self.stream.handle) def dlarrayRecv(self, arr, datatype, src, executor_stream = None): lib_mpi_nccl.dlarrayRecv(arr.handle,",
"np from enum import Enum import os def _load_nccl_lib(): \"\"\"Load libary in build/lib.\"\"\"",
"ncclallreduce self.myRank is the rank among these threads. the value must in [0,",
"lib_mpi_nccl.MPIFinalize() def MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank)) def MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks)) def",
"ranks in the communicator before calling ncclCommInitRank. stream: the stream for NCCL communication",
"of threads to use ncclallreduce self.myRank is the rank among these threads. the",
"self.mpicomm = c_int64(0) self.ncclcomm = c_int64(0) self.nRanks = c_int32(0) self.myRank = c_int32(0) self.localRank",
"all MPI threads localRank: the rank among the MPI threads in this device",
"from enum import Enum import os def _load_nccl_lib(): \"\"\"Load libary in build/lib.\"\"\" curr_path",
"self.MPI_Comm_size() self.getLocalRank() self.device_id.value = self.localRank.value if stream == None: self.stream = create_stream_handle(ndarray.gpu(self.device_id.value)) else:",
"ncclMax = 2 ncclMin = 3 ncclNumOps = 4 class ncclUniqueId(Structure): _fields_=[(\"internal\", (c_int8",
"ncclNumTypes = 9 class ncclRedOp_t(Enum): ncclSum = 0 ncclProd = 1 ncclMax =",
"mpi_nccl_communicator(): ''' ''' return MPI_NCCL_Communicator() # NCCL_DEBUG=INFO mpirun --allow-run-as-root -np 4 python mpi_nccl_comm.py",
"threads. the value must in [0, self.nRank - 1] ''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks, ctypes.byref(self.ncclId),",
"number of MPI threads myRanks: the rank in all MPI threads localRank: the",
"lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank)) def MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks)) def getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks, self.myRank, ctypes.byref(self.localRank)) def",
"= CDLL(path_to_so_file, RTLD_GLOBAL) return lib lib_mpi_nccl = _load_nccl_lib() # lib_mpi_nccl = CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL)",
"distributed to all ranks in the communicator before calling ncclCommInitRank. stream: the stream",
"from athena import ndarray from athena.stream import * import numpy as np from",
"MPI_Scatter, etc ncclcomm: the NCCL communicator, to use in ncclAllReduce ... nRanks: the",
"datatype, src, executor_stream = None): lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value), c_int(src), self.ncclcomm, executor_stream.handle if executor_stream else",
"'../../../build/lib/') path_to_so_file = os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\") lib = CDLL(path_to_so_file, RTLD_GLOBAL) return lib lib_mpi_nccl =",
"datatype, root, executor_stream = None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value), c_int(root), self.ncclcomm, executor_stream.handle if executor_stream else",
"return MPI_NCCL_Communicator() # NCCL_DEBUG=INFO mpirun --allow-run-as-root -np 4 python mpi_nccl_comm.py if __name__ ==",
"''' Use partial AllReduce to change here. self.nRanks is the number of threads",
"lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm, self.localRank) def dlarrayNcclAllReduce(self, dlarray, datatype, reduceop, executor_stream = None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value),",
"t = mpi_nccl_communicator() t.ncclInit() arr = np.ones(16)*t.localRank.value print(\"before: = \", arr) arr =",
"self.stream.handle) def dlarraySend(self, arr, datatype, target, executor_stream = None): lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value), c_int(target), self.ncclcomm,",
"be called once when creating a communicator and the Id should be distributed",
"ncclDouble = 8 ncclNumTypes = 9 class ncclRedOp_t(Enum): ncclSum = 0 ncclProd =",
"path_to_so_file = os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\") lib = CDLL(path_to_so_file, RTLD_GLOBAL) return lib lib_mpi_nccl = _load_nccl_lib()",
"when creating a communicator and the Id should be distributed to all ranks",
"6 ncclFloat32 = 7 ncclFloat = 7 ncclFloat64 = 8 ncclDouble = 8",
"= None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle, c_int(datatype.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarraySend(self,",
"''' mpicomm: the MPI communicator, to use in MPI_Bcast, MPI_Reduce, MPI_Scatter, etc ncclcomm:",
"None: self.stream = create_stream_handle(ndarray.gpu(self.device_id.value)) else: self.stream = stream def MPI_Init(self): lib_mpi_nccl.MPIInit() def MPI_Finalize(self):",
"should be called once when creating a communicator and the Id should be",
"stream for NCCL communication ''' self.mpicomm = c_int64(0) self.ncclcomm = c_int64(0) self.nRanks =",
"self.mpicomm, self.localRank) def dlarrayNcclAllReduce(self, dlarray, datatype, reduceop, executor_stream = None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value), c_int(reduceop.value),",
"0 ncclChar = 0 ncclUint8 = 1 ncclInt32 = 2 ncclInt = 2",
"ctypes.byref(self.myRank)) def MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks)) def getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks, self.myRank, ctypes.byref(self.localRank)) def ncclGetUniqueId(self):",
"ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm, self.localRank) def dlarrayNcclAllReduce(self, dlarray, datatype, reduceop, executor_stream = None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle,",
"ncclHalf = 6 ncclFloat32 = 7 ncclFloat = 7 ncclFloat64 = 8 ncclDouble",
"2 ncclUint32 = 3 ncclInt64 = 4 ncclUint64 = 5 ncclFloat16 = 6",
"_load_nccl_lib() # lib_mpi_nccl = CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL) class ncclDataType_t(Enum): ncclInt8 = 0 ncclChar =",
"executor_stream.handle if executor_stream else self.stream.handle) def dlarrayRecv(self, arr, datatype, src, executor_stream = None):",
"def ncclSetDevice(self, device_id): self.device_id.value = device_id lib_mpi_nccl.setDevice(self.device_id.value) def ncclInit(self): self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId() self.ncclCommInitRank() def",
"CDLL(path_to_so_file, RTLD_GLOBAL) return lib lib_mpi_nccl = _load_nccl_lib() # lib_mpi_nccl = CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL) class",
"lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def ncclSetDevice(self, device_id): self.device_id.value = device_id lib_mpi_nccl.setDevice(self.device_id.value) def ncclInit(self): self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId() self.ncclCommInitRank()",
"in the communicator before calling ncclCommInitRank. stream: the stream for NCCL communication '''",
"= 2 ncclMin = 3 ncclNumOps = 4 class ncclUniqueId(Structure): _fields_=[(\"internal\", (c_int8 *",
"1 ncclMax = 2 ncclMin = 3 ncclNumOps = 4 class ncclUniqueId(Structure): _fields_=[(\"internal\",",
"3 ncclInt64 = 4 ncclUint64 = 5 ncclFloat16 = 6 ncclHalf = 6",
"c_int(src), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def ncclCommInitRank(self): ''' Use partial AllReduce",
"lib_path = os.path.join(curr_path, '../../../build/lib/') path_to_so_file = os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\") lib = CDLL(path_to_so_file, RTLD_GLOBAL) return",
"def ncclInit(self): self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId() self.ncclCommInitRank() def ncclFinish(self): self.MPI_Finalize() def mpi_nccl_communicator(): ''' ''' return",
"communicator and the Id should be distributed to all ranks in the communicator",
"self.myRank is the rank among these threads. the value must in [0, self.nRank",
"= 5 ncclFloat16 = 6 ncclHalf = 6 ncclFloat32 = 7 ncclFloat =",
"ncclFinish(self): self.MPI_Finalize() def mpi_nccl_communicator(): ''' ''' return MPI_NCCL_Communicator() # NCCL_DEBUG=INFO mpirun --allow-run-as-root -np",
"ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def ncclSetDevice(self, device_id): self.device_id.value = device_id lib_mpi_nccl.setDevice(self.device_id.value) def ncclInit(self): self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId()",
"ncclUniqueId(Structure): _fields_=[(\"internal\", (c_int8 * 128))] class MPI_NCCL_Communicator(): def __init__(self, stream = None): '''",
"self.MPI_Init() self.MPIGetComm() self.MPI_Comm_rank() self.MPI_Comm_size() self.getLocalRank() self.device_id.value = self.localRank.value if stream == None: self.stream",
"self.device_id = c_int(0) self.MPI_Init() self.MPIGetComm() self.MPI_Comm_rank() self.MPI_Comm_size() self.getLocalRank() self.device_id.value = self.localRank.value if stream",
"# t.dlarrayBroadcast(arr, ncclDataType_t.ncclFloat32, 0) # t.dlarrayAllGather(arr, output_arr, ncclDataType_t.ncclFloat32) print(\"after: = \", arr.asnumpy()) t.ncclFinish()",
"lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks, self.myRank, ctypes.byref(self.localRank)) def ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm, self.localRank) def dlarrayNcclAllReduce(self, dlarray, datatype,",
"= create_stream_handle(ndarray.gpu(self.device_id.value)) else: self.stream = stream def MPI_Init(self): lib_mpi_nccl.MPIInit() def MPI_Finalize(self): lib_mpi_nccl.MPIFinalize() def",
"lib_mpi_nccl.setDevice(self.device_id.value) def ncclInit(self): self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId() self.ncclCommInitRank() def ncclFinish(self): self.MPI_Finalize() def mpi_nccl_communicator(): ''' '''",
"ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum) # t.dlarrayBroadcast(arr, ncclDataType_t.ncclFloat32, 0) # t.dlarrayAllGather(arr, output_arr, ncclDataType_t.ncclFloat32) print(\"after:",
"_fields_=[(\"internal\", (c_int8 * 128))] class MPI_NCCL_Communicator(): def __init__(self, stream = None): ''' mpicomm:",
"ncclChar = 0 ncclUint8 = 1 ncclInt32 = 2 ncclInt = 2 ncclUint32",
"value must in [0, self.nRank - 1] ''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks, ctypes.byref(self.ncclId), self.myRank, self.localRank)",
"import os def _load_nccl_lib(): \"\"\"Load libary in build/lib.\"\"\" curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path =",
"import Enum import os def _load_nccl_lib(): \"\"\"Load libary in build/lib.\"\"\" curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__)))",
"executor_stream = None): lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value), c_int(target), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def",
"executor_stream = None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle, c_int(datatype.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def",
"arr = ndarray.array(arr, ctx = ndarray.gpu(t.device_id.value)) output_arr = np.zeros(16 * t.nRanks.value) output_arr =",
"5 ncclFloat16 = 6 ncclHalf = 6 ncclFloat32 = 7 ncclFloat = 7",
"= CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL) class ncclDataType_t(Enum): ncclInt8 = 0 ncclChar = 0 ncclUint8 =",
"total number of MPI threads myRanks: the rank in all MPI threads localRank:",
"here. self.nRanks is the number of threads to use ncclallreduce self.myRank is the",
"device_id): self.device_id.value = device_id lib_mpi_nccl.setDevice(self.device_id.value) def ncclInit(self): self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId() self.ncclCommInitRank() def ncclFinish(self): self.MPI_Finalize()",
"in this device ncclId: ncclGetUniqueId should be called once when creating a communicator",
"None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value), c_int(reduceop.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayBroadcast(self, dlarray,",
"c_int32(0) self.myRank = c_int32(0) self.localRank = c_int32(-1) self.ncclId = ncclUniqueId() self.device_id = c_int(0)",
"else self.stream.handle) def ncclCommInitRank(self): ''' Use partial AllReduce to change here. self.nRanks is",
"self.nRanks, ctypes.byref(self.ncclId), self.myRank, self.localRank) def ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def ncclSetDevice(self, device_id): self.device_id.value = device_id",
"if __name__ == \"__main__\": t = mpi_nccl_communicator() t.ncclInit() arr = np.ones(16)*t.localRank.value print(\"before: =",
"from ctypes import * from athena import ndarray from athena.stream import * import",
"use ncclallreduce self.myRank is the rank among these threads. the value must in",
"before calling ncclCommInitRank. stream: the stream for NCCL communication ''' self.mpicomm = c_int64(0)",
"self.device_id.value = self.localRank.value if stream == None: self.stream = create_stream_handle(ndarray.gpu(self.device_id.value)) else: self.stream =",
"a communicator and the Id should be distributed to all ranks in the",
"8 ncclDouble = 8 ncclNumTypes = 9 class ncclRedOp_t(Enum): ncclSum = 0 ncclProd",
"all ranks in the communicator before calling ncclCommInitRank. stream: the stream for NCCL",
"the rank among these threads. the value must in [0, self.nRank - 1]",
"of MPI threads myRanks: the rank in all MPI threads localRank: the rank",
"as np from enum import Enum import os def _load_nccl_lib(): \"\"\"Load libary in",
"c_int32(-1) self.ncclId = ncclUniqueId() self.device_id = c_int(0) self.MPI_Init() self.MPIGetComm() self.MPI_Comm_rank() self.MPI_Comm_size() self.getLocalRank() self.device_id.value",
"ncclInt = 2 ncclUint32 = 3 ncclInt64 = 4 ncclUint64 = 5 ncclFloat16",
"os def _load_nccl_lib(): \"\"\"Load libary in build/lib.\"\"\" curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path = os.path.join(curr_path,",
"else self.stream.handle) def dlarrayBroadcast(self, dlarray, datatype, root, executor_stream = None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value), c_int(root),",
"dlarrayRecv(self, arr, datatype, src, executor_stream = None): lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value), c_int(src), self.ncclcomm, executor_stream.handle if",
"--allow-run-as-root -np 4 python mpi_nccl_comm.py if __name__ == \"__main__\": t = mpi_nccl_communicator() t.ncclInit()",
"self.myRank, ctypes.byref(self.localRank)) def ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm, self.localRank) def dlarrayNcclAllReduce(self, dlarray, datatype, reduceop, executor_stream",
"ncclUniqueId() self.device_id = c_int(0) self.MPI_Init() self.MPIGetComm() self.MPI_Comm_rank() self.MPI_Comm_size() self.getLocalRank() self.device_id.value = self.localRank.value if",
"ncclAllReduce ... nRanks: the total number of MPI threads myRanks: the rank in",
"athena import ndarray from athena.stream import * import numpy as np from enum",
"ncclId: ncclGetUniqueId should be called once when creating a communicator and the Id",
"import * import numpy as np from enum import Enum import os def",
"np.zeros(16 * t.nRanks.value) output_arr = ndarray.array(output_arr, ctx = ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum) #",
"None): lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value), c_int(target), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayRecv(self, arr,",
"etc ncclcomm: the NCCL communicator, to use in ncclAllReduce ... nRanks: the total",
"# lib_mpi_nccl = CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL) class ncclDataType_t(Enum): ncclInt8 = 0 ncclChar = 0",
"class ncclUniqueId(Structure): _fields_=[(\"internal\", (c_int8 * 128))] class MPI_NCCL_Communicator(): def __init__(self, stream = None):",
"ncclGetUniqueId should be called once when creating a communicator and the Id should",
"self.localRank) def dlarrayNcclAllReduce(self, dlarray, datatype, reduceop, executor_stream = None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value), c_int(reduceop.value), self.ncclcomm,",
"lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value), c_int(src), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def ncclCommInitRank(self): ''' Use",
"arr, datatype, target, executor_stream = None): lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value), c_int(target), self.ncclcomm, executor_stream.handle if executor_stream",
"in MPI_Bcast, MPI_Reduce, MPI_Scatter, etc ncclcomm: the NCCL communicator, to use in ncclAllReduce",
"os.path.join(curr_path, '../../../build/lib/') path_to_so_file = os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\") lib = CDLL(path_to_so_file, RTLD_GLOBAL) return lib lib_mpi_nccl",
"getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks, self.myRank, ctypes.byref(self.localRank)) def ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm, self.localRank) def dlarrayNcclAllReduce(self, dlarray,",
"to use ncclallreduce self.myRank is the rank among these threads. the value must",
"device_id lib_mpi_nccl.setDevice(self.device_id.value) def ncclInit(self): self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId() self.ncclCommInitRank() def ncclFinish(self): self.MPI_Finalize() def mpi_nccl_communicator(): '''",
"self.ncclcomm = c_int64(0) self.nRanks = c_int32(0) self.myRank = c_int32(0) self.localRank = c_int32(-1) self.ncclId",
"if executor_stream else self.stream.handle) def dlarrayAllGather(self, input_arr, output_arr, datatype, executor_stream = None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle,",
"these threads. the value must in [0, self.nRank - 1] ''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks,",
"def MPI_Finalize(self): lib_mpi_nccl.MPIFinalize() def MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank)) def MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm),",
"def MPI_Init(self): lib_mpi_nccl.MPIInit() def MPI_Finalize(self): lib_mpi_nccl.MPIFinalize() def MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank))",
"creating a communicator and the Id should be distributed to all ranks in",
"self.MPI_Comm_rank() self.MPI_Comm_size() self.getLocalRank() self.device_id.value = self.localRank.value if stream == None: self.stream = create_stream_handle(ndarray.gpu(self.device_id.value))",
"executor_stream = None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value), c_int(reduceop.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def",
"ncclCommInitRank. stream: the stream for NCCL communication ''' self.mpicomm = c_int64(0) self.ncclcomm =",
"... nRanks: the total number of MPI threads myRanks: the rank in all",
"9 class ncclRedOp_t(Enum): ncclSum = 0 ncclProd = 1 ncclMax = 2 ncclMin",
"None): ''' mpicomm: the MPI communicator, to use in MPI_Bcast, MPI_Reduce, MPI_Scatter, etc",
"ndarray from athena.stream import * import numpy as np from enum import Enum",
"mpi_nccl_comm.py if __name__ == \"__main__\": t = mpi_nccl_communicator() t.ncclInit() arr = np.ones(16)*t.localRank.value print(\"before:",
"executor_stream else self.stream.handle) def dlarrayRecv(self, arr, datatype, src, executor_stream = None): lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value),",
"dlarrayAllGather(self, input_arr, output_arr, datatype, executor_stream = None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle, c_int(datatype.value), self.ncclcomm, executor_stream.handle if",
"datatype, executor_stream = None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle, c_int(datatype.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle)",
"change here. self.nRanks is the number of threads to use ncclallreduce self.myRank is",
"= c_int(0) self.MPI_Init() self.MPIGetComm() self.MPI_Comm_rank() self.MPI_Comm_size() self.getLocalRank() self.device_id.value = self.localRank.value if stream ==",
"rank among the MPI threads in this device ncclId: ncclGetUniqueId should be called",
"lib = CDLL(path_to_so_file, RTLD_GLOBAL) return lib lib_mpi_nccl = _load_nccl_lib() # lib_mpi_nccl = CDLL(\"./lib_mpi_nccl_runtime_api.so\",",
"self.myRank = c_int32(0) self.localRank = c_int32(-1) self.ncclId = ncclUniqueId() self.device_id = c_int(0) self.MPI_Init()",
"ncclUint8 = 1 ncclInt32 = 2 ncclInt = 2 ncclUint32 = 3 ncclInt64",
"0 ncclProd = 1 ncclMax = 2 ncclMin = 3 ncclNumOps = 4",
"= ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum) # t.dlarrayBroadcast(arr, ncclDataType_t.ncclFloat32, 0) # t.dlarrayAllGather(arr, output_arr, ncclDataType_t.ncclFloat32)",
"None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value), c_int(root), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayAllGather(self, input_arr,",
"ncclUint32 = 3 ncclInt64 = 4 ncclUint64 = 5 ncclFloat16 = 6 ncclHalf",
"self.MPIGetComm() self.MPI_Comm_rank() self.MPI_Comm_size() self.getLocalRank() self.device_id.value = self.localRank.value if stream == None: self.stream =",
"__init__(self, stream = None): ''' mpicomm: the MPI communicator, to use in MPI_Bcast,",
"lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value), c_int(root), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayAllGather(self, input_arr, output_arr,",
"__name__ == \"__main__\": t = mpi_nccl_communicator() t.ncclInit() arr = np.ones(16)*t.localRank.value print(\"before: = \",",
"partial AllReduce to change here. self.nRanks is the number of threads to use",
"MPI_Finalize(self): lib_mpi_nccl.MPIFinalize() def MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank)) def MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks))",
"= None): lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value), c_int(target), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayRecv(self,",
"\", arr) arr = ndarray.array(arr, ctx = ndarray.gpu(t.device_id.value)) output_arr = np.zeros(16 * t.nRanks.value)",
"self.ncclGetUniqueId() self.ncclCommInitRank() def ncclFinish(self): self.MPI_Finalize() def mpi_nccl_communicator(): ''' ''' return MPI_NCCL_Communicator() # NCCL_DEBUG=INFO",
"use in MPI_Bcast, MPI_Reduce, MPI_Scatter, etc ncclcomm: the NCCL communicator, to use in",
"executor_stream.handle if executor_stream else self.stream.handle) def dlarrayBroadcast(self, dlarray, datatype, root, executor_stream = None):",
"Id should be distributed to all ranks in the communicator before calling ncclCommInitRank.",
"ncclDataType_t(Enum): ncclInt8 = 0 ncclChar = 0 ncclUint8 = 1 ncclInt32 = 2",
"3 ncclNumOps = 4 class ncclUniqueId(Structure): _fields_=[(\"internal\", (c_int8 * 128))] class MPI_NCCL_Communicator(): def",
"def mpi_nccl_communicator(): ''' ''' return MPI_NCCL_Communicator() # NCCL_DEBUG=INFO mpirun --allow-run-as-root -np 4 python",
"threads to use ncclallreduce self.myRank is the rank among these threads. the value",
"build/lib.\"\"\" curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path = os.path.join(curr_path, '../../../build/lib/') path_to_so_file = os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\") lib",
"dlarraySend(self, arr, datatype, target, executor_stream = None): lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value), c_int(target), self.ncclcomm, executor_stream.handle if",
"lib_mpi_nccl = _load_nccl_lib() # lib_mpi_nccl = CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL) class ncclDataType_t(Enum): ncclInt8 = 0",
"ncclFloat64 = 8 ncclDouble = 8 ncclNumTypes = 9 class ncclRedOp_t(Enum): ncclSum =",
"ncclInt8 = 0 ncclChar = 0 ncclUint8 = 1 ncclInt32 = 2 ncclInt",
"lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value), c_int(target), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayRecv(self, arr, datatype,",
"executor_stream.handle if executor_stream else self.stream.handle) def ncclCommInitRank(self): ''' Use partial AllReduce to change",
"\"__main__\": t = mpi_nccl_communicator() t.ncclInit() arr = np.ones(16)*t.localRank.value print(\"before: = \", arr) arr",
"enum import Enum import os def _load_nccl_lib(): \"\"\"Load libary in build/lib.\"\"\" curr_path =",
"else: self.stream = stream def MPI_Init(self): lib_mpi_nccl.MPIInit() def MPI_Finalize(self): lib_mpi_nccl.MPIFinalize() def MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm))",
"ctypes.byref(self.ncclId), self.myRank, self.localRank) def ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm)) def ncclSetDevice(self, device_id): self.device_id.value = device_id lib_mpi_nccl.setDevice(self.device_id.value)",
"c_int(datatype.value), c_int(src), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def ncclCommInitRank(self): ''' Use partial",
"= os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path = os.path.join(curr_path, '../../../build/lib/') path_to_so_file = os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\") lib = CDLL(path_to_so_file,",
"the Id should be distributed to all ranks in the communicator before calling",
"def MPIGetComm(self): lib_mpi_nccl.MPIGetComm(ctypes.byref(self.mpicomm)) def MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank)) def MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks)) def getLocalRank(self):",
"datatype, target, executor_stream = None): lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value), c_int(target), self.ncclcomm, executor_stream.handle if executor_stream else",
"* 128))] class MPI_NCCL_Communicator(): def __init__(self, stream = None): ''' mpicomm: the MPI",
"rank among these threads. the value must in [0, self.nRank - 1] '''",
"executor_stream.handle if executor_stream else self.stream.handle) def dlarraySend(self, arr, datatype, target, executor_stream = None):",
"among these threads. the value must in [0, self.nRank - 1] ''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm),",
"arr) arr = ndarray.array(arr, ctx = ndarray.gpu(t.device_id.value)) output_arr = np.zeros(16 * t.nRanks.value) output_arr",
"[0, self.nRank - 1] ''' lib_mpi_nccl.initNcclCommRank(ctypes.byref(self.ncclcomm), self.nRanks, ctypes.byref(self.ncclId), self.myRank, self.localRank) def ncclCommDestroy(self): lib_mpi_nccl.commDestroyNccl(ctypes.byref(self.ncclcomm))",
"import * from athena import ndarray from athena.stream import * import numpy as",
"threads in this device ncclId: ncclGetUniqueId should be called once when creating a",
"\"lib_mpi_nccl_runtime_api.so\") lib = CDLL(path_to_so_file, RTLD_GLOBAL) return lib lib_mpi_nccl = _load_nccl_lib() # lib_mpi_nccl =",
"None): lib_mpi_nccl.dlarrayAllGather(input_arr.handle, output_arr.handle, c_int(datatype.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarraySend(self, arr,",
"self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarraySend(self, arr, datatype, target, executor_stream =",
"c_int64(0) self.nRanks = c_int32(0) self.myRank = c_int32(0) self.localRank = c_int32(-1) self.ncclId = ncclUniqueId()",
"self.stream.handle) def ncclCommInitRank(self): ''' Use partial AllReduce to change here. self.nRanks is the",
"in build/lib.\"\"\" curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path = os.path.join(curr_path, '../../../build/lib/') path_to_so_file = os.path.join(lib_path, \"lib_mpi_nccl_runtime_api.so\")",
"libary in build/lib.\"\"\" curr_path = os.path.dirname(os.path.abspath(os.path.expanduser(__file__))) lib_path = os.path.join(curr_path, '../../../build/lib/') path_to_so_file = os.path.join(lib_path,",
"to change here. self.nRanks is the number of threads to use ncclallreduce self.myRank",
"ncclInt64 = 4 ncclUint64 = 5 ncclFloat16 = 6 ncclHalf = 6 ncclFloat32",
"the MPI threads in this device ncclId: ncclGetUniqueId should be called once when",
"self.device_id.value = device_id lib_mpi_nccl.setDevice(self.device_id.value) def ncclInit(self): self.ncclSetDevice(self.device_id.value) self.ncclGetUniqueId() self.ncclCommInitRank() def ncclFinish(self): self.MPI_Finalize() def",
"the NCCL communicator, to use in ncclAllReduce ... nRanks: the total number of",
"def MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank)) def MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks)) def getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks, self.myRank,",
"c_int(reduceop.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayBroadcast(self, dlarray, datatype, root, executor_stream",
"if executor_stream else self.stream.handle) def ncclCommInitRank(self): ''' Use partial AllReduce to change here.",
"import numpy as np from enum import Enum import os def _load_nccl_lib(): \"\"\"Load",
"ncclProd = 1 ncclMax = 2 ncclMin = 3 ncclNumOps = 4 class",
"the rank in all MPI threads localRank: the rank among the MPI threads",
"= ncclUniqueId() self.device_id = c_int(0) self.MPI_Init() self.MPIGetComm() self.MPI_Comm_rank() self.MPI_Comm_size() self.getLocalRank() self.device_id.value = self.localRank.value",
"self.ncclId = ncclUniqueId() self.device_id = c_int(0) self.MPI_Init() self.MPIGetComm() self.MPI_Comm_rank() self.MPI_Comm_size() self.getLocalRank() self.device_id.value =",
"MPI_Comm_rank(self): lib_mpi_nccl.getMPICommRank(ctypes.byref(self.mpicomm), ctypes.byref(self.myRank)) def MPI_Comm_size(self): lib_mpi_nccl.getMPICommSize(ctypes.byref(self.mpicomm), ctypes.byref(self.nRanks)) def getLocalRank(self): lib_mpi_nccl.getLocalRank(ctypes.byref(self.mpicomm), self.nRanks, self.myRank, ctypes.byref(self.localRank))",
"c_int(root), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarrayAllGather(self, input_arr, output_arr, datatype, executor_stream",
"src, executor_stream = None): lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value), c_int(src), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle)",
"communicator, to use in MPI_Bcast, MPI_Reduce, MPI_Scatter, etc ncclcomm: the NCCL communicator, to",
"RTLD_GLOBAL) class ncclDataType_t(Enum): ncclInt8 = 0 ncclChar = 0 ncclUint8 = 1 ncclInt32",
"= ndarray.gpu(t.device_id.value)) output_arr = np.zeros(16 * t.nRanks.value) output_arr = ndarray.array(output_arr, ctx = ndarray.gpu(t.device_id.value))",
"c_int(datatype.value), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def dlarraySend(self, arr, datatype, target, executor_stream",
"is the rank among these threads. the value must in [0, self.nRank -",
"dlarray, datatype, reduceop, executor_stream = None): lib_mpi_nccl.dlarrayAllReduce(dlarray.handle, c_int(datatype.value), c_int(reduceop.value), self.ncclcomm, executor_stream.handle if executor_stream",
"output_arr = np.zeros(16 * t.nRanks.value) output_arr = ndarray.array(output_arr, ctx = ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32,",
"lib_mpi_nccl = CDLL(\"./lib_mpi_nccl_runtime_api.so\", RTLD_GLOBAL) class ncclDataType_t(Enum): ncclInt8 = 0 ncclChar = 0 ncclUint8",
"== None: self.stream = create_stream_handle(ndarray.gpu(self.device_id.value)) else: self.stream = stream def MPI_Init(self): lib_mpi_nccl.MPIInit() def",
"executor_stream else self.stream.handle) def dlarrayBroadcast(self, dlarray, datatype, root, executor_stream = None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value),",
"= 0 ncclUint8 = 1 ncclInt32 = 2 ncclInt = 2 ncclUint32 =",
"self.nRanks is the number of threads to use ncclallreduce self.myRank is the rank",
"ndarray.array(output_arr, ctx = ndarray.gpu(t.device_id.value)) t.dlarrayNcclAllReduce(arr, ncclDataType_t.ncclFloat32, ncclRedOp_t.ncclSum) # t.dlarrayBroadcast(arr, ncclDataType_t.ncclFloat32, 0) # t.dlarrayAllGather(arr,",
"rank in all MPI threads localRank: the rank among the MPI threads in",
"ncclRedOp_t.ncclSum) # t.dlarrayBroadcast(arr, ncclDataType_t.ncclFloat32, 0) # t.dlarrayAllGather(arr, output_arr, ncclDataType_t.ncclFloat32) print(\"after: = \", arr.asnumpy())",
"ctypes.byref(self.localRank)) def ncclGetUniqueId(self): lib_mpi_nccl.getNcclUniqueId(ctypes.byref(self.ncclId), self.mpicomm, self.localRank) def dlarrayNcclAllReduce(self, dlarray, datatype, reduceop, executor_stream =",
"c_int64(0) self.ncclcomm = c_int64(0) self.nRanks = c_int32(0) self.myRank = c_int32(0) self.localRank = c_int32(-1)",
"= \", arr) arr = ndarray.array(arr, ctx = ndarray.gpu(t.device_id.value)) output_arr = np.zeros(16 *",
"arr = np.ones(16)*t.localRank.value print(\"before: = \", arr) arr = ndarray.array(arr, ctx = ndarray.gpu(t.device_id.value))",
"root, executor_stream = None): lib_mpi_nccl.dlarrayBroadcast(dlarray.handle, c_int(datatype.value), c_int(root), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle)",
"MPI_Reduce, MPI_Scatter, etc ncclcomm: the NCCL communicator, to use in ncclAllReduce ... nRanks:",
"this device ncclId: ncclGetUniqueId should be called once when creating a communicator and",
"= None): lib_mpi_nccl.dlarrayRecv(arr.handle, c_int(datatype.value), c_int(src), self.ncclcomm, executor_stream.handle if executor_stream else self.stream.handle) def ncclCommInitRank(self):",
"c_int32(0) self.localRank = c_int32(-1) self.ncclId = ncclUniqueId() self.device_id = c_int(0) self.MPI_Init() self.MPIGetComm() self.MPI_Comm_rank()",
"1 ncclInt32 = 2 ncclInt = 2 ncclUint32 = 3 ncclInt64 = 4",
"6 ncclHalf = 6 ncclFloat32 = 7 ncclFloat = 7 ncclFloat64 = 8",
"in all MPI threads localRank: the rank among the MPI threads in this",
"executor_stream else self.stream.handle) def dlarraySend(self, arr, datatype, target, executor_stream = None): lib_mpi_nccl.dlarraySend(arr.handle, c_int(datatype.value),"
] |
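The demo above only exercises dlarrayNcclAllReduce (the broadcast and all-gather calls are commented out). As a minimal sketch of how the point-to-point wrappers could be paired, assuming the same athena build, at least two ranks (e.g. mpirun -np 2), and that dlarraySend/dlarrayRecv synchronize on the communicator's stream like the collectives do; the buffer size and dtype are illustrative, not from the original module:

# Hypothetical point-to-point exchange between rank 0 and rank 1.
# Run with: mpirun --allow-run-as-root -np 2 python p2p_demo.py
comm = mpi_nccl_communicator()
comm.ncclInit()
buf = ndarray.array(np.full(8, comm.myRank.value, dtype=np.float32),
                    ctx=ndarray.gpu(comm.device_id.value))
if comm.myRank.value == 0:
    # rank 0 ships its buffer (all zeros) to rank 1
    comm.dlarraySend(buf, ncclDataType_t.ncclFloat32, 1)
elif comm.myRank.value == 1:
    # rank 1 overwrites its buffer (all ones) with rank 0's data
    comm.dlarrayRecv(buf, ncclDataType_t.ncclFloat32, 0)
    print("rank 1 received:", buf.asnumpy())
comm.ncclFinish()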
# Generated by Django 2.1.7 on 2019-03-27 13:01

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Player',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('session_id', models.CharField(db_index=True, max_length=20, unique=True)),
                ('email', models.EmailField(blank=True, max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='SolvedHiddenPuzzle',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('puzzle', models.CharField(choices=[('rot13', 'rot13'), ('sky', 'sky'), ('image', 'image'), ('terminal', 'terminal'), ('redirect', 'redirect'), ('login', 'login'), ('pages', 'pages'), ('audio_spectrum', 'audio_spectrum'), ('keypad', 'keypad'), ('vigenere', 'vigenere'), ('stego_mix', 'stego_mix'), ('reverse', 'reverse'), ('finish', 'finish')], max_length=40)),
                ('timestamp', models.DateTimeField()),
                ('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='game.Player')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='SolvedPuzzle',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('puzzle', models.CharField(choices=[('rot13', 'rot13'), ('sky', 'sky'), ('image', 'image'), ('terminal', 'terminal'), ('redirect', 'redirect'), ('login', 'login'), ('pages', 'pages'), ('audio_spectrum', 'audio_spectrum'), ('keypad', 'keypad'), ('vigenere', 'vigenere'), ('stego_mix', 'stego_mix'), ('reverse', 'reverse'), ('finish', 'finish')], max_length=40)),
                ('timestamp', models.DateTimeField()),
                ('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='game.Player')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AlterIndexTogether(
            name='solvedpuzzle',
            index_together={('player', 'puzzle')},
        ),
        migrations.AlterIndexTogether(
            name='solvedhiddenpuzzle',
            index_together={('player', 'puzzle')},
        ),
    ]
"models.DateTimeField()), ('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='game.Player')), ], options={ 'abstract': False, }, ), migrations.AlterIndexTogether( name='solvedpuzzle', index_together={('player',",
"('timestamp', models.DateTimeField()), ('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='game.Player')), ], options={ 'abstract': False, }, ), migrations.AlterIndexTogether( name='solvedpuzzle',",
"('keypad', 'keypad'), ('vigenere', 'vigenere'), ('stego_mix', 'stego_mix'), ('reverse', 'reverse'), ('finish', 'finish')], max_length=40)), ('timestamp', models.DateTimeField()),",
"by Django 2.1.7 on 2019-03-27 13:01 from django.db import migrations, models import django.db.models.deletion",
"migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ]",
"'sky'), ('image', 'image'), ('terminal', 'terminal'), ('redirect', 'redirect'), ('login', 'login'), ('pages', 'pages'), ('audio_spectrum', 'audio_spectrum'),",
"'keypad'), ('vigenere', 'vigenere'), ('stego_mix', 'stego_mix'), ('reverse', 'reverse'), ('finish', 'finish')], max_length=40)), ('timestamp', models.DateTimeField()), ('player',",
"models.DateTimeField()), ('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='game.Player')), ], options={ 'abstract': False, }, ), migrations.CreateModel( name='SolvedPuzzle', fields=[",
"max_length=40)), ('timestamp', models.DateTimeField()), ('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='game.Player')), ], options={ 'abstract': False, }, ), migrations.CreateModel(",
"models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='game.Player')), ], options={ 'abstract': False, }, ), migrations.AlterIndexTogether( name='solvedpuzzle', index_together={('player', 'puzzle')}, ),",
"'redirect'), ('login', 'login'), ('pages', 'pages'), ('audio_spectrum', 'audio_spectrum'), ('keypad', 'keypad'), ('vigenere', 'vigenere'), ('stego_mix', 'stego_mix'),",
"= [ ] operations = [ migrations.CreateModel( name='Player', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False,",
"verbose_name='ID')), ('session_id', models.CharField(db_index=True, max_length=20, unique=True)), ('email', models.EmailField(blank=True, max_length=100)), ], ), migrations.CreateModel( name='SolvedHiddenPuzzle', fields=[",
"('image', 'image'), ('terminal', 'terminal'), ('redirect', 'redirect'), ('login', 'login'), ('pages', 'pages'), ('audio_spectrum', 'audio_spectrum'), ('keypad',",
"models.CharField(db_index=True, max_length=20, unique=True)), ('email', models.EmailField(blank=True, max_length=100)), ], ), migrations.CreateModel( name='SolvedHiddenPuzzle', fields=[ ('id', models.AutoField(auto_created=True,",
"}, ), migrations.CreateModel( name='SolvedPuzzle', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('puzzle', models.CharField(choices=[('rot13', 'rot13'),",
"('sky', 'sky'), ('image', 'image'), ('terminal', 'terminal'), ('redirect', 'redirect'), ('login', 'login'), ('pages', 'pages'), ('audio_spectrum',"
] |
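For reference, the initial migration above implies a game/models.py along these lines. This is a sketch inferred from the migration only: the names PUZZLES, PUZZLE_CHOICES and SolvedBase are assumptions (the 'abstract': False options and the identical field lists suggest some shared abstract parent), while the fields and the (player, puzzle) index come straight from the operations.

# Hypothetical game/models.py consistent with the migration above.
# PUZZLES, PUZZLE_CHOICES and SolvedBase are illustrative names; only the
# fields and the index are taken from the migration itself.
from django.db import models

PUZZLES = ['rot13', 'sky', 'image', 'terminal', 'redirect', 'login', 'pages',
           'audio_spectrum', 'keypad', 'vigenere', 'stego_mix', 'reverse', 'finish']
PUZZLE_CHOICES = [(p, p) for p in PUZZLES]


class Player(models.Model):
    # Players are identified by an indexed, unique session token, not an account.
    session_id = models.CharField(db_index=True, max_length=20, unique=True)
    email = models.EmailField(blank=True, max_length=100)


class SolvedBase(models.Model):
    # Shared shape of both solve tables; one row per (player, puzzle) solve event.
    puzzle = models.CharField(choices=PUZZLE_CHOICES, max_length=40)
    timestamp = models.DateTimeField()
    player = models.ForeignKey(Player, on_delete=models.CASCADE)

    class Meta:
        abstract = True
        index_together = [('player', 'puzzle')]


class SolvedPuzzle(SolvedBase):
    pass


class SolvedHiddenPuzzle(SolvedBase):
    pass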
# Emberwalker/Arke
from flask import Blueprint, render_template, abort, request, redirect, url_for
from flask.ext.login import LoginManager, UserMixin, login_required, current_user, login_user, logout_user
from werkzeug.security import generate_password_hash, check_password_hash
from db import User, get_user, has_superuser, write_to_db, delete_from_db, get_all
from settings import SECRET_KEY

auth = Blueprint('auth', __name__, template_folder='templates')

# Auth Helpers/Flask-Login
auth_sys = LoginManager()


def setup_auth(app):
    auth_sys.init_app(app)


# Use the UserMixin from Flask-Login to make this easy.
class FLUser(UserMixin):
    def __init__(self, user):
        self.user = user

    def get_id(self):
        return self.user.username

    # Redirect missing attributes to the User object
    def __getattr__(self, name):
        return getattr(self.user, name)


@auth_sys.user_loader
def load_user(uname):
    user = get_user(uname)
    if user:
        return FLUser(user)
    return None


@auth_sys.unauthorized_handler
def unauthorized():
    return redirect('/login')


class UserExists(ValueError):
    pass


class NoPermissionError(Exception):
    pass


def create_user(name, password, sudo=False):
    if not name or not password or len(name) < 3 or len(password) < 4 or name.isdigit():
        # Disallow unames that are numbers to avoid confusing the ID catcher
        raise ValueError()
    if get_user(name.lower()):
        raise UserExists()
    u = User(username=name.lower(), password=generate_password_hash(password))
    u.is_superuser = sudo
    write_to_db(u)


def check_password(user, password):
    return check_password_hash(user.password, password)


# Flask Routing
@auth.route('/logout')
@login_required
def logout():
    logout_user()
    return render_template('auth/logout.html')


@auth.route('/login', methods=['GET', 'POST'])
def login():
    if current_user.is_authenticated():
        return redirect('/')
    if not has_superuser():
        return redirect('/firstrun')
    errs = None
    if request.method == 'POST':
        try:
            uname = request.form['username'].lower()
            user = get_user(uname)
            assert user
            assert check_password(user, request.form['password'])
            remember = False
            if 'remember' in request.form:
                remember = True
            login_user(FLUser(user))
            return redirect('/')
        except Exception as ex:
            errs = ["Incorrect username/password."]
    return render_template('auth/login.html', errors=errs)


@auth.route('/firstrun', methods=['GET', 'POST'])
def firstrun():
    if has_superuser():
        return redirect('/login')
    errs = None
    if request.method == 'POST':
        try:
            assert request.form['password'] == request.form['password-confirm']
            uname = request.form['username'].lower()
            create_user(uname, request.form['password'], sudo=True)
            return render_template('auth/setup_complete.html')
        except Exception as ex:
            errs = ["Invalid credentials. Mismatching passwords?"]
    return render_template('auth/firstrun.html', errors=errs)


ERR_USER_EXISTS = "User already exists; perhaps you wanted <a href=\"/manage-accounts\">account management</a>?"


@auth.route('/create-user', methods=['GET', 'POST'])
@login_required
def create_user_page():
    if not current_user.is_superuser:
        return redirect('/')
    errs = None
    info = None
    if request.method == 'POST':
        try:
            assert request.form['password'] == request.form['password-confirm']
            uname = request.form['username']
            admin = True if 'superuser' in request.form else False
            create_user(uname, request.form['password'], sudo=admin)
            info = "User '{}' created.".format(uname)
        except UserExists:
            errs = [ERR_USER_EXISTS]
        except Exception as ex:
            errs = ["User creation failed; mismatching passwords?"]
    return render_template('auth/create_user.html', errors=errs, info=info)


@auth.route('/manage-accounts')
@login_required
def manage_accounts():
    if not current_user.is_superuser:
        return redirect('/')
    info = request.args.get('info', None)
    errs = []
    return render_template('auth/manage.html', users=get_all(User), errors=errs, info=info)


@auth.route('/users/destroy', methods=['GET', 'POST'])
@login_required
def destroy_user():
    if not current_user.is_superuser:
        return redirect('/')
    uid = request.args.get('uid', None)
    if not uid:
        return redirect(url_for('auth.manage_accounts'))
    user = get_user(uid)
    errs = None
    if not user:
        return redirect(url_for('auth.manage_accounts'))
    if request.method == 'POST':
        # Actually destroy
        uname = user.username
        try:
            delete_from_db(user)
            return redirect(url_for('auth.manage_accounts', info="User {} deleted.".format(uname)))
        except Exception as ex:
            errs = [str(ex)]
    return render_template('auth/destroy.html', user=user, errors=errs)


@auth.route('/users/change-password', methods=['GET', 'POST'])
@login_required
def change_password():
    uid = request.args.get('uid', None)
    user = None
    if uid and current_user.is_superuser:
        user = get_user(uid)
    else:
        user = current_user
    errs = None
    info = None
    if request.method == 'POST':
        try:
            uname = request.form['username']
            pw1 = request.form['password']
            pw2 = request.form['password-confirm']
            assert pw1 == pw2
            if not uname == current_user.username and not current_user.is_superuser:
                raise NoPermissionError()
            u = get_user(uname)
            if len(pw1) < 4:
                raise ValueError()
            u.password = generate_password_hash(pw1)
            write_to_db(u)
            info = "Password changed for '{}'".format(uname)
        except NoPermissionError:
            errs = ["Permission denied."]
        except Exception as ex:
            print(ex)
            errs = ["Password change failed; mismatching passwords?"]
    return render_template('auth/change_password.html', user=user, errors=errs, info=info)


@auth.route('/users/promote', methods=['GET', 'POST'])
@login_required
def promote_user():
    if not current_user.is_superuser:
        return redirect('/')
    uid = request.args.get('uid', None)
    if not uid:
        return redirect(url_for('auth.manage_accounts'))
    user = get_user(uid)
    if not user:
        return redirect(url_for('auth.manage_accounts'))
    if request.method == 'POST':
        user.is_superuser = True
        uname = user.username
        write_to_db(user)
        return redirect(url_for('auth.manage_accounts', info="{} promoted.".format(uname)))
    return render_template('auth/promote.html', user=user)


@auth.route('/users/demote', methods=['GET', 'POST'])
@login_required
def demote_user():
    if not current_user.is_superuser:
        return redirect('/')
    uid = request.args.get('uid', None)
    if not uid:
        return redirect(url_for('auth.manage_accounts'))
    user = get_user(uid)
    if not user:
        return redirect(url_for('auth.manage_accounts'))
    if request.method == 'POST':
        user.is_superuser = False
        uname = user.username
        write_to_db(user)
        return redirect(url_for('auth.manage_accounts', info="{} demoted.".format(uname)))
    # The recovered source cuts off after this final return; the template name
    # below is assumed by symmetry with promote_user.
    return render_template('auth/demote.html', user=user)
"user = get_user(uname) assert user assert check_password(user, request.form['password']) remember = False if 'remember'",
"try: assert request.form['password'] == request.form['password-confirm'] uname = request.form['username'] admin = True if 'superuser'",
"self.user = user def get_id(self): return self.user.username # Redirect missing attributes to the",
"logout(): logout_user() return render_template('auth/logout.html') @auth.route('/login', methods=['GET', 'POST']) def login(): if current_user.is_authenticated(): return redirect('/')",
"sudo=False): if not name or not password or len(name) < 3 or len(password)",
"as ex: errs = [\"Invalid credentials. Mismatching passwords?\"] return render_template('auth/firstrun.html', errors=errs) ERR_USER_EXISTS =",
"not user: return redirect(url_for('auth.manage_accounts')) if request.method == 'POST': user.is_superuser = False uname =",
"current_user.is_superuser: raise NoPermissionError() u = get_user(uname) if len(pw1) < 4: raise ValueError() u.password",
"for '{}'\".format(uname) except NoPermissionError: errs = [\"Permission denied.\"] except Exception as ex: print(ex)",
"delete_from_db(user) return redirect(url_for('auth.manage_accounts', info=\"User {} deleted.\".format(uname))) except Exception as ex: errs = [str(ex)]",
"errs = [\"Incorrect username/password.\"] return render_template('auth/login.html', errors=errs) @auth.route('/firstrun', methods=['GET', 'POST']) def firstrun(): if",
"this easy. class FLUser(UserMixin): def __init__(self, user): self.user = user def get_id(self): return",
"Exception as ex: print(ex) errs = [\"Password change failed; mismatching passwords?\"] return render_template('auth/change_password.html',",
"Helpers/Flask-Login auth_sys = LoginManager() def setup_auth(app): auth_sys.init_app(app) # Use the UserMixin from Flask-Login",
"except Exception as ex: print(ex) errs = [\"Password change failed; mismatching passwords?\"] return",
"return redirect(url_for('auth.manage_accounts', info=\"{} promoted.\".format(uname))) return render_template('auth/promote.html', user=user) @auth.route('/users/demote', methods=['GET', 'POST']) @login_required def demote_user():",
"load_user(uname): user = get_user(uname) if user: return FLUser(user) return None @auth_sys.unauthorized_handler def unauthorized():",
"None if request.method == 'POST': try: uname = request.form['username'] pw1 = request.form['password'] pw2",
"password or len(name) < 3 or len(password) < 4 or name.isdigit(): # Disallow",
"remember = True login_user(FLUser(user)) return redirect('/') except Exception as ex: errs = [\"Incorrect",
"def setup_auth(app): auth_sys.init_app(app) # Use the UserMixin from Flask-Login to make this easy.",
"def manage_accounts(): if not current_user.is_superuser: return redirect('/') info = request.args.get('info', None) errs =",
"render_template('auth/login.html', errors=errs) @auth.route('/firstrun', methods=['GET', 'POST']) def firstrun(): if has_superuser(): return redirect('/login') errs =",
"not password or len(name) < 3 or len(password) < 4 or name.isdigit(): #",
"if not uid: return redirect(url_for('auth.manage_accounts')) user = get_user(uid) if not user: return redirect(url_for('auth.manage_accounts'))",
"request.args.get('uid', None) if not uid: return redirect(url_for('auth.manage_accounts')) user = get_user(uid) if not user:",
"uname = user.username write_to_db(user) return redirect(url_for('auth.manage_accounts', info=\"{} promoted.\".format(uname))) return render_template('auth/promote.html', user=user) @auth.route('/users/demote', methods=['GET',",
"errs = [str(ex)] return render_template('auth/destroy.html', user=user, errors=errs) @auth.route('/users/change-password', methods=['GET', 'POST']) @login_required def change_password():",
"admin = True if 'superuser' in request.form else False create_user(uname, request.form['password'], sudo=admin) info",
"return render_template('auth/change_password.html', user=user, errors=errs, info=info) @auth.route('/users/promote', methods=['GET', 'POST']) @login_required def promote_user(): if not",
"errors=errs, info=info) @auth.route('/users/promote', methods=['GET', 'POST']) @login_required def promote_user(): if not current_user.is_superuser: return redirect('/')",
"request.method == 'POST': user.is_superuser = False uname = user.username write_to_db(user) return redirect(url_for('auth.manage_accounts', info=\"{}",
"uid = request.args.get('uid', None) if not uid: return redirect(url_for('auth.manage_accounts')) user = get_user(uid) errs",
"except Exception as ex: errs = [\"User creation failed; mismatching passwords?\"] return render_template('auth/create_user.html',",
"if not user: return redirect(url_for('auth.manage_accounts')) if request.method == 'POST': user.is_superuser = False uname",
"redirect(url_for('auth.manage_accounts', info=\"{} promoted.\".format(uname))) return render_template('auth/promote.html', user=user) @auth.route('/users/demote', methods=['GET', 'POST']) @login_required def demote_user(): if",
"promote_user(): if not current_user.is_superuser: return redirect('/') uid = request.args.get('uid', None) if not uid:",
"NoPermissionError() u = get_user(uname) if len(pw1) < 4: raise ValueError() u.password = generate_password_hash(pw1)",
"methods=['GET', 'POST']) def login(): if current_user.is_authenticated(): return redirect('/') if not has_superuser(): return redirect('/firstrun')",
"as ex: errs = [\"Incorrect username/password.\"] return render_template('auth/login.html', errors=errs) @auth.route('/firstrun', methods=['GET', 'POST']) def",
"{} deleted.\".format(uname))) except Exception as ex: errs = [str(ex)] return render_template('auth/destroy.html', user=user, errors=errs)",
"'{}'\".format(uname) except NoPermissionError: errs = [\"Permission denied.\"] except Exception as ex: print(ex) errs",
"Exception as ex: errs = [\"Invalid credentials. Mismatching passwords?\"] return render_template('auth/firstrun.html', errors=errs) ERR_USER_EXISTS",
"missing attributes to the User object def __getattr__(self, name): return getattr(self.user, name) @auth_sys.user_loader",
"== current_user.username and not current_user.is_superuser: raise NoPermissionError() u = get_user(uname) if len(pw1) <",
"if not uid: return redirect(url_for('auth.manage_accounts')) user = get_user(uid) errs = None if not",
"assert user assert check_password(user, request.form['password']) remember = False if 'remember' in request.form: remember",
"try: assert request.form['password'] == request.form['password-confirm'] uname = request.form['username'].lower() create_user(uname, request.form['password'], sudo=True) return render_template('auth/setup_complete.html')",
"== 'POST': user.is_superuser = False uname = user.username write_to_db(user) return redirect(url_for('auth.manage_accounts', info=\"{} demoted.\".format(uname)))",
"True if 'superuser' in request.form else False create_user(uname, request.form['password'], sudo=admin) info = \"User",
"methods=['GET', 'POST']) @login_required def demote_user(): if not current_user.is_superuser: return redirect('/') uid = request.args.get('uid',",
"remember = False if 'remember' in request.form: remember = True login_user(FLUser(user)) return redirect('/')",
"return redirect('/') except Exception as ex: errs = [\"Incorrect username/password.\"] return render_template('auth/login.html', errors=errs)",
"Auth Helpers/Flask-Login auth_sys = LoginManager() def setup_auth(app): auth_sys.init_app(app) # Use the UserMixin from",
"and not current_user.is_superuser: raise NoPermissionError() u = get_user(uname) if len(pw1) < 4: raise",
"get_user(uid) else: user = current_user errs = None info = None if request.method",
"uid: return redirect(url_for('auth.manage_accounts')) user = get_user(uid) errs = None if not user: return",
"UserExists(ValueError): pass class NoPermissionError(Exception): pass def create_user(name, password, sudo=False): if not name or",
"get_user(uid) if not user: return redirect(url_for('auth.manage_accounts')) if request.method == 'POST': user.is_superuser = True",
"return render_template('auth/firstrun.html', errors=errs) ERR_USER_EXISTS = \"User already exists; perhaps you wanted <a href=\\\"/manage-accounts\\\">account",
"'POST']) @login_required def create_user_page(): if not current_user.is_superuser: return redirect('/') errs = None info",
"redirect(url_for('auth.manage_accounts')) if request.method == 'POST': user.is_superuser = False uname = user.username write_to_db(user) return",
"uname = request.form['username'] pw1 = request.form['password'] pw2 = request.form['password-confirm'] assert pw1 == pw2",
"if uid and current_user.is_superuser: user = get_user(uid) else: user = current_user errs =",
"demote_user(): if not current_user.is_superuser: return redirect('/') uid = request.args.get('uid', None) if not uid:",
"if not current_user.is_superuser: return redirect('/') info = request.args.get('info', None) errs = [] return",
"check_password(user, password): return check_password_hash(user.password, password) # Flask Routing @auth.route('/logout') @login_required def logout(): logout_user()",
"\"Password changed for '{}'\".format(uname) except NoPermissionError: errs = [\"Permission denied.\"] except Exception as",
"flask import Blueprint, render_template, abort, request, redirect, url_for from flask.ext.login import LoginManager, UserMixin,",
"None) if not uid: return redirect(url_for('auth.manage_accounts')) user = get_user(uid) errs = None if",
"creation failed; mismatching passwords?\"] return render_template('auth/create_user.html', errors=errs, info=info) @auth.route('/manage-accounts') @login_required def manage_accounts(): if",
"= None if request.method == 'POST': try: uname = request.form['username'] pw1 = request.form['password']",
"get_user(uname) if len(pw1) < 4: raise ValueError() u.password = generate_password_hash(pw1) write_to_db(u) info =",
"uid and current_user.is_superuser: user = get_user(uid) else: user = current_user errs = None",
"Flask-Login to make this easy. class FLUser(UserMixin): def __init__(self, user): self.user = user",
"sudo=admin) info = \"User '{}' created.\".format(uname) except UserExists: errs = [ERR_USER_EXISTS] except Exception",
"[] return render_template('auth/manage.html', users=get_all(User), errors=errs, info=info) @auth.route('/users/destroy', methods=['GET', 'POST']) @login_required def destroy_user(): if",
"change failed; mismatching passwords?\"] return render_template('auth/change_password.html', user=user, errors=errs, info=info) @auth.route('/users/promote', methods=['GET', 'POST']) @login_required",
"credentials. Mismatching passwords?\"] return render_template('auth/firstrun.html', errors=errs) ERR_USER_EXISTS = \"User already exists; perhaps you",
"assert request.form['password'] == request.form['password-confirm'] uname = request.form['username'] admin = True if 'superuser' in",
"info=info) @auth.route('/users/promote', methods=['GET', 'POST']) @login_required def promote_user(): if not current_user.is_superuser: return redirect('/') uid",
"return check_password_hash(user.password, password) # Flask Routing @auth.route('/logout') @login_required def logout(): logout_user() return render_template('auth/logout.html')",
"return render_template('auth/logout.html') @auth.route('/login', methods=['GET', 'POST']) def login(): if current_user.is_authenticated(): return redirect('/') if not",
"def change_password(): uid = request.args.get('uid', None) user = None if uid and current_user.is_superuser:",
"wanted <a href=\\\"/manage-accounts\\\">account management</a>?\" @auth.route('/create-user', methods=['GET', 'POST']) @login_required def create_user_page(): if not current_user.is_superuser:",
"def __init__(self, user): self.user = user def get_id(self): return self.user.username # Redirect missing",
"== 'POST': user.is_superuser = True uname = user.username write_to_db(user) return redirect(url_for('auth.manage_accounts', info=\"{} promoted.\".format(uname)))",
"uname = request.form['username'].lower() create_user(uname, request.form['password'], sudo=True) return render_template('auth/setup_complete.html') except Exception as ex: errs",
"return None @auth_sys.unauthorized_handler def unauthorized(): return redirect('/login') class UserExists(ValueError): pass class NoPermissionError(Exception): pass",
"'POST']) @login_required def destroy_user(): if not current_user.is_superuser: return redirect('/') uid = request.args.get('uid', None)",
"def get_id(self): return self.user.username # Redirect missing attributes to the User object def",
"request.form['username'].lower() create_user(uname, request.form['password'], sudo=True) return render_template('auth/setup_complete.html') except Exception as ex: errs = [\"Invalid",
"= True uname = user.username write_to_db(user) return redirect(url_for('auth.manage_accounts', info=\"{} promoted.\".format(uname))) return render_template('auth/promote.html', user=user)",
"as ex: errs = [str(ex)] return render_template('auth/destroy.html', user=user, errors=errs) @auth.route('/users/change-password', methods=['GET', 'POST']) @login_required",
"u = get_user(uname) if len(pw1) < 4: raise ValueError() u.password = generate_password_hash(pw1) write_to_db(u)",
"info = \"User '{}' created.\".format(uname) except UserExists: errs = [ERR_USER_EXISTS] except Exception as",
"Exception as ex: errs = [\"User creation failed; mismatching passwords?\"] return render_template('auth/create_user.html', errors=errs,",
"not uid: return redirect(url_for('auth.manage_accounts')) user = get_user(uid) if not user: return redirect(url_for('auth.manage_accounts')) if",
"already exists; perhaps you wanted <a href=\\\"/manage-accounts\\\">account management</a>?\" @auth.route('/create-user', methods=['GET', 'POST']) @login_required def",
"create_user(uname, request.form['password'], sudo=True) return render_template('auth/setup_complete.html') except Exception as ex: errs = [\"Invalid credentials.",
"None info = None if request.method == 'POST': try: assert request.form['password'] == request.form['password-confirm']",
"you wanted <a href=\\\"/manage-accounts\\\">account management</a>?\" @auth.route('/create-user', methods=['GET', 'POST']) @login_required def create_user_page(): if not",
"\"User '{}' created.\".format(uname) except UserExists: errs = [ERR_USER_EXISTS] except Exception as ex: errs",
"failed; mismatching passwords?\"] return render_template('auth/change_password.html', user=user, errors=errs, info=info) @auth.route('/users/promote', methods=['GET', 'POST']) @login_required def",
"render_template, abort, request, redirect, url_for from flask.ext.login import LoginManager, UserMixin, login_required, current_user, login_user,",
"redirect('/firstrun') errs = None if request.method == 'POST': try: uname = request.form['username'].lower() user",
"= [\"User creation failed; mismatching passwords?\"] return render_template('auth/create_user.html', errors=errs, info=info) @auth.route('/manage-accounts') @login_required def",
"info = \"Password changed for '{}'\".format(uname) except NoPermissionError: errs = [\"Permission denied.\"] except",
"url_for from flask.ext.login import LoginManager, UserMixin, login_required, current_user, login_user, logout_user from werkzeug.security import",
"FLUser(UserMixin): def __init__(self, user): self.user = user def get_id(self): return self.user.username # Redirect",
"perhaps you wanted <a href=\\\"/manage-accounts\\\">account management</a>?\" @auth.route('/create-user', methods=['GET', 'POST']) @login_required def create_user_page(): if",
"render_template('auth/create_user.html', errors=errs, info=info) @auth.route('/manage-accounts') @login_required def manage_accounts(): if not current_user.is_superuser: return redirect('/') info",
"write_to_db(user) return redirect(url_for('auth.manage_accounts', info=\"{} promoted.\".format(uname))) return render_template('auth/promote.html', user=user) @auth.route('/users/demote', methods=['GET', 'POST']) @login_required def",
"False create_user(uname, request.form['password'], sudo=admin) info = \"User '{}' created.\".format(uname) except UserExists: errs =",
"return redirect(url_for('auth.manage_accounts')) if request.method == 'POST': user.is_superuser = True uname = user.username write_to_db(user)",
"= False uname = user.username write_to_db(user) return redirect(url_for('auth.manage_accounts', info=\"{} demoted.\".format(uname))) return render_template('auth/demote.html', user=user)",
"redirect(url_for('auth.manage_accounts')) if request.method == 'POST': # Actually destroy uname = user.username try: delete_from_db(user)",
"else: user = current_user errs = None info = None if request.method ==",
"@auth.route('/firstrun', methods=['GET', 'POST']) def firstrun(): if has_superuser(): return redirect('/login') errs = None if",
"None) errs = [] return render_template('auth/manage.html', users=get_all(User), errors=errs, info=info) @auth.route('/users/destroy', methods=['GET', 'POST']) @login_required",
"destroy_user(): if not current_user.is_superuser: return redirect('/') uid = request.args.get('uid', None) if not uid:",
"'POST']) @login_required def demote_user(): if not current_user.is_superuser: return redirect('/') uid = request.args.get('uid', None)",
"= User(username=name.lower(), password=generate_password_hash(password)) u.is_superuser = sudo write_to_db(u) def check_password(user, password): return check_password_hash(user.password, password)",
"info = None if request.method == 'POST': try: assert request.form['password'] == request.form['password-confirm'] uname",
"= get_user(uid) if not user: return redirect(url_for('auth.manage_accounts')) if request.method == 'POST': user.is_superuser =",
"assert check_password(user, request.form['password']) remember = False if 'remember' in request.form: remember = True",
"raise NoPermissionError() u = get_user(uname) if len(pw1) < 4: raise ValueError() u.password =",
"= get_user(uname) if user: return FLUser(user) return None @auth_sys.unauthorized_handler def unauthorized(): return redirect('/login')",
"or name.isdigit(): # Disallow unames that are numbers to avoid confusing the ID",
"uid = request.args.get('uid', None) if not uid: return redirect(url_for('auth.manage_accounts')) user = get_user(uid) if",
"errs = None if request.method == 'POST': try: uname = request.form['username'].lower() user =",
"info=info) @auth.route('/manage-accounts') @login_required def manage_accounts(): if not current_user.is_superuser: return redirect('/') info = request.args.get('info',",
"not has_superuser(): return redirect('/firstrun') errs = None if request.method == 'POST': try: uname",
"user assert check_password(user, request.form['password']) remember = False if 'remember' in request.form: remember =",
"try: uname = request.form['username'] pw1 = request.form['password'] pw2 = request.form['password-confirm'] assert pw1 ==",
"= Blueprint('auth', __name__, template_folder='templates') # Auth Helpers/Flask-Login auth_sys = LoginManager() def setup_auth(app): auth_sys.init_app(app)",
"delete_from_db, get_all from settings import SECRET_KEY auth = Blueprint('auth', __name__, template_folder='templates') # Auth",
"create_user_page(): if not current_user.is_superuser: return redirect('/') errs = None info = None if",
"= [str(ex)] return render_template('auth/destroy.html', user=user, errors=errs) @auth.route('/users/change-password', methods=['GET', 'POST']) @login_required def change_password(): uid",
"as ex: print(ex) errs = [\"Password change failed; mismatching passwords?\"] return render_template('auth/change_password.html', user=user,",
"render_template('auth/logout.html') @auth.route('/login', methods=['GET', 'POST']) def login(): if current_user.is_authenticated(): return redirect('/') if not has_superuser():",
"class NoPermissionError(Exception): pass def create_user(name, password, sudo=False): if not name or not password",
"db import User, get_user, has_superuser, write_to_db, delete_from_db, get_all from settings import SECRET_KEY auth",
"'POST']) def login(): if current_user.is_authenticated(): return redirect('/') if not has_superuser(): return redirect('/firstrun') errs",
"@login_required def destroy_user(): if not current_user.is_superuser: return redirect('/') uid = request.args.get('uid', None) if",
"request.form['password'] == request.form['password-confirm'] uname = request.form['username'].lower() create_user(uname, request.form['password'], sudo=True) return render_template('auth/setup_complete.html') except Exception",
"if has_superuser(): return redirect('/login') errs = None if request.method == 'POST': try: assert",
"def destroy_user(): if not current_user.is_superuser: return redirect('/') uid = request.args.get('uid', None) if not",
"len(name) < 3 or len(password) < 4 or name.isdigit(): # Disallow unames that",
"return redirect(url_for('auth.manage_accounts')) if request.method == 'POST': # Actually destroy uname = user.username try:",
"@login_required def manage_accounts(): if not current_user.is_superuser: return redirect('/') info = request.args.get('info', None) errs",
"current_user.username and not current_user.is_superuser: raise NoPermissionError() u = get_user(uname) if len(pw1) < 4:",
"name): return getattr(self.user, name) @auth_sys.user_loader def load_user(uname): user = get_user(uname) if user: return",
"request.method == 'POST': try: assert request.form['password'] == request.form['password-confirm'] uname = request.form['username'].lower() create_user(uname, request.form['password'],",
"errs = [] return render_template('auth/manage.html', users=get_all(User), errors=errs, info=info) @auth.route('/users/destroy', methods=['GET', 'POST']) @login_required def",
"'superuser' in request.form else False create_user(uname, request.form['password'], sudo=admin) info = \"User '{}' created.\".format(uname)",
"= None if not user: return redirect(url_for('auth.manage_accounts')) if request.method == 'POST': # Actually",
"return render_template('auth/promote.html', user=user) @auth.route('/users/demote', methods=['GET', 'POST']) @login_required def demote_user(): if not current_user.is_superuser: return",
"user.is_superuser = False uname = user.username write_to_db(user) return redirect(url_for('auth.manage_accounts', info=\"{} demoted.\".format(uname))) return render_template('auth/demote.html',",
"auth_sys.init_app(app) # Use the UserMixin from Flask-Login to make this easy. class FLUser(UserMixin):",
"= \"User '{}' created.\".format(uname) except UserExists: errs = [ERR_USER_EXISTS] except Exception as ex:",
"# Disallow unames that are numbers to avoid confusing the ID catcher raise",
"== 'POST': try: uname = request.form['username'].lower() user = get_user(uname) assert user assert check_password(user,",
"if not user: return redirect(url_for('auth.manage_accounts')) if request.method == 'POST': # Actually destroy uname",
"= None if request.method == 'POST': try: assert request.form['password'] == request.form['password-confirm'] uname =",
"auth = Blueprint('auth', __name__, template_folder='templates') # Auth Helpers/Flask-Login auth_sys = LoginManager() def setup_auth(app):",
"@auth.route('/logout') @login_required def logout(): logout_user() return render_template('auth/logout.html') @auth.route('/login', methods=['GET', 'POST']) def login(): if",
"sudo=True) return render_template('auth/setup_complete.html') except Exception as ex: errs = [\"Invalid credentials. Mismatching passwords?\"]",
"== 'POST': # Actually destroy uname = user.username try: delete_from_db(user) return redirect(url_for('auth.manage_accounts', info=\"User",
"LoginManager, UserMixin, login_required, current_user, login_user, logout_user from werkzeug.security import generate_password_hash, check_password_hash from db",
"'POST': try: assert request.form['password'] == request.form['password-confirm'] uname = request.form['username'] admin = True if",
"if not uname == current_user.username and not current_user.is_superuser: raise NoPermissionError() u = get_user(uname)",
"passwords?\"] return render_template('auth/firstrun.html', errors=errs) ERR_USER_EXISTS = \"User already exists; perhaps you wanted <a",
"= True login_user(FLUser(user)) return redirect('/') except Exception as ex: errs = [\"Incorrect username/password.\"]",
"class FLUser(UserMixin): def __init__(self, user): self.user = user def get_id(self): return self.user.username #",
"create_user(name, password, sudo=False): if not name or not password or len(name) < 3",
"== 'POST': try: uname = request.form['username'] pw1 = request.form['password'] pw2 = request.form['password-confirm'] assert",
"'POST']) @login_required def promote_user(): if not current_user.is_superuser: return redirect('/') uid = request.args.get('uid', None)",
"easy. class FLUser(UserMixin): def __init__(self, user): self.user = user def get_id(self): return self.user.username",
"u.password = generate_password_hash(pw1) write_to_db(u) info = \"Password changed for '{}'\".format(uname) except NoPermissionError: errs",
"= \"Password changed for '{}'\".format(uname) except NoPermissionError: errs = [\"Permission denied.\"] except Exception",
"as ex: errs = [\"User creation failed; mismatching passwords?\"] return render_template('auth/create_user.html', errors=errs, info=info)",
"return redirect('/firstrun') errs = None if request.method == 'POST': try: uname = request.form['username'].lower()",
"'POST': try: assert request.form['password'] == request.form['password-confirm'] uname = request.form['username'].lower() create_user(uname, request.form['password'], sudo=True) return",
"except NoPermissionError: errs = [\"Permission denied.\"] except Exception as ex: print(ex) errs =",
"from flask.ext.login import LoginManager, UserMixin, login_required, current_user, login_user, logout_user from werkzeug.security import generate_password_hash,",
"4: raise ValueError() u.password = generate_password_hash(pw1) write_to_db(u) info = \"Password changed for '{}'\".format(uname)",
"def firstrun(): if has_superuser(): return redirect('/login') errs = None if request.method == 'POST':",
"'POST': try: uname = request.form['username'].lower() user = get_user(uname) assert user assert check_password(user, request.form['password'])",
"if request.method == 'POST': try: uname = request.form['username'] pw1 = request.form['password'] pw2 =",
"request.form['username'].lower() user = get_user(uname) assert user assert check_password(user, request.form['password']) remember = False if",
"import LoginManager, UserMixin, login_required, current_user, login_user, logout_user from werkzeug.security import generate_password_hash, check_password_hash from",
"request.args.get('uid', None) if not uid: return redirect(url_for('auth.manage_accounts')) user = get_user(uid) errs = None",
"None if request.method == 'POST': try: assert request.form['password'] == request.form['password-confirm'] uname = request.form['username']",
"= get_user(uname) if len(pw1) < 4: raise ValueError() u.password = generate_password_hash(pw1) write_to_db(u) info",
"errs = None if request.method == 'POST': try: assert request.form['password'] == request.form['password-confirm'] uname"
] |
[
"data from a local file \"\"\" def __init__(self, **kwargs): \"\"\"Constructor for the CustomExtractLocal",
"__init__(self, **kwargs): \"\"\"Constructor for the CustomExtractLocal class Args: **kwargs(optional): Keyword arguments directly passed",
"\"\"\" def __init__(self, **kwargs): \"\"\"Constructor for the CustomExtractLocal class Args: **kwargs(optional): Keyword arguments",
"helps get data from a local file \"\"\" def __init__(self, **kwargs): \"\"\"Constructor for",
"\"\"\"Constructor for the CustomExtractLocal class Args: **kwargs(optional): Keyword arguments directly passed to base",
"from dataduct.steps import ExtractLocalStep import logging logger = logging.getLogger(__name__) class CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal Step",
"CustomExtractLocal class Args: **kwargs(optional): Keyword arguments directly passed to base class \"\"\" logger.info('Using",
"input from local files \"\"\" from dataduct.steps import ExtractLocalStep import logging logger =",
"ETL step wrapper for creating an S3 node for input from local files",
"class that helps get data from a local file \"\"\" def __init__(self, **kwargs):",
"CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal Step class that helps get data from a local file \"\"\"",
"\"\"\"CustomExtractLocal Step class that helps get data from a local file \"\"\" def",
"wrapper for creating an S3 node for input from local files \"\"\" from",
"import logging logger = logging.getLogger(__name__) class CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal Step class that helps get",
"logging.getLogger(__name__) class CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal Step class that helps get data from a local",
"an S3 node for input from local files \"\"\" from dataduct.steps import ExtractLocalStep",
"from a local file \"\"\" def __init__(self, **kwargs): \"\"\"Constructor for the CustomExtractLocal class",
"Step class that helps get data from a local file \"\"\" def __init__(self,",
"get data from a local file \"\"\" def __init__(self, **kwargs): \"\"\"Constructor for the",
"for the CustomExtractLocal class Args: **kwargs(optional): Keyword arguments directly passed to base class",
"class CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal Step class that helps get data from a local file",
"def __init__(self, **kwargs): \"\"\"Constructor for the CustomExtractLocal class Args: **kwargs(optional): Keyword arguments directly",
"S3 node for input from local files \"\"\" from dataduct.steps import ExtractLocalStep import",
"from local files \"\"\" from dataduct.steps import ExtractLocalStep import logging logger = logging.getLogger(__name__)",
"logging logger = logging.getLogger(__name__) class CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal Step class that helps get data",
"class Args: **kwargs(optional): Keyword arguments directly passed to base class \"\"\" logger.info('Using the",
"for input from local files \"\"\" from dataduct.steps import ExtractLocalStep import logging logger",
"ExtractLocalStep import logging logger = logging.getLogger(__name__) class CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal Step class that helps",
"logger = logging.getLogger(__name__) class CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal Step class that helps get data from",
"directly passed to base class \"\"\" logger.info('Using the Custom Extract Local Step') super(CustomExtractLocalStep,",
"for creating an S3 node for input from local files \"\"\" from dataduct.steps",
"dataduct.steps import ExtractLocalStep import logging logger = logging.getLogger(__name__) class CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal Step class",
"Args: **kwargs(optional): Keyword arguments directly passed to base class \"\"\" logger.info('Using the Custom",
"node for input from local files \"\"\" from dataduct.steps import ExtractLocalStep import logging",
"that helps get data from a local file \"\"\" def __init__(self, **kwargs): \"\"\"Constructor",
"**kwargs(optional): Keyword arguments directly passed to base class \"\"\" logger.info('Using the Custom Extract",
"\"\"\" ETL step wrapper for creating an S3 node for input from local",
"local files \"\"\" from dataduct.steps import ExtractLocalStep import logging logger = logging.getLogger(__name__) class",
"step wrapper for creating an S3 node for input from local files \"\"\"",
"**kwargs): \"\"\"Constructor for the CustomExtractLocal class Args: **kwargs(optional): Keyword arguments directly passed to",
"= logging.getLogger(__name__) class CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal Step class that helps get data from a",
"the CustomExtractLocal class Args: **kwargs(optional): Keyword arguments directly passed to base class \"\"\"",
"Keyword arguments directly passed to base class \"\"\" logger.info('Using the Custom Extract Local",
"files \"\"\" from dataduct.steps import ExtractLocalStep import logging logger = logging.getLogger(__name__) class CustomExtractLocalStep(ExtractLocalStep):",
"\"\"\" from dataduct.steps import ExtractLocalStep import logging logger = logging.getLogger(__name__) class CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal",
"file \"\"\" def __init__(self, **kwargs): \"\"\"Constructor for the CustomExtractLocal class Args: **kwargs(optional): Keyword",
"passed to base class \"\"\" logger.info('Using the Custom Extract Local Step') super(CustomExtractLocalStep, self).__init__(**kwargs)",
"creating an S3 node for input from local files \"\"\" from dataduct.steps import",
"a local file \"\"\" def __init__(self, **kwargs): \"\"\"Constructor for the CustomExtractLocal class Args:",
"import ExtractLocalStep import logging logger = logging.getLogger(__name__) class CustomExtractLocalStep(ExtractLocalStep): \"\"\"CustomExtractLocal Step class that",
"local file \"\"\" def __init__(self, **kwargs): \"\"\"Constructor for the CustomExtractLocal class Args: **kwargs(optional):",
"arguments directly passed to base class \"\"\" logger.info('Using the Custom Extract Local Step')"
] |
[
"\"\"\" def __init__(self, name, registry, repository=\"\", tag='latest'): \"\"\" Constructor :param name: docker image's",
"Docker_image module \"\"\" import hashlib from paclair.logged_object import LoggedObject class DockerImage(LoggedObject): \"\"\" A",
"-*- \"\"\" Docker_image module \"\"\" import hashlib from paclair.logged_object import LoggedObject class DockerImage(LoggedObject):",
"...) :param repository: repository's name :param tag: image's tag :param registry: Docker registry",
"self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag = tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry = registry self.repository = repository self._manifest =",
"registry self.repository = repository self._manifest = None self._sha = None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property def",
"is None: m = hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha = m.hexdigest() return self._sha @property def short_sha(self):",
"None self._sha = None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property def token(self): \"\"\" Token for this image",
"ordered layers :returns list: \"\"\" manifest = self.manifest layers = [] # Check",
"self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry = registry self.repository = repository self._manifest = None self._sha = None",
"fs_layers = manifest.get('fsLayers', [])[::-1] digest_field = 'blobSum' # List layers for fs_layer in",
"__init__(self, name, registry, repository=\"\", tag='latest'): \"\"\" Constructor :param name: docker image's name (ex:",
"= manifest['layers'] digest_field = 'digest' else: fs_layers = manifest.get('fsLayers', [])[::-1] digest_field = 'blobSum'",
"ubuntu, centos, ...) :param repository: repository's name :param tag: image's tag :param registry:",
"self._manifest = None self._sha = None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property def token(self): \"\"\" Token for",
"= None self._sha = None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property def token(self): \"\"\" Token for this",
"layers list (used for clair layer_name) :return: sha256 \"\"\" if self._sha is None:",
":param name: docker image's name (ex: ubuntu, centos, ...) :param repository: repository's name",
"= tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry = registry self.repository = repository self._manifest = None self._sha",
"token(self): \"\"\" Token for this image :return: \"\"\" return self.registry.get_token(self) @property def sha(self):",
"[] # Check Version if manifest[\"schemaVersion\"] == 2: fs_layers = manifest['layers'] digest_field =",
"sha256 \"\"\" if self._sha is None: m = hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha = m.hexdigest() return",
"= name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag = tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry = registry self.repository = repository",
"repository self._manifest = None self._sha = None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property def token(self): \"\"\" Token",
"is None: self._manifest = self.registry.get_manifest(self) return self._manifest def get_layers(self): \"\"\" Get ordered layers",
"\"\"\" A Docker Image \"\"\" def __init__(self, name, registry, repository=\"\", tag='latest'): \"\"\" Constructor",
"\"\"\" Get manifest :returns dict: \"\"\" if self._manifest is None: self._manifest = self.registry.get_manifest(self)",
"if self._manifest is None: self._manifest = self.registry.get_manifest(self) return self._manifest def get_layers(self): \"\"\" Get",
"name: docker image's name (ex: ubuntu, centos, ...) :param repository: repository's name :param",
"self.name = name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag = tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry = registry self.repository =",
"= 'blobSum' # List layers for fs_layer in fs_layers: if fs_layer[digest_field] not in",
"version (12 characters) \"\"\" return self.sha[:12] @property def manifest(self): \"\"\" Get manifest :returns",
"coding: utf-8 -*- \"\"\" Docker_image module \"\"\" import hashlib from paclair.logged_object import LoggedObject",
"for clair layer_name) :return: sha256 \"\"\" if self._sha is None: m = hashlib.sha256(''.join(self.get_layers()).encode('utf-8'))",
"of the layers list (used for clair layer_name) :return: sha256 \"\"\" if self._sha",
"\"\"\" Get ordered layers :returns list: \"\"\" manifest = self.manifest layers = []",
"manifest['layers'] digest_field = 'digest' else: fs_layers = manifest.get('fsLayers', [])[::-1] digest_field = 'blobSum' #",
"Docker registry \"\"\" super(DockerImage, self).__init__() self.name = name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag = tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag))",
"# -*- coding: utf-8 -*- \"\"\" Docker_image module \"\"\" import hashlib from paclair.logged_object",
"image's name (ex: ubuntu, centos, ...) :param repository: repository's name :param tag: image's",
"= registry self.repository = repository self._manifest = None self._sha = None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property",
"hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha = m.hexdigest() return self._sha @property def short_sha(self): \"\"\" Sha short version",
"\"\"\" return self.sha[:12] @property def manifest(self): \"\"\" Get manifest :returns dict: \"\"\" if",
"Get ordered layers :returns list: \"\"\" manifest = self.manifest layers = [] #",
"@property def sha(self): \"\"\" Sha256 of the layers list (used for clair layer_name)",
"else: fs_layers = manifest.get('fsLayers', [])[::-1] digest_field = 'blobSum' # List layers for fs_layer",
"manifest = self.manifest layers = [] # Check Version if manifest[\"schemaVersion\"] == 2:",
"import hashlib from paclair.logged_object import LoggedObject class DockerImage(LoggedObject): \"\"\" A Docker Image \"\"\"",
"list: \"\"\" manifest = self.manifest layers = [] # Check Version if manifest[\"schemaVersion\"]",
"\"\"\" if self._manifest is None: self._manifest = self.registry.get_manifest(self) return self._manifest def get_layers(self): \"\"\"",
"[])[::-1] digest_field = 'blobSum' # List layers for fs_layer in fs_layers: if fs_layer[digest_field]",
"(12 characters) \"\"\" return self.sha[:12] @property def manifest(self): \"\"\" Get manifest :returns dict:",
"layers :returns list: \"\"\" manifest = self.manifest layers = [] # Check Version",
"# List layers for fs_layer in fs_layers: if fs_layer[digest_field] not in layers: layers.append(fs_layer[digest_field])",
"registry, repository=\"\", tag='latest'): \"\"\" Constructor :param name: docker image's name (ex: ubuntu, centos,",
"Sha short version (12 characters) \"\"\" return self.sha[:12] @property def manifest(self): \"\"\" Get",
"m.hexdigest() return self._sha @property def short_sha(self): \"\"\" Sha short version (12 characters) \"\"\"",
"self._manifest = self.registry.get_manifest(self) return self._manifest def get_layers(self): \"\"\" Get ordered layers :returns list:",
"manifest.get('fsLayers', [])[::-1] digest_field = 'blobSum' # List layers for fs_layer in fs_layers: if",
"Constructor :param name: docker image's name (ex: ubuntu, centos, ...) :param repository: repository's",
"2: fs_layers = manifest['layers'] digest_field = 'digest' else: fs_layers = manifest.get('fsLayers', [])[::-1] digest_field",
"None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property def token(self): \"\"\" Token for this image :return: \"\"\" return",
"(ex: ubuntu, centos, ...) :param repository: repository's name :param tag: image's tag :param",
"docker image's name (ex: ubuntu, centos, ...) :param repository: repository's name :param tag:",
"name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag = tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry = registry self.repository = repository self._manifest",
"short_sha(self): \"\"\" Sha short version (12 characters) \"\"\" return self.sha[:12] @property def manifest(self):",
"manifest[\"schemaVersion\"] == 2: fs_layers = manifest['layers'] digest_field = 'digest' else: fs_layers = manifest.get('fsLayers',",
"def token(self): \"\"\" Token for this image :return: \"\"\" return self.registry.get_token(self) @property def",
"self.manifest layers = [] # Check Version if manifest[\"schemaVersion\"] == 2: fs_layers =",
"Image \"\"\" def __init__(self, name, registry, repository=\"\", tag='latest'): \"\"\" Constructor :param name: docker",
"repository=\"\", tag='latest'): \"\"\" Constructor :param name: docker image's name (ex: ubuntu, centos, ...)",
"tag: image's tag :param registry: Docker registry \"\"\" super(DockerImage, self).__init__() self.name = name",
"def sha(self): \"\"\" Sha256 of the layers list (used for clair layer_name) :return:",
"== 2: fs_layers = manifest['layers'] digest_field = 'digest' else: fs_layers = manifest.get('fsLayers', [])[::-1]",
"paclair.logged_object import LoggedObject class DockerImage(LoggedObject): \"\"\" A Docker Image \"\"\" def __init__(self, name,",
"tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry = registry self.repository = repository self._manifest = None self._sha =",
"= hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha = m.hexdigest() return self._sha @property def short_sha(self): \"\"\" Sha short",
"-*- coding: utf-8 -*- \"\"\" Docker_image module \"\"\" import hashlib from paclair.logged_object import",
"get_layers(self): \"\"\" Get ordered layers :returns list: \"\"\" manifest = self.manifest layers =",
"Get manifest :returns dict: \"\"\" if self._manifest is None: self._manifest = self.registry.get_manifest(self) return",
"\"\"\" super(DockerImage, self).__init__() self.name = name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag = tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry =",
"utf-8 -*- \"\"\" Docker_image module \"\"\" import hashlib from paclair.logged_object import LoggedObject class",
"def manifest(self): \"\"\" Get manifest :returns dict: \"\"\" if self._manifest is None: self._manifest",
"manifest(self): \"\"\" Get manifest :returns dict: \"\"\" if self._manifest is None: self._manifest =",
"self._sha = m.hexdigest() return self._sha @property def short_sha(self): \"\"\" Sha short version (12",
"short version (12 characters) \"\"\" return self.sha[:12] @property def manifest(self): \"\"\" Get manifest",
"self._sha = None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property def token(self): \"\"\" Token for this image :return:",
"Token for this image :return: \"\"\" return self.registry.get_token(self) @property def sha(self): \"\"\" Sha256",
"Sha256 of the layers list (used for clair layer_name) :return: sha256 \"\"\" if",
"if manifest[\"schemaVersion\"] == 2: fs_layers = manifest['layers'] digest_field = 'digest' else: fs_layers =",
"repository's name :param tag: image's tag :param registry: Docker registry \"\"\" super(DockerImage, self).__init__()",
"centos, ...) :param repository: repository's name :param tag: image's tag :param registry: Docker",
"None: self._manifest = self.registry.get_manifest(self) return self._manifest def get_layers(self): \"\"\" Get ordered layers :returns",
"\"\"\" Sha256 of the layers list (used for clair layer_name) :return: sha256 \"\"\"",
"self._sha @property def short_sha(self): \"\"\" Sha short version (12 characters) \"\"\" return self.sha[:12]",
"name (ex: ubuntu, centos, ...) :param repository: repository's name :param tag: image's tag",
"def __init__(self, name, registry, repository=\"\", tag='latest'): \"\"\" Constructor :param name: docker image's name",
"image :return: \"\"\" return self.registry.get_token(self) @property def sha(self): \"\"\" Sha256 of the layers",
"LoggedObject class DockerImage(LoggedObject): \"\"\" A Docker Image \"\"\" def __init__(self, name, registry, repository=\"\",",
"class DockerImage(LoggedObject): \"\"\" A Docker Image \"\"\" def __init__(self, name, registry, repository=\"\", tag='latest'):",
"= [] # Check Version if manifest[\"schemaVersion\"] == 2: fs_layers = manifest['layers'] digest_field",
"return self._sha @property def short_sha(self): \"\"\" Sha short version (12 characters) \"\"\" return",
"for this image :return: \"\"\" return self.registry.get_token(self) @property def sha(self): \"\"\" Sha256 of",
"Check Version if manifest[\"schemaVersion\"] == 2: fs_layers = manifest['layers'] digest_field = 'digest' else:",
"self.sha[:12] @property def manifest(self): \"\"\" Get manifest :returns dict: \"\"\" if self._manifest is",
"return self._manifest def get_layers(self): \"\"\" Get ordered layers :returns list: \"\"\" manifest =",
"return self.sha[:12] @property def manifest(self): \"\"\" Get manifest :returns dict: \"\"\" if self._manifest",
"sha(self): \"\"\" Sha256 of the layers list (used for clair layer_name) :return: sha256",
"super(DockerImage, self).__init__() self.name = name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag = tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry = registry",
"self._sha is None: m = hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha = m.hexdigest() return self._sha @property def",
"manifest :returns dict: \"\"\" if self._manifest is None: self._manifest = self.registry.get_manifest(self) return self._manifest",
"name, registry, repository=\"\", tag='latest'): \"\"\" Constructor :param name: docker image's name (ex: ubuntu,",
"import LoggedObject class DockerImage(LoggedObject): \"\"\" A Docker Image \"\"\" def __init__(self, name, registry,",
"\"\"\" Constructor :param name: docker image's name (ex: ubuntu, centos, ...) :param repository:",
"this image :return: \"\"\" return self.registry.get_token(self) @property def sha(self): \"\"\" Sha256 of the",
"\"\"\" if self._sha is None: m = hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha = m.hexdigest() return self._sha",
"if self._sha is None: m = hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha = m.hexdigest() return self._sha @property",
"layer_name) :return: sha256 \"\"\" if self._sha is None: m = hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha =",
"= self.manifest layers = [] # Check Version if manifest[\"schemaVersion\"] == 2: fs_layers",
":param repository: repository's name :param tag: image's tag :param registry: Docker registry \"\"\"",
"dict: \"\"\" if self._manifest is None: self._manifest = self.registry.get_manifest(self) return self._manifest def get_layers(self):",
"List layers for fs_layer in fs_layers: if fs_layer[digest_field] not in layers: layers.append(fs_layer[digest_field]) return",
"= repository self._manifest = None self._sha = None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property def token(self): \"\"\"",
"registry: Docker registry \"\"\" super(DockerImage, self).__init__() self.name = name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag = tag",
"(used for clair layer_name) :return: sha256 \"\"\" if self._sha is None: m =",
"\"\"\" manifest = self.manifest layers = [] # Check Version if manifest[\"schemaVersion\"] ==",
"return self.registry.get_token(self) @property def sha(self): \"\"\" Sha256 of the layers list (used for",
":param tag: image's tag :param registry: Docker registry \"\"\" super(DockerImage, self).__init__() self.name =",
":return: \"\"\" return self.registry.get_token(self) @property def sha(self): \"\"\" Sha256 of the layers list",
"def short_sha(self): \"\"\" Sha short version (12 characters) \"\"\" return self.sha[:12] @property def",
"module \"\"\" import hashlib from paclair.logged_object import LoggedObject class DockerImage(LoggedObject): \"\"\" A Docker",
"list (used for clair layer_name) :return: sha256 \"\"\" if self._sha is None: m",
"self._manifest def get_layers(self): \"\"\" Get ordered layers :returns list: \"\"\" manifest = self.manifest",
"digest_field = 'digest' else: fs_layers = manifest.get('fsLayers', [])[::-1] digest_field = 'blobSum' # List",
"\"\"\" return self.registry.get_token(self) @property def sha(self): \"\"\" Sha256 of the layers list (used",
":param registry: Docker registry \"\"\" super(DockerImage, self).__init__() self.name = name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag =",
"self.registry.get_manifest(self) return self._manifest def get_layers(self): \"\"\" Get ordered layers :returns list: \"\"\" manifest",
"DockerImage(LoggedObject): \"\"\" A Docker Image \"\"\" def __init__(self, name, registry, repository=\"\", tag='latest'): \"\"\"",
"'blobSum' # List layers for fs_layer in fs_layers: if fs_layer[digest_field] not in layers:",
"Version if manifest[\"schemaVersion\"] == 2: fs_layers = manifest['layers'] digest_field = 'digest' else: fs_layers",
"'digest' else: fs_layers = manifest.get('fsLayers', [])[::-1] digest_field = 'blobSum' # List layers for",
"self.registry = registry self.repository = repository self._manifest = None self._sha = None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository))",
"self.tag = tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry = registry self.repository = repository self._manifest = None",
"clair layer_name) :return: sha256 \"\"\" if self._sha is None: m = hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha",
"\"\"\" Sha short version (12 characters) \"\"\" return self.sha[:12] @property def manifest(self): \"\"\"",
"# Check Version if manifest[\"schemaVersion\"] == 2: fs_layers = manifest['layers'] digest_field = 'digest'",
"self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property def token(self): \"\"\" Token for this image :return: \"\"\" return self.registry.get_token(self)",
"\"\"\" Token for this image :return: \"\"\" return self.registry.get_token(self) @property def sha(self): \"\"\"",
"m = hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha = m.hexdigest() return self._sha @property def short_sha(self): \"\"\" Sha",
"A Docker Image \"\"\" def __init__(self, name, registry, repository=\"\", tag='latest'): \"\"\" Constructor :param",
"self.registry.get_token(self) @property def sha(self): \"\"\" Sha256 of the layers list (used for clair",
"= manifest.get('fsLayers', [])[::-1] digest_field = 'blobSum' # List layers for fs_layer in fs_layers:",
"layers = [] # Check Version if manifest[\"schemaVersion\"] == 2: fs_layers = manifest['layers']",
"image's tag :param registry: Docker registry \"\"\" super(DockerImage, self).__init__() self.name = name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name))",
"self).__init__() self.name = name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag = tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry = registry self.repository",
"= self.registry.get_manifest(self) return self._manifest def get_layers(self): \"\"\" Get ordered layers :returns list: \"\"\"",
"def get_layers(self): \"\"\" Get ordered layers :returns list: \"\"\" manifest = self.manifest layers",
"\"\"\" import hashlib from paclair.logged_object import LoggedObject class DockerImage(LoggedObject): \"\"\" A Docker Image",
"tag :param registry: Docker registry \"\"\" super(DockerImage, self).__init__() self.name = name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag",
"= None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property def token(self): \"\"\" Token for this image :return: \"\"\"",
"= m.hexdigest() return self._sha @property def short_sha(self): \"\"\" Sha short version (12 characters)",
"@property def manifest(self): \"\"\" Get manifest :returns dict: \"\"\" if self._manifest is None:",
"<gh_stars>0 # -*- coding: utf-8 -*- \"\"\" Docker_image module \"\"\" import hashlib from",
"\"\"\" Docker_image module \"\"\" import hashlib from paclair.logged_object import LoggedObject class DockerImage(LoggedObject): \"\"\"",
":returns dict: \"\"\" if self._manifest is None: self._manifest = self.registry.get_manifest(self) return self._manifest def",
"repository: repository's name :param tag: image's tag :param registry: Docker registry \"\"\" super(DockerImage,",
"@property def short_sha(self): \"\"\" Sha short version (12 characters) \"\"\" return self.sha[:12] @property",
"characters) \"\"\" return self.sha[:12] @property def manifest(self): \"\"\" Get manifest :returns dict: \"\"\"",
"None: m = hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha = m.hexdigest() return self._sha @property def short_sha(self): \"\"\"",
"Docker Image \"\"\" def __init__(self, name, registry, repository=\"\", tag='latest'): \"\"\" Constructor :param name:",
"self._manifest is None: self._manifest = self.registry.get_manifest(self) return self._manifest def get_layers(self): \"\"\" Get ordered",
":returns list: \"\"\" manifest = self.manifest layers = [] # Check Version if",
"@property def token(self): \"\"\" Token for this image :return: \"\"\" return self.registry.get_token(self) @property",
"name :param tag: image's tag :param registry: Docker registry \"\"\" super(DockerImage, self).__init__() self.name",
"self.repository = repository self._manifest = None self._sha = None self.logger.debug(\"INITCLASS:REPOSITORY:{repository}\".format(repository=self.repository)) @property def token(self):",
"from paclair.logged_object import LoggedObject class DockerImage(LoggedObject): \"\"\" A Docker Image \"\"\" def __init__(self,",
"registry \"\"\" super(DockerImage, self).__init__() self.name = name self.logger.debug(\"INITCLASS:NAMEIMAGE:{name}\".format(name=self.name)) self.tag = tag self.logger.debug(\"INITCLASS:TAG:{tag}\".format(tag=self.tag)) self.registry",
":return: sha256 \"\"\" if self._sha is None: m = hashlib.sha256(''.join(self.get_layers()).encode('utf-8')) self._sha = m.hexdigest()",
"hashlib from paclair.logged_object import LoggedObject class DockerImage(LoggedObject): \"\"\" A Docker Image \"\"\" def",
"= 'digest' else: fs_layers = manifest.get('fsLayers', [])[::-1] digest_field = 'blobSum' # List layers",
"the layers list (used for clair layer_name) :return: sha256 \"\"\" if self._sha is",
"digest_field = 'blobSum' # List layers for fs_layer in fs_layers: if fs_layer[digest_field] not",
"layers for fs_layer in fs_layers: if fs_layer[digest_field] not in layers: layers.append(fs_layer[digest_field]) return layers",
"tag='latest'): \"\"\" Constructor :param name: docker image's name (ex: ubuntu, centos, ...) :param",
"fs_layers = manifest['layers'] digest_field = 'digest' else: fs_layers = manifest.get('fsLayers', [])[::-1] digest_field ="
] |
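For orientation, a minimal usage sketch of the class above. FakeRegistry is a hypothetical stand-in, not paclair's real registry class; the only assumption it encodes is the one the code itself makes, namely that the registry object exposes get_token and get_manifest.

class FakeRegistry:
    """Illustrative stand-in for paclair's registry object (hypothetical)."""

    def get_token(self, image):
        return "dummy-token"

    def get_manifest(self, image):
        # Minimal schema-v2 manifest with one duplicated digest.
        return {"schemaVersion": 2,
                "layers": [{"digest": "sha256:aaa"},
                           {"digest": "sha256:aaa"},
                           {"digest": "sha256:bbb"}]}

image = DockerImage("ubuntu", FakeRegistry(), tag="20.04")
print(image.get_layers())  # ['sha256:aaa', 'sha256:bbb'] -- duplicates dropped
print(image.short_sha)     # first 12 hex chars of sha256 over the joined digests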
[
"or self.default_app_name def get_logger(self): return logging.getLogger(__name__) def get_urls(self): pass def urls(self): return self,",
"def get_logger(self): return logging.getLogger(__name__) def get_urls(self): pass def urls(self): return self, self.app_name, self.name",
"def urls(self): return self, self.app_name, self.name urls = property(urls) @property def urlpatterns(self): return",
"api_url = api_endpoint.get_url() return {'media':self.get_media(), 'api_endpoint':api_url, 'client':self,} def get_urls(self): urlpatterns = patterns('', url(r'^$',",
"app_name=None): self.api_endpoint = api_endpoint self.name = name or self.default_namespace self.app_name = app_name or",
"urlpatterns(self): return self.get_urls() def reverse(self, name, *args, **kwargs): return reverse('%s:%s' % (self.name, name),",
"urls = property(urls) @property def urlpatterns(self): return self.get_urls() def reverse(self, name, *args, **kwargs):",
"*args, **kwargs): return reverse('%s:%s' % (self.name, name), args=args, kwargs=kwargs, current_app=self.app_name) class SimpleTemplateClientView(TemplateView): client",
"= 'client' def __init__(self, api_endpoint, name=None, app_name=None): self.api_endpoint = api_endpoint self.name = name",
"reverse('%s:%s' % (self.name, name), args=args, kwargs=kwargs, current_app=self.app_name) class SimpleTemplateClientView(TemplateView): client = None def",
"return context class SimpleTemplateClient(Client): template_name = None template_view = SimpleTemplateClientView def get_media(self): pass",
"None template_view = SimpleTemplateClientView def get_media(self): pass #TODO def get_context_data(self): api_endpoint = self.api_endpoint",
"SimpleTemplateClientView def get_media(self): pass #TODO def get_context_data(self): api_endpoint = self.api_endpoint api_request = InternalAPIRequest(site=api_endpoint)",
"{'media':self.get_media(), 'api_endpoint':api_url, 'client':self,} def get_urls(self): urlpatterns = patterns('', url(r'^$', self.template_view.as_view(template_name=self.template_name, client=self), name='index'), )",
"reverse from hyperadmin.apirequests import InternalAPIRequest import logging class Client(object): default_namespace = 'hyper-client' default_app_name",
"'hyper-client' default_app_name = 'client' def __init__(self, api_endpoint, name=None, app_name=None): self.api_endpoint = api_endpoint self.name",
"InternalAPIRequest import logging class Client(object): default_namespace = 'hyper-client' default_app_name = 'client' def __init__(self,",
"self.name = name or self.default_namespace self.app_name = app_name or self.default_app_name def get_logger(self): return",
"% (self.name, name), args=args, kwargs=kwargs, current_app=self.app_name) class SimpleTemplateClientView(TemplateView): client = None def get_context_data(self,",
"= name or self.default_namespace self.app_name = app_name or self.default_app_name def get_logger(self): return logging.getLogger(__name__)",
"template_name = None template_view = SimpleTemplateClientView def get_media(self): pass #TODO def get_context_data(self): api_endpoint",
"from django.conf.urls.defaults import patterns, url from django.views.generic import TemplateView from django.core.urlresolvers import reverse",
"django.views.generic import TemplateView from django.core.urlresolvers import reverse from hyperadmin.apirequests import InternalAPIRequest import logging",
"django.core.urlresolvers import reverse from hyperadmin.apirequests import InternalAPIRequest import logging class Client(object): default_namespace =",
"__init__(self, api_endpoint, name=None, app_name=None): self.api_endpoint = api_endpoint self.name = name or self.default_namespace self.app_name",
"name=None, app_name=None): self.api_endpoint = api_endpoint self.name = name or self.default_namespace self.app_name = app_name",
"def reverse(self, name, *args, **kwargs): return reverse('%s:%s' % (self.name, name), args=args, kwargs=kwargs, current_app=self.app_name)",
"ImportError: from django.conf.urls.defaults import patterns, url from django.views.generic import TemplateView from django.core.urlresolvers import",
"= self.api_endpoint api_request = InternalAPIRequest(site=api_endpoint) api_endpoint = api_endpoint.fork(api_request=api_request) api_url = api_endpoint.get_url() return {'media':self.get_media(),",
"self, self.app_name, self.name urls = property(urls) @property def urlpatterns(self): return self.get_urls() def reverse(self,",
"= 'hyper-client' default_app_name = 'client' def __init__(self, api_endpoint, name=None, app_name=None): self.api_endpoint = api_endpoint",
"get_urls(self): pass def urls(self): return self, self.app_name, self.name urls = property(urls) @property def",
"None def get_context_data(self, **kwargs): context = super(SimpleTemplateClientView, self).get_context_data(**kwargs) context.update(self.client.get_context_data()) return context class SimpleTemplateClient(Client):",
"default_app_name = 'client' def __init__(self, api_endpoint, name=None, app_name=None): self.api_endpoint = api_endpoint self.name =",
"@property def urlpatterns(self): return self.get_urls() def reverse(self, name, *args, **kwargs): return reverse('%s:%s' %",
"default_namespace = 'hyper-client' default_app_name = 'client' def __init__(self, api_endpoint, name=None, app_name=None): self.api_endpoint =",
"self.default_namespace self.app_name = app_name or self.default_app_name def get_logger(self): return logging.getLogger(__name__) def get_urls(self): pass",
"<reponame>zbyte64/django-hyperadmin<filename>hyperadmin/clients/common.py try: from django.conf.urls import patterns, url except ImportError: from django.conf.urls.defaults import patterns,",
"= None def get_context_data(self, **kwargs): context = super(SimpleTemplateClientView, self).get_context_data(**kwargs) context.update(self.client.get_context_data()) return context class",
"app_name or self.default_app_name def get_logger(self): return logging.getLogger(__name__) def get_urls(self): pass def urls(self): return",
"= SimpleTemplateClientView def get_media(self): pass #TODO def get_context_data(self): api_endpoint = self.api_endpoint api_request =",
"class SimpleTemplateClientView(TemplateView): client = None def get_context_data(self, **kwargs): context = super(SimpleTemplateClientView, self).get_context_data(**kwargs) context.update(self.client.get_context_data())",
"def __init__(self, api_endpoint, name=None, app_name=None): self.api_endpoint = api_endpoint self.name = name or self.default_namespace",
"return {'media':self.get_media(), 'api_endpoint':api_url, 'client':self,} def get_urls(self): urlpatterns = patterns('', url(r'^$', self.template_view.as_view(template_name=self.template_name, client=self), name='index'),",
"api_request = InternalAPIRequest(site=api_endpoint) api_endpoint = api_endpoint.fork(api_request=api_request) api_url = api_endpoint.get_url() return {'media':self.get_media(), 'api_endpoint':api_url, 'client':self,}",
"return reverse('%s:%s' % (self.name, name), args=args, kwargs=kwargs, current_app=self.app_name) class SimpleTemplateClientView(TemplateView): client = None",
"import patterns, url except ImportError: from django.conf.urls.defaults import patterns, url from django.views.generic import",
"api_endpoint self.name = name or self.default_namespace self.app_name = app_name or self.default_app_name def get_logger(self):",
"def get_media(self): pass #TODO def get_context_data(self): api_endpoint = self.api_endpoint api_request = InternalAPIRequest(site=api_endpoint) api_endpoint",
"self.api_endpoint = api_endpoint self.name = name or self.default_namespace self.app_name = app_name or self.default_app_name",
"self.api_endpoint api_request = InternalAPIRequest(site=api_endpoint) api_endpoint = api_endpoint.fork(api_request=api_request) api_url = api_endpoint.get_url() return {'media':self.get_media(), 'api_endpoint':api_url,",
"name), args=args, kwargs=kwargs, current_app=self.app_name) class SimpleTemplateClientView(TemplateView): client = None def get_context_data(self, **kwargs): context",
"**kwargs): context = super(SimpleTemplateClientView, self).get_context_data(**kwargs) context.update(self.client.get_context_data()) return context class SimpleTemplateClient(Client): template_name = None",
"name, *args, **kwargs): return reverse('%s:%s' % (self.name, name), args=args, kwargs=kwargs, current_app=self.app_name) class SimpleTemplateClientView(TemplateView):",
"= InternalAPIRequest(site=api_endpoint) api_endpoint = api_endpoint.fork(api_request=api_request) api_url = api_endpoint.get_url() return {'media':self.get_media(), 'api_endpoint':api_url, 'client':self,} def",
"get_media(self): pass #TODO def get_context_data(self): api_endpoint = self.api_endpoint api_request = InternalAPIRequest(site=api_endpoint) api_endpoint =",
"'api_endpoint':api_url, 'client':self,} def get_urls(self): urlpatterns = patterns('', url(r'^$', self.template_view.as_view(template_name=self.template_name, client=self), name='index'), ) return",
"self.app_name, self.name urls = property(urls) @property def urlpatterns(self): return self.get_urls() def reverse(self, name,",
"import reverse from hyperadmin.apirequests import InternalAPIRequest import logging class Client(object): default_namespace = 'hyper-client'",
"def get_context_data(self, **kwargs): context = super(SimpleTemplateClientView, self).get_context_data(**kwargs) context.update(self.client.get_context_data()) return context class SimpleTemplateClient(Client): template_name",
"client = None def get_context_data(self, **kwargs): context = super(SimpleTemplateClientView, self).get_context_data(**kwargs) context.update(self.client.get_context_data()) return context",
"logging class Client(object): default_namespace = 'hyper-client' default_app_name = 'client' def __init__(self, api_endpoint, name=None,",
"SimpleTemplateClient(Client): template_name = None template_view = SimpleTemplateClientView def get_media(self): pass #TODO def get_context_data(self):",
"context.update(self.client.get_context_data()) return context class SimpleTemplateClient(Client): template_name = None template_view = SimpleTemplateClientView def get_media(self):",
"self.default_app_name def get_logger(self): return logging.getLogger(__name__) def get_urls(self): pass def urls(self): return self, self.app_name,",
"import patterns, url from django.views.generic import TemplateView from django.core.urlresolvers import reverse from hyperadmin.apirequests",
"from django.conf.urls import patterns, url except ImportError: from django.conf.urls.defaults import patterns, url from",
"return self.get_urls() def reverse(self, name, *args, **kwargs): return reverse('%s:%s' % (self.name, name), args=args,",
"pass #TODO def get_context_data(self): api_endpoint = self.api_endpoint api_request = InternalAPIRequest(site=api_endpoint) api_endpoint = api_endpoint.fork(api_request=api_request)",
"api_endpoint.get_url() return {'media':self.get_media(), 'api_endpoint':api_url, 'client':self,} def get_urls(self): urlpatterns = patterns('', url(r'^$', self.template_view.as_view(template_name=self.template_name, client=self),",
"= app_name or self.default_app_name def get_logger(self): return logging.getLogger(__name__) def get_urls(self): pass def urls(self):",
"**kwargs): return reverse('%s:%s' % (self.name, name), args=args, kwargs=kwargs, current_app=self.app_name) class SimpleTemplateClientView(TemplateView): client =",
"url except ImportError: from django.conf.urls.defaults import patterns, url from django.views.generic import TemplateView from",
"= api_endpoint self.name = name or self.default_namespace self.app_name = app_name or self.default_app_name def",
"class Client(object): default_namespace = 'hyper-client' default_app_name = 'client' def __init__(self, api_endpoint, name=None, app_name=None):",
"'client':self,} def get_urls(self): urlpatterns = patterns('', url(r'^$', self.template_view.as_view(template_name=self.template_name, client=self), name='index'), ) return urlpatterns",
"= super(SimpleTemplateClientView, self).get_context_data(**kwargs) context.update(self.client.get_context_data()) return context class SimpleTemplateClient(Client): template_name = None template_view =",
"template_view = SimpleTemplateClientView def get_media(self): pass #TODO def get_context_data(self): api_endpoint = self.api_endpoint api_request",
"super(SimpleTemplateClientView, self).get_context_data(**kwargs) context.update(self.client.get_context_data()) return context class SimpleTemplateClient(Client): template_name = None template_view = SimpleTemplateClientView",
"Client(object): default_namespace = 'hyper-client' default_app_name = 'client' def __init__(self, api_endpoint, name=None, app_name=None): self.api_endpoint",
"pass def urls(self): return self, self.app_name, self.name urls = property(urls) @property def urlpatterns(self):",
"self).get_context_data(**kwargs) context.update(self.client.get_context_data()) return context class SimpleTemplateClient(Client): template_name = None template_view = SimpleTemplateClientView def",
"return logging.getLogger(__name__) def get_urls(self): pass def urls(self): return self, self.app_name, self.name urls =",
"kwargs=kwargs, current_app=self.app_name) class SimpleTemplateClientView(TemplateView): client = None def get_context_data(self, **kwargs): context = super(SimpleTemplateClientView,",
"= property(urls) @property def urlpatterns(self): return self.get_urls() def reverse(self, name, *args, **kwargs): return",
"get_context_data(self, **kwargs): context = super(SimpleTemplateClientView, self).get_context_data(**kwargs) context.update(self.client.get_context_data()) return context class SimpleTemplateClient(Client): template_name =",
"return self, self.app_name, self.name urls = property(urls) @property def urlpatterns(self): return self.get_urls() def",
"api_endpoint = api_endpoint.fork(api_request=api_request) api_url = api_endpoint.get_url() return {'media':self.get_media(), 'api_endpoint':api_url, 'client':self,} def get_urls(self): urlpatterns",
"url from django.views.generic import TemplateView from django.core.urlresolvers import reverse from hyperadmin.apirequests import InternalAPIRequest",
"'client' def __init__(self, api_endpoint, name=None, app_name=None): self.api_endpoint = api_endpoint self.name = name or",
"import InternalAPIRequest import logging class Client(object): default_namespace = 'hyper-client' default_app_name = 'client' def",
"(self.name, name), args=args, kwargs=kwargs, current_app=self.app_name) class SimpleTemplateClientView(TemplateView): client = None def get_context_data(self, **kwargs):",
"self.app_name = app_name or self.default_app_name def get_logger(self): return logging.getLogger(__name__) def get_urls(self): pass def",
"reverse(self, name, *args, **kwargs): return reverse('%s:%s' % (self.name, name), args=args, kwargs=kwargs, current_app=self.app_name) class",
"= None template_view = SimpleTemplateClientView def get_media(self): pass #TODO def get_context_data(self): api_endpoint =",
"django.conf.urls import patterns, url except ImportError: from django.conf.urls.defaults import patterns, url from django.views.generic",
"urls(self): return self, self.app_name, self.name urls = property(urls) @property def urlpatterns(self): return self.get_urls()",
"get_context_data(self): api_endpoint = self.api_endpoint api_request = InternalAPIRequest(site=api_endpoint) api_endpoint = api_endpoint.fork(api_request=api_request) api_url = api_endpoint.get_url()",
"patterns, url from django.views.generic import TemplateView from django.core.urlresolvers import reverse from hyperadmin.apirequests import",
"def get_context_data(self): api_endpoint = self.api_endpoint api_request = InternalAPIRequest(site=api_endpoint) api_endpoint = api_endpoint.fork(api_request=api_request) api_url =",
"except ImportError: from django.conf.urls.defaults import patterns, url from django.views.generic import TemplateView from django.core.urlresolvers",
"from django.views.generic import TemplateView from django.core.urlresolvers import reverse from hyperadmin.apirequests import InternalAPIRequest import",
"api_endpoint, name=None, app_name=None): self.api_endpoint = api_endpoint self.name = name or self.default_namespace self.app_name =",
"or self.default_namespace self.app_name = app_name or self.default_app_name def get_logger(self): return logging.getLogger(__name__) def get_urls(self):",
"from hyperadmin.apirequests import InternalAPIRequest import logging class Client(object): default_namespace = 'hyper-client' default_app_name =",
"import logging class Client(object): default_namespace = 'hyper-client' default_app_name = 'client' def __init__(self, api_endpoint,",
"property(urls) @property def urlpatterns(self): return self.get_urls() def reverse(self, name, *args, **kwargs): return reverse('%s:%s'",
"InternalAPIRequest(site=api_endpoint) api_endpoint = api_endpoint.fork(api_request=api_request) api_url = api_endpoint.get_url() return {'media':self.get_media(), 'api_endpoint':api_url, 'client':self,} def get_urls(self):",
"import TemplateView from django.core.urlresolvers import reverse from hyperadmin.apirequests import InternalAPIRequest import logging class",
"self.get_urls() def reverse(self, name, *args, **kwargs): return reverse('%s:%s' % (self.name, name), args=args, kwargs=kwargs,",
"from django.core.urlresolvers import reverse from hyperadmin.apirequests import InternalAPIRequest import logging class Client(object): default_namespace",
"current_app=self.app_name) class SimpleTemplateClientView(TemplateView): client = None def get_context_data(self, **kwargs): context = super(SimpleTemplateClientView, self).get_context_data(**kwargs)",
"patterns, url except ImportError: from django.conf.urls.defaults import patterns, url from django.views.generic import TemplateView",
"def get_urls(self): pass def urls(self): return self, self.app_name, self.name urls = property(urls) @property",
"name or self.default_namespace self.app_name = app_name or self.default_app_name def get_logger(self): return logging.getLogger(__name__) def",
"get_logger(self): return logging.getLogger(__name__) def get_urls(self): pass def urls(self): return self, self.app_name, self.name urls",
"api_endpoint = self.api_endpoint api_request = InternalAPIRequest(site=api_endpoint) api_endpoint = api_endpoint.fork(api_request=api_request) api_url = api_endpoint.get_url() return",
"def urlpatterns(self): return self.get_urls() def reverse(self, name, *args, **kwargs): return reverse('%s:%s' % (self.name,",
"django.conf.urls.defaults import patterns, url from django.views.generic import TemplateView from django.core.urlresolvers import reverse from",
"= api_endpoint.fork(api_request=api_request) api_url = api_endpoint.get_url() return {'media':self.get_media(), 'api_endpoint':api_url, 'client':self,} def get_urls(self): urlpatterns =",
"logging.getLogger(__name__) def get_urls(self): pass def urls(self): return self, self.app_name, self.name urls = property(urls)",
"#TODO def get_context_data(self): api_endpoint = self.api_endpoint api_request = InternalAPIRequest(site=api_endpoint) api_endpoint = api_endpoint.fork(api_request=api_request) api_url",
"SimpleTemplateClientView(TemplateView): client = None def get_context_data(self, **kwargs): context = super(SimpleTemplateClientView, self).get_context_data(**kwargs) context.update(self.client.get_context_data()) return",
"class SimpleTemplateClient(Client): template_name = None template_view = SimpleTemplateClientView def get_media(self): pass #TODO def",
"context = super(SimpleTemplateClientView, self).get_context_data(**kwargs) context.update(self.client.get_context_data()) return context class SimpleTemplateClient(Client): template_name = None template_view",
"self.name urls = property(urls) @property def urlpatterns(self): return self.get_urls() def reverse(self, name, *args,",
"try: from django.conf.urls import patterns, url except ImportError: from django.conf.urls.defaults import patterns, url",
"args=args, kwargs=kwargs, current_app=self.app_name) class SimpleTemplateClientView(TemplateView): client = None def get_context_data(self, **kwargs): context =",
"TemplateView from django.core.urlresolvers import reverse from hyperadmin.apirequests import InternalAPIRequest import logging class Client(object):",
"context class SimpleTemplateClient(Client): template_name = None template_view = SimpleTemplateClientView def get_media(self): pass #TODO",
"= api_endpoint.get_url() return {'media':self.get_media(), 'api_endpoint':api_url, 'client':self,} def get_urls(self): urlpatterns = patterns('', url(r'^$', self.template_view.as_view(template_name=self.template_name,",
"hyperadmin.apirequests import InternalAPIRequest import logging class Client(object): default_namespace = 'hyper-client' default_app_name = 'client'",
"api_endpoint.fork(api_request=api_request) api_url = api_endpoint.get_url() return {'media':self.get_media(), 'api_endpoint':api_url, 'client':self,} def get_urls(self): urlpatterns = patterns('',"
] |
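A hedged sketch of how SimpleTemplateClient appears intended to be used: subclass it with a template_name and mount it on a hyperadmin endpoint. DashboardClient, the template path, and my_api_endpoint are placeholders, not names from the project, and the URL wiring is left commented because include() semantics for the (instance, app_name, namespace) tuple vary across old Django versions.

class DashboardClient(SimpleTemplateClient):
    template_name = 'myapp/dashboard.html'  # hypothetical template

# client = DashboardClient(my_api_endpoint, name='dashboard')
# The `urls` property returns (self, app_name, namespace), mirroring the old
# admin.site.urls convention, so it would be wired up roughly as:
# urlpatterns += patterns('', url(r'^dashboard/', include(client.urls)))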
[
"UI file! ################################################################################ from PySide6.QtCore import * from PySide6.QtGui import * from PySide6.QtWidgets",
"be lost when recompiling UI file! ################################################################################ from PySide6.QtCore import * from PySide6.QtGui",
"= QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name, 0, 1, 1, 1) self.label_2 = QLabel(self.widget) self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2,",
"made in this file will be lost when recompiling UI file! ################################################################################ from",
"self.gridLayout.setContentsMargins(0, 0, 0, 0) self.label = QLabel(self.widget) self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label, 0, 0, 1, 1)",
"Ui_NewShelfDialog(object): def setupUi(self, NewShelfDialog): if not NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400, 300) self.buttonBox = QDialogButtonBox(NewShelfDialog)",
"self.buttonBox.setGeometry(QRect(30, 240, 341, 32)) self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget = QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50, 10, 324,",
"will be lost when recompiling UI file! ################################################################################ from PySide6.QtCore import * from",
"0, 1, 1) self.shelf_description = QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description, 1, 1, 1, 1) self.retranslateUi(NewShelfDialog)",
"QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0, 0, 0, 0) self.label = QLabel(self.widget) self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label, 0, 0,",
"coding: utf-8 -*- ################################################################################ ## Form generated from reading UI file 'new_shelf.ui' ##",
"self.gridLayout.addWidget(self.shelf_name, 0, 1, 1, 1) self.label_2 = QLabel(self.widget) self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2, 1, 0, 1,",
"## ## WARNING! All changes made in this file will be lost when",
"import * from PySide6.QtWidgets import * class Ui_NewShelfDialog(object): def setupUi(self, NewShelfDialog): if not",
"= QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30, 240, 341, 32)) self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget = QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\")",
"0, 1, 1) self.shelf_name = QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name, 0, 1, 1, 1) self.label_2",
"0, 1, 1, 1) self.label_2 = QLabel(self.widget) self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)",
"1, 1) self.shelf_name = QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name, 0, 1, 1, 1) self.label_2 =",
"1, 1) self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog) # setupUi def retranslateUi(self, NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\", u\"New",
"self.buttonBox = QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30, 240, 341, 32)) self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget = QWidget(NewShelfDialog)",
"from reading UI file 'new_shelf.ui' ## ## Created by: Qt User Interface Compiler",
"225)) self.gridLayout = QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0, 0, 0, 0) self.label = QLabel(self.widget) self.label.setObjectName(u\"label\")",
"6.1.0 ## ## WARNING! All changes made in this file will be lost",
"0) self.label = QLabel(self.widget) self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label, 0, 0, 1, 1) self.shelf_name = QLineEdit(self.widget)",
"QLabel(self.widget) self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label, 0, 0, 1, 1) self.shelf_name = QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name, 0,",
"self.label_2 = QLabel(self.widget) self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1) self.shelf_description = QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\")",
"setupUi def retranslateUi(self, NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\", u\"New Shelf\", None)) self.label.setText(QCoreApplication.translate(\"NewShelfDialog\", u\"Name\", None)) self.label_2.setText(QCoreApplication.translate(\"NewShelfDialog\", u\"Description\",",
"reading UI file 'new_shelf.ui' ## ## Created by: Qt User Interface Compiler version",
"* from PySide6.QtGui import * from PySide6.QtWidgets import * class Ui_NewShelfDialog(object): def setupUi(self,",
"0, 0) self.label = QLabel(self.widget) self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label, 0, 0, 1, 1) self.shelf_name =",
"self.gridLayout.addWidget(self.label, 0, 0, 1, 1) self.shelf_name = QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name, 0, 1, 1,",
"from PySide6.QtCore import * from PySide6.QtGui import * from PySide6.QtWidgets import * class",
"QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30, 240, 341, 32)) self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget = QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50,",
"Interface Compiler version 6.1.0 ## ## WARNING! All changes made in this file",
"## WARNING! All changes made in this file will be lost when recompiling",
"self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog) # setupUi def retranslateUi(self, NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\", u\"New Shelf\", None)) self.label.setText(QCoreApplication.translate(\"NewShelfDialog\", u\"Name\",",
"0, 0, 0) self.label = QLabel(self.widget) self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label, 0, 0, 1, 1) self.shelf_name",
"PySide6.QtWidgets import * class Ui_NewShelfDialog(object): def setupUi(self, NewShelfDialog): if not NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400,",
"1) self.shelf_name = QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name, 0, 1, 1, 1) self.label_2 = QLabel(self.widget)",
"self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog) # setupUi def retranslateUi(self, NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\", u\"New Shelf\", None))",
"self.shelf_description = QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description, 1, 1, 1, 1) self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog)",
"self.gridLayout.addWidget(self.shelf_description, 1, 1, 1, 1) self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog) # setupUi def retranslateUi(self,",
"= QLabel(self.widget) self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1) self.shelf_description = QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description,",
"Compiler version 6.1.0 ## ## WARNING! All changes made in this file will",
"file will be lost when recompiling UI file! ################################################################################ from PySide6.QtCore import *",
"PySide6.QtCore import * from PySide6.QtGui import * from PySide6.QtWidgets import * class Ui_NewShelfDialog(object):",
"1) self.shelf_description = QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description, 1, 1, 1, 1) self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject)",
"-*- ################################################################################ ## Form generated from reading UI file 'new_shelf.ui' ## ## Created",
"## ## Created by: Qt User Interface Compiler version 6.1.0 ## ## WARNING!",
"= QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50, 10, 324, 225)) self.gridLayout = QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0, 0,",
"QLabel(self.widget) self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1) self.shelf_description = QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description, 1,",
"WARNING! All changes made in this file will be lost when recompiling UI",
"1, 1) self.shelf_description = QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description, 1, 1, 1, 1) self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept)",
"lost when recompiling UI file! ################################################################################ from PySide6.QtCore import * from PySide6.QtGui import",
"300) self.buttonBox = QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30, 240, 341, 32)) self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget =",
"self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1) self.shelf_description = QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description, 1, 1,",
"All changes made in this file will be lost when recompiling UI file!",
"0, 0, 1, 1) self.shelf_name = QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name, 0, 1, 1, 1)",
"= QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description, 1, 1, 1, 1) self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog) #",
"10, 324, 225)) self.gridLayout = QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0, 0, 0, 0) self.label =",
"Qt User Interface Compiler version 6.1.0 ## ## WARNING! All changes made in",
"= QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0, 0, 0, 0) self.label = QLabel(self.widget) self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label, 0,",
"QMetaObject.connectSlotsByName(NewShelfDialog) # setupUi def retranslateUi(self, NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\", u\"New Shelf\", None)) self.label.setText(QCoreApplication.translate(\"NewShelfDialog\", u\"Name\", None))",
"1) self.label_2 = QLabel(self.widget) self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1) self.shelf_description = QTextEdit(self.widget)",
"1, 0, 1, 1) self.shelf_description = QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description, 1, 1, 1, 1)",
"retranslateUi(self, NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\", u\"New Shelf\", None)) self.label.setText(QCoreApplication.translate(\"NewShelfDialog\", u\"Name\", None)) self.label_2.setText(QCoreApplication.translate(\"NewShelfDialog\", u\"Description\", None)) #",
"if not NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400, 300) self.buttonBox = QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30, 240, 341,",
"QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name, 0, 1, 1, 1) self.label_2 = QLabel(self.widget) self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2, 1,",
"def setupUi(self, NewShelfDialog): if not NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400, 300) self.buttonBox = QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\")",
"NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\", u\"New Shelf\", None)) self.label.setText(QCoreApplication.translate(\"NewShelfDialog\", u\"Name\", None)) self.label_2.setText(QCoreApplication.translate(\"NewShelfDialog\", u\"Description\", None)) # retranslateUi",
"def retranslateUi(self, NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\", u\"New Shelf\", None)) self.label.setText(QCoreApplication.translate(\"NewShelfDialog\", u\"Name\", None)) self.label_2.setText(QCoreApplication.translate(\"NewShelfDialog\", u\"Description\", None))",
"'new_shelf.ui' ## ## Created by: Qt User Interface Compiler version 6.1.0 ## ##",
"self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description, 1, 1, 1, 1) self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog) # setupUi def",
"self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name, 0, 1, 1, 1) self.label_2 = QLabel(self.widget) self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2, 1, 0,",
"1, 1, 1) self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog) # setupUi def retranslateUi(self, NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\",",
"self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30, 240, 341, 32)) self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget = QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50, 10,",
"self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1) self.shelf_description = QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description, 1, 1, 1,",
"User Interface Compiler version 6.1.0 ## ## WARNING! All changes made in this",
"self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0, 0, 0, 0) self.label = QLabel(self.widget) self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label, 0, 0, 1,",
"recompiling UI file! ################################################################################ from PySide6.QtCore import * from PySide6.QtGui import * from",
"import * from PySide6.QtGui import * from PySide6.QtWidgets import * class Ui_NewShelfDialog(object): def",
"self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog) # setupUi def retranslateUi(self, NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\", u\"New Shelf\", None)) self.label.setText(QCoreApplication.translate(\"NewShelfDialog\",",
"## Form generated from reading UI file 'new_shelf.ui' ## ## Created by: Qt",
"32)) self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget = QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50, 10, 324, 225)) self.gridLayout =",
"import * class Ui_NewShelfDialog(object): def setupUi(self, NewShelfDialog): if not NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400, 300)",
"self.widget.setGeometry(QRect(50, 10, 324, 225)) self.gridLayout = QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0, 0, 0, 0) self.label",
"* class Ui_NewShelfDialog(object): def setupUi(self, NewShelfDialog): if not NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400, 300) self.buttonBox",
"324, 225)) self.gridLayout = QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0, 0, 0, 0) self.label = QLabel(self.widget)",
"################################################################################ from PySide6.QtCore import * from PySide6.QtGui import * from PySide6.QtWidgets import *",
"341, 32)) self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget = QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50, 10, 324, 225)) self.gridLayout",
"changes made in this file will be lost when recompiling UI file! ################################################################################",
"self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget = QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50, 10, 324, 225)) self.gridLayout = QGridLayout(self.widget)",
"PySide6.QtGui import * from PySide6.QtWidgets import * class Ui_NewShelfDialog(object): def setupUi(self, NewShelfDialog): if",
"when recompiling UI file! ################################################################################ from PySide6.QtCore import * from PySide6.QtGui import *",
"################################################################################ ## Form generated from reading UI file 'new_shelf.ui' ## ## Created by:",
"generated from reading UI file 'new_shelf.ui' ## ## Created by: Qt User Interface",
"# setupUi def retranslateUi(self, NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\", u\"New Shelf\", None)) self.label.setText(QCoreApplication.translate(\"NewShelfDialog\", u\"Name\", None)) self.label_2.setText(QCoreApplication.translate(\"NewShelfDialog\",",
"Form generated from reading UI file 'new_shelf.ui' ## ## Created by: Qt User",
"NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400, 300) self.buttonBox = QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30, 240, 341, 32)) self.buttonBox.setOrientation(Qt.Horizontal)",
"240, 341, 32)) self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget = QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50, 10, 324, 225))",
"NewShelfDialog.resize(400, 300) self.buttonBox = QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30, 240, 341, 32)) self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget",
"self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok) self.widget = QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50, 10, 324, 225)) self.gridLayout = QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\")",
"file 'new_shelf.ui' ## ## Created by: Qt User Interface Compiler version 6.1.0 ##",
"1, 1) self.label_2 = QLabel(self.widget) self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1) self.shelf_description =",
"= QLabel(self.widget) self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label, 0, 0, 1, 1) self.shelf_name = QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name,",
"self.shelf_name = QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name, 0, 1, 1, 1) self.label_2 = QLabel(self.widget) self.label_2.setObjectName(u\"label_2\")",
"class Ui_NewShelfDialog(object): def setupUi(self, NewShelfDialog): if not NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400, 300) self.buttonBox =",
"self.gridLayout = QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0, 0, 0, 0) self.label = QLabel(self.widget) self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label,",
"1) self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog) # setupUi def retranslateUi(self, NewShelfDialog): NewShelfDialog.setWindowTitle(QCoreApplication.translate(\"NewShelfDialog\", u\"New Shelf\",",
"-*- coding: utf-8 -*- ################################################################################ ## Form generated from reading UI file 'new_shelf.ui'",
"self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50, 10, 324, 225)) self.gridLayout = QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0, 0, 0, 0)",
"* from PySide6.QtWidgets import * class Ui_NewShelfDialog(object): def setupUi(self, NewShelfDialog): if not NewShelfDialog.objectName():",
"this file will be lost when recompiling UI file! ################################################################################ from PySide6.QtCore import",
"from PySide6.QtWidgets import * class Ui_NewShelfDialog(object): def setupUi(self, NewShelfDialog): if not NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\")",
"1, 1, 1) self.label_2 = QLabel(self.widget) self.label_2.setObjectName(u\"label_2\") self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1) self.shelf_description",
"QTextEdit(self.widget) self.shelf_description.setObjectName(u\"shelf_description\") self.gridLayout.addWidget(self.shelf_description, 1, 1, 1, 1) self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog) # setupUi",
"UI file 'new_shelf.ui' ## ## Created by: Qt User Interface Compiler version 6.1.0",
"Created by: Qt User Interface Compiler version 6.1.0 ## ## WARNING! All changes",
"self.label = QLabel(self.widget) self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label, 0, 0, 1, 1) self.shelf_name = QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\")",
"utf-8 -*- ################################################################################ ## Form generated from reading UI file 'new_shelf.ui' ## ##",
"self.widget = QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50, 10, 324, 225)) self.gridLayout = QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0,",
"not NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400, 300) self.buttonBox = QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30, 240, 341, 32))",
"QWidget(NewShelfDialog) self.widget.setObjectName(u\"widget\") self.widget.setGeometry(QRect(50, 10, 324, 225)) self.gridLayout = QGridLayout(self.widget) self.gridLayout.setObjectName(u\"gridLayout\") self.gridLayout.setContentsMargins(0, 0, 0,",
"by: Qt User Interface Compiler version 6.1.0 ## ## WARNING! All changes made",
"NewShelfDialog): if not NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400, 300) self.buttonBox = QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30, 240,",
"## Created by: Qt User Interface Compiler version 6.1.0 ## ## WARNING! All",
"setupUi(self, NewShelfDialog): if not NewShelfDialog.objectName(): NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400, 300) self.buttonBox = QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30,",
"self.label.setObjectName(u\"label\") self.gridLayout.addWidget(self.label, 0, 0, 1, 1) self.shelf_name = QLineEdit(self.widget) self.shelf_name.setObjectName(u\"shelf_name\") self.gridLayout.addWidget(self.shelf_name, 0, 1,",
"file! ################################################################################ from PySide6.QtCore import * from PySide6.QtGui import * from PySide6.QtWidgets import",
"in this file will be lost when recompiling UI file! ################################################################################ from PySide6.QtCore",
"from PySide6.QtGui import * from PySide6.QtWidgets import * class Ui_NewShelfDialog(object): def setupUi(self, NewShelfDialog):",
"version 6.1.0 ## ## WARNING! All changes made in this file will be",
"NewShelfDialog.setObjectName(u\"NewShelfDialog\") NewShelfDialog.resize(400, 300) self.buttonBox = QDialogButtonBox(NewShelfDialog) self.buttonBox.setObjectName(u\"buttonBox\") self.buttonBox.setGeometry(QRect(30, 240, 341, 32)) self.buttonBox.setOrientation(Qt.Horizontal) self.buttonBox.setStandardButtons(QDialogButtonBox.Cancel|QDialogButtonBox.Ok)",
"# -*- coding: utf-8 -*- ################################################################################ ## Form generated from reading UI file",
"1, 1, 1, 1) self.retranslateUi(NewShelfDialog) self.buttonBox.accepted.connect(NewShelfDialog.accept) self.buttonBox.rejected.connect(NewShelfDialog.reject) QMetaObject.connectSlotsByName(NewShelfDialog) # setupUi def retranslateUi(self, NewShelfDialog):"
] |
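Generated Ui_* classes like this one carry no Qt base class of their own; they are composed with a live widget at runtime. A minimal sketch of the standard pattern, assuming PySide6 is installed:

import sys
from PySide6.QtWidgets import QApplication, QDialog

app = QApplication(sys.argv)
dialog = QDialog()
ui = Ui_NewShelfDialog()
ui.setupUi(dialog)  # builds the widgets onto the dialog
if dialog.exec():   # Ok maps to accept(), Cancel to reject()
    print(ui.shelf_name.text(), ui.shelf_description.toPlainText())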
[
"Directory that keeps all the output files. prefix : str Prefix of the",
"platform. Parameters ---------- Return ------ platform : str \"linux\" or \"macosx\". \"\"\" from",
"Default: True \"\"\" if wcs is not None: wcs_header = wcs.to_header() data_hdu =",
"in glob.glob(\"{}/{}*.pkl\".format(folder, prefix))] _ = [os.remove(img) for img in glob.glob(\"{}/{}*.fits\".format(folder, prefix))] _ =",
"def read_from_pickle(name): \"\"\"Read the data from Pickle file.\"\"\" return pickle.load(open(name, \"rb\")) def save_to_pickle(obj,",
"Parameters ---------- Return ------ platform : str \"linux\" or \"macosx\". \"\"\" from sys",
"astropy.wcs object, optional World coordinate system information. Default: None header : str, optional",
"ndarray Data to be saved in FITS file. fits_file : str Name of",
"Default: None overwrite : bool, optional Overwrite existing file or not. Default: True",
"linux_or_mac(): \"\"\"Check the current platform. Parameters ---------- Return ------ platform : str \"linux\"",
"_ = [os.remove(tab) for tab in glob.glob(\"{}/{}*.tab\".format(folder, prefix))] if remove_bin: _ = [os.remove(bin)",
"object to a cPickle/Pickle format binary file.\"\"\" output = open(name, 'wb') pickle.dump(obj, output,",
"in glob.glob(\"{}/{}*.par\".format(folder, prefix))] _ = [os.remove(pkl) for pkl in glob.glob(\"{}/{}*.pkl\".format(folder, prefix))] _ =",
"wcs_header = wcs.to_header() data_hdu = fits.PrimaryHDU(data, header=wcs_header) else: data_hdu = fits.PrimaryHDU(data) if header",
"file.\"\"\" return pickle.load(open(name, \"rb\")) def save_to_pickle(obj, name): \"\"\"Save an object to a cPickle/Pickle",
"pickle from astropy.io import fits __all__ = [ 'read_from_pickle', 'save_to_pickle', 'save_to_fits', 'linux_or_mac', 'clean_after_ellipse',",
"Overwrite existing file or not. Default: True \"\"\" if wcs is not None:",
"from sys import platform if platform == \"linux\" or platform == \"linux2\": return",
"of the file. remove_bin : bool, optional Remove the output binary table or",
"be saved in FITS file. fits_file : str Name of the FITS file.",
"return \"linux\" elif platform == \"darwin\": return \"macosx\" else: raise TypeError(\"# Sorry, only",
"tab in glob.glob(\"{}/{}*.tab\".format(folder, prefix))] if remove_bin: _ = [os.remove(bin) for bin in glob.glob(\"{}/{}*.bin\".format(folder,",
"output.close() return def save_to_fits(data, fits_file, wcs=None, header=None, overwrite=True): \"\"\"Save a NDarray to FITS",
"remove_bin : bool, optional Remove the output binary table or not. Default: False",
"def save_to_fits(data, fits_file, wcs=None, header=None, overwrite=True): \"\"\"Save a NDarray to FITS file. Parameters",
"str Directory that keeps all the output files. prefix : str Prefix of",
"-*- \"\"\"Useful tools.\"\"\" import os import glob import pickle from astropy.io import fits",
"Header information. Default: None overwrite : bool, optional Overwrite existing file or not.",
"for img in glob.glob(\"{}/{}*.fits\".format(folder, prefix))] _ = [os.remove(tab) for tab in glob.glob(\"{}/{}*.tab\".format(folder, prefix))]",
"str Name of the FITS file. wcs : astropy.wcs object, optional World coordinate",
"\"\"\"Read the data from Pickle file.\"\"\" return pickle.load(open(name, \"rb\")) def save_to_pickle(obj, name): \"\"\"Save",
"wcs is not None: wcs_header = wcs.to_header() data_hdu = fits.PrimaryHDU(data, header=wcs_header) else: data_hdu",
"data_hdu.writeto(fits_file, overwrite=overwrite) return def linux_or_mac(): \"\"\"Check the current platform. Parameters ---------- Return ------",
"\"\"\" from sys import platform if platform == \"linux\" or platform == \"linux2\":",
"sys import platform if platform == \"linux\" or platform == \"linux2\": return \"linux\"",
"optional Header information. Default: None overwrite : bool, optional Overwrite existing file or",
"in header: data_hdu.header = header else: data_hdu.header.extend(header) if os.path.islink(fits_file): os.unlink(fits_file) data_hdu.writeto(fits_file, overwrite=overwrite) return",
"format binary file.\"\"\" output = open(name, 'wb') pickle.dump(obj, output, protocol=2) output.close() return def",
"keeps all the output files. prefix : str Prefix of the file. remove_bin",
"support Linux and MacOSX for now!\") def clean_after_ellipse(folder, prefix, remove_bin=False): \"\"\"Clean all the",
"to a cPickle/Pickle format binary file.\"\"\" output = open(name, 'wb') pickle.dump(obj, output, protocol=2)",
"FITS file. Parameters ---------- data : ndarray Data to be saved in FITS",
"= fits.PrimaryHDU(data, header=wcs_header) else: data_hdu = fits.PrimaryHDU(data) if header is not None: if",
"cPickle/Pickle format binary file.\"\"\" output = open(name, 'wb') pickle.dump(obj, output, protocol=2) output.close() return",
"= fits.PrimaryHDU(data) if header is not None: if 'SIMPLE' in header and 'BITPIX'",
"\"\"\"Save a NDarray to FITS file. Parameters ---------- data : ndarray Data to",
"after ellipse run. Parameters ---------- folder : str Directory that keeps all the",
"header : str, optional Header information. Default: None overwrite : bool, optional Overwrite",
"= [os.remove(img) for img in glob.glob(\"{}/{}*.fits\".format(folder, prefix))] _ = [os.remove(tab) for tab in",
"[os.remove(tab) for tab in glob.glob(\"{}/{}*.tab\".format(folder, prefix))] if remove_bin: _ = [os.remove(bin) for bin",
"---------- data : ndarray Data to be saved in FITS file. fits_file :",
"fits_file : str Name of the FITS file. wcs : astropy.wcs object, optional",
"# -*- coding: utf-8 -*- \"\"\"Useful tools.\"\"\" import os import glob import pickle",
"data_hdu = fits.PrimaryHDU(data, header=wcs_header) else: data_hdu = fits.PrimaryHDU(data) if header is not None:",
"os.path.islink(fits_file): os.unlink(fits_file) data_hdu.writeto(fits_file, overwrite=overwrite) return def linux_or_mac(): \"\"\"Check the current platform. Parameters ----------",
"= [os.remove(par) for par in glob.glob(\"{}/{}*.par\".format(folder, prefix))] _ = [os.remove(pkl) for pkl in",
"save_to_pickle(obj, name): \"\"\"Save an object to a cPickle/Pickle format binary file.\"\"\" output =",
"output binary table or not. Default: False \"\"\" _ = [os.remove(par) for par",
"None: wcs_header = wcs.to_header() data_hdu = fits.PrimaryHDU(data, header=wcs_header) else: data_hdu = fits.PrimaryHDU(data) if",
"'read_from_pickle', 'save_to_pickle', 'save_to_fits', 'linux_or_mac', 'clean_after_ellipse', ] def read_from_pickle(name): \"\"\"Read the data from Pickle",
"None: if 'SIMPLE' in header and 'BITPIX' in header: data_hdu.header = header else:",
"header else: data_hdu.header.extend(header) if os.path.islink(fits_file): os.unlink(fits_file) data_hdu.writeto(fits_file, overwrite=overwrite) return def linux_or_mac(): \"\"\"Check the",
": str, optional Header information. Default: None overwrite : bool, optional Overwrite existing",
"header=wcs_header) else: data_hdu = fits.PrimaryHDU(data) if header is not None: if 'SIMPLE' in",
"the current platform. Parameters ---------- Return ------ platform : str \"linux\" or \"macosx\".",
"prefix))] _ = [os.remove(pkl) for pkl in glob.glob(\"{}/{}*.pkl\".format(folder, prefix))] _ = [os.remove(img) for",
"os import glob import pickle from astropy.io import fits __all__ = [ 'read_from_pickle',",
"open(name, 'wb') pickle.dump(obj, output, protocol=2) output.close() return def save_to_fits(data, fits_file, wcs=None, header=None, overwrite=True):",
"fits_file, wcs=None, header=None, overwrite=True): \"\"\"Save a NDarray to FITS file. Parameters ---------- data",
"'save_to_pickle', 'save_to_fits', 'linux_or_mac', 'clean_after_ellipse', ] def read_from_pickle(name): \"\"\"Read the data from Pickle file.\"\"\"",
"an object to a cPickle/Pickle format binary file.\"\"\" output = open(name, 'wb') pickle.dump(obj,",
": str Prefix of the file. remove_bin : bool, optional Remove the output",
"Pickle file.\"\"\" return pickle.load(open(name, \"rb\")) def save_to_pickle(obj, name): \"\"\"Save an object to a",
"system information. Default: None header : str, optional Header information. Default: None overwrite",
"information. Default: None overwrite : bool, optional Overwrite existing file or not. Default:",
"header and 'BITPIX' in header: data_hdu.header = header else: data_hdu.header.extend(header) if os.path.islink(fits_file): os.unlink(fits_file)",
"files after ellipse run. Parameters ---------- folder : str Directory that keeps all",
"import os import glob import pickle from astropy.io import fits __all__ = [",
"object, optional World coordinate system information. Default: None header : str, optional Header",
"or \"macosx\". \"\"\" from sys import platform if platform == \"linux\" or platform",
"glob.glob(\"{}/{}*.par\".format(folder, prefix))] _ = [os.remove(pkl) for pkl in glob.glob(\"{}/{}*.pkl\".format(folder, prefix))] _ = [os.remove(img)",
"[ 'read_from_pickle', 'save_to_pickle', 'save_to_fits', 'linux_or_mac', 'clean_after_ellipse', ] def read_from_pickle(name): \"\"\"Read the data from",
"python # -*- coding: utf-8 -*- \"\"\"Useful tools.\"\"\" import os import glob import",
"\"\"\"Save an object to a cPickle/Pickle format binary file.\"\"\" output = open(name, 'wb')",
"fits.PrimaryHDU(data) if header is not None: if 'SIMPLE' in header and 'BITPIX' in",
"fits __all__ = [ 'read_from_pickle', 'save_to_pickle', 'save_to_fits', 'linux_or_mac', 'clean_after_ellipse', ] def read_from_pickle(name): \"\"\"Read",
"the file. remove_bin : bool, optional Remove the output binary table or not.",
"is not None: wcs_header = wcs.to_header() data_hdu = fits.PrimaryHDU(data, header=wcs_header) else: data_hdu =",
"optional Remove the output binary table or not. Default: False \"\"\" _ =",
"output, protocol=2) output.close() return def save_to_fits(data, fits_file, wcs=None, header=None, overwrite=True): \"\"\"Save a NDarray",
"__all__ = [ 'read_from_pickle', 'save_to_pickle', 'save_to_fits', 'linux_or_mac', 'clean_after_ellipse', ] def read_from_pickle(name): \"\"\"Read the",
"---------- Return ------ platform : str \"linux\" or \"macosx\". \"\"\" from sys import",
"the data from Pickle file.\"\"\" return pickle.load(open(name, \"rb\")) def save_to_pickle(obj, name): \"\"\"Save an",
"from astropy.io import fits __all__ = [ 'read_from_pickle', 'save_to_pickle', 'save_to_fits', 'linux_or_mac', 'clean_after_ellipse', ]",
"if os.path.islink(fits_file): os.unlink(fits_file) data_hdu.writeto(fits_file, overwrite=overwrite) return def linux_or_mac(): \"\"\"Check the current platform. Parameters",
"platform == \"linux\" or platform == \"linux2\": return \"linux\" elif platform == \"darwin\":",
"import glob import pickle from astropy.io import fits __all__ = [ 'read_from_pickle', 'save_to_pickle',",
"run. Parameters ---------- folder : str Directory that keeps all the output files.",
"Parameters ---------- folder : str Directory that keeps all the output files. prefix",
"str, optional Header information. Default: None overwrite : bool, optional Overwrite existing file",
"data_hdu.header.extend(header) if os.path.islink(fits_file): os.unlink(fits_file) data_hdu.writeto(fits_file, overwrite=overwrite) return def linux_or_mac(): \"\"\"Check the current platform.",
"Linux and MacOSX for now!\") def clean_after_ellipse(folder, prefix, remove_bin=False): \"\"\"Clean all the unecessary",
"def linux_or_mac(): \"\"\"Check the current platform. Parameters ---------- Return ------ platform : str",
"only support Linux and MacOSX for now!\") def clean_after_ellipse(folder, prefix, remove_bin=False): \"\"\"Clean all",
"def save_to_pickle(obj, name): \"\"\"Save an object to a cPickle/Pickle format binary file.\"\"\" output",
"\"linux\" elif platform == \"darwin\": return \"macosx\" else: raise TypeError(\"# Sorry, only support",
"return pickle.load(open(name, \"rb\")) def save_to_pickle(obj, name): \"\"\"Save an object to a cPickle/Pickle format",
"-*- coding: utf-8 -*- \"\"\"Useful tools.\"\"\" import os import glob import pickle from",
"import platform if platform == \"linux\" or platform == \"linux2\": return \"linux\" elif",
"import pickle from astropy.io import fits __all__ = [ 'read_from_pickle', 'save_to_pickle', 'save_to_fits', 'linux_or_mac',",
"'SIMPLE' in header and 'BITPIX' in header: data_hdu.header = header else: data_hdu.header.extend(header) if",
"current platform. Parameters ---------- Return ------ platform : str \"linux\" or \"macosx\". \"\"\"",
"the unecessary files after ellipse run. Parameters ---------- folder : str Directory that",
"not None: wcs_header = wcs.to_header() data_hdu = fits.PrimaryHDU(data, header=wcs_header) else: data_hdu = fits.PrimaryHDU(data)",
"table or not. Default: False \"\"\" _ = [os.remove(par) for par in glob.glob(\"{}/{}*.par\".format(folder,",
"wcs.to_header() data_hdu = fits.PrimaryHDU(data, header=wcs_header) else: data_hdu = fits.PrimaryHDU(data) if header is not",
"\"macosx\" else: raise TypeError(\"# Sorry, only support Linux and MacOSX for now!\") def",
"and MacOSX for now!\") def clean_after_ellipse(folder, prefix, remove_bin=False): \"\"\"Clean all the unecessary files",
"file. remove_bin : bool, optional Remove the output binary table or not. Default:",
"Data to be saved in FITS file. fits_file : str Name of the",
"== \"linux2\": return \"linux\" elif platform == \"darwin\": return \"macosx\" else: raise TypeError(\"#",
"folder : str Directory that keeps all the output files. prefix : str",
"Default: False \"\"\" _ = [os.remove(par) for par in glob.glob(\"{}/{}*.par\".format(folder, prefix))] _ =",
"par in glob.glob(\"{}/{}*.par\".format(folder, prefix))] _ = [os.remove(pkl) for pkl in glob.glob(\"{}/{}*.pkl\".format(folder, prefix))] _",
"---------- folder : str Directory that keeps all the output files. prefix :",
"protocol=2) output.close() return def save_to_fits(data, fits_file, wcs=None, header=None, overwrite=True): \"\"\"Save a NDarray to",
"data_hdu = fits.PrimaryHDU(data) if header is not None: if 'SIMPLE' in header and",
"img in glob.glob(\"{}/{}*.fits\".format(folder, prefix))] _ = [os.remove(tab) for tab in glob.glob(\"{}/{}*.tab\".format(folder, prefix))] if",
"not. Default: False \"\"\" _ = [os.remove(par) for par in glob.glob(\"{}/{}*.par\".format(folder, prefix))] _",
"optional Overwrite existing file or not. Default: True \"\"\" if wcs is not",
"astropy.io import fits __all__ = [ 'read_from_pickle', 'save_to_pickle', 'save_to_fits', 'linux_or_mac', 'clean_after_ellipse', ] def",
"existing file or not. Default: True \"\"\" if wcs is not None: wcs_header",
"World coordinate system information. Default: None header : str, optional Header information. Default:",
"\"\"\"Check the current platform. Parameters ---------- Return ------ platform : str \"linux\" or",
"wcs : astropy.wcs object, optional World coordinate system information. Default: None header :",
"that keeps all the output files. prefix : str Prefix of the file.",
"Remove the output binary table or not. Default: False \"\"\" _ = [os.remove(par)",
": ndarray Data to be saved in FITS file. fits_file : str Name",
"tools.\"\"\" import os import glob import pickle from astropy.io import fits __all__ =",
"import fits __all__ = [ 'read_from_pickle', 'save_to_pickle', 'save_to_fits', 'linux_or_mac', 'clean_after_ellipse', ] def read_from_pickle(name):",
"glob.glob(\"{}/{}*.fits\".format(folder, prefix))] _ = [os.remove(tab) for tab in glob.glob(\"{}/{}*.tab\".format(folder, prefix))] if remove_bin: _",
"fits.PrimaryHDU(data, header=wcs_header) else: data_hdu = fits.PrimaryHDU(data) if header is not None: if 'SIMPLE'",
"Return ------ platform : str \"linux\" or \"macosx\". \"\"\" from sys import platform",
"str Prefix of the file. remove_bin : bool, optional Remove the output binary",
"in header and 'BITPIX' in header: data_hdu.header = header else: data_hdu.header.extend(header) if os.path.islink(fits_file):",
"os.unlink(fits_file) data_hdu.writeto(fits_file, overwrite=overwrite) return def linux_or_mac(): \"\"\"Check the current platform. Parameters ---------- Return",
"= [ 'read_from_pickle', 'save_to_pickle', 'save_to_fits', 'linux_or_mac', 'clean_after_ellipse', ] def read_from_pickle(name): \"\"\"Read the data",
"files. prefix : str Prefix of the file. remove_bin : bool, optional Remove",
"platform : str \"linux\" or \"macosx\". \"\"\" from sys import platform if platform",
"saved in FITS file. fits_file : str Name of the FITS file. wcs",
"\"linux\" or platform == \"linux2\": return \"linux\" elif platform == \"darwin\": return \"macosx\"",
"optional World coordinate system information. Default: None header : str, optional Header information.",
"platform == \"darwin\": return \"macosx\" else: raise TypeError(\"# Sorry, only support Linux and",
"if wcs is not None: wcs_header = wcs.to_header() data_hdu = fits.PrimaryHDU(data, header=wcs_header) else:",
"in glob.glob(\"{}/{}*.tab\".format(folder, prefix))] if remove_bin: _ = [os.remove(bin) for bin in glob.glob(\"{}/{}*.bin\".format(folder, prefix))]",
"Default: None header : str, optional Header information. Default: None overwrite : bool,",
"file.\"\"\" output = open(name, 'wb') pickle.dump(obj, output, protocol=2) output.close() return def save_to_fits(data, fits_file,",
"a cPickle/Pickle format binary file.\"\"\" output = open(name, 'wb') pickle.dump(obj, output, protocol=2) output.close()",
"overwrite : bool, optional Overwrite existing file or not. Default: True \"\"\" if",
"\"darwin\": return \"macosx\" else: raise TypeError(\"# Sorry, only support Linux and MacOSX for",
"wcs=None, header=None, overwrite=True): \"\"\"Save a NDarray to FITS file. Parameters ---------- data :",
"platform if platform == \"linux\" or platform == \"linux2\": return \"linux\" elif platform",
"all the output files. prefix : str Prefix of the file. remove_bin :",
"bool, optional Remove the output binary table or not. Default: False \"\"\" _",
"#!/usr/bin/env python # -*- coding: utf-8 -*- \"\"\"Useful tools.\"\"\" import os import glob",
"header=None, overwrite=True): \"\"\"Save a NDarray to FITS file. Parameters ---------- data : ndarray",
"for pkl in glob.glob(\"{}/{}*.pkl\".format(folder, prefix))] _ = [os.remove(img) for img in glob.glob(\"{}/{}*.fits\".format(folder, prefix))]",
"platform == \"linux2\": return \"linux\" elif platform == \"darwin\": return \"macosx\" else: raise",
"[os.remove(par) for par in glob.glob(\"{}/{}*.par\".format(folder, prefix))] _ = [os.remove(pkl) for pkl in glob.glob(\"{}/{}*.pkl\".format(folder,",
"raise TypeError(\"# Sorry, only support Linux and MacOSX for now!\") def clean_after_ellipse(folder, prefix,",
"Sorry, only support Linux and MacOSX for now!\") def clean_after_ellipse(folder, prefix, remove_bin=False): \"\"\"Clean",
"_ = [os.remove(img) for img in glob.glob(\"{}/{}*.fits\".format(folder, prefix))] _ = [os.remove(tab) for tab",
"------ platform : str \"linux\" or \"macosx\". \"\"\" from sys import platform if",
"elif platform == \"darwin\": return \"macosx\" else: raise TypeError(\"# Sorry, only support Linux",
"return def linux_or_mac(): \"\"\"Check the current platform. Parameters ---------- Return ------ platform :",
"coding: utf-8 -*- \"\"\"Useful tools.\"\"\" import os import glob import pickle from astropy.io",
"Parameters ---------- data : ndarray Data to be saved in FITS file. fits_file",
"'clean_after_ellipse', ] def read_from_pickle(name): \"\"\"Read the data from Pickle file.\"\"\" return pickle.load(open(name, \"rb\"))",
"Name of the FITS file. wcs : astropy.wcs object, optional World coordinate system",
"True \"\"\" if wcs is not None: wcs_header = wcs.to_header() data_hdu = fits.PrimaryHDU(data,",
"the FITS file. wcs : astropy.wcs object, optional World coordinate system information. Default:",
"clean_after_ellipse(folder, prefix, remove_bin=False): \"\"\"Clean all the unecessary files after ellipse run. Parameters ----------",
"prefix))] _ = [os.remove(tab) for tab in glob.glob(\"{}/{}*.tab\".format(folder, prefix))] if remove_bin: _ =",
"for par in glob.glob(\"{}/{}*.par\".format(folder, prefix))] _ = [os.remove(pkl) for pkl in glob.glob(\"{}/{}*.pkl\".format(folder, prefix))]",
"now!\") def clean_after_ellipse(folder, prefix, remove_bin=False): \"\"\"Clean all the unecessary files after ellipse run.",
"binary file.\"\"\" output = open(name, 'wb') pickle.dump(obj, output, protocol=2) output.close() return def save_to_fits(data,",
"Prefix of the file. remove_bin : bool, optional Remove the output binary table",
"file or not. Default: True \"\"\" if wcs is not None: wcs_header =",
"\"rb\")) def save_to_pickle(obj, name): \"\"\"Save an object to a cPickle/Pickle format binary file.\"\"\"",
"file. wcs : astropy.wcs object, optional World coordinate system information. Default: None header",
"utf-8 -*- \"\"\"Useful tools.\"\"\" import os import glob import pickle from astropy.io import",
"to FITS file. Parameters ---------- data : ndarray Data to be saved in",
"_ = [os.remove(par) for par in glob.glob(\"{}/{}*.par\".format(folder, prefix))] _ = [os.remove(pkl) for pkl",
"str \"linux\" or \"macosx\". \"\"\" from sys import platform if platform == \"linux\"",
": bool, optional Remove the output binary table or not. Default: False \"\"\"",
"to be saved in FITS file. fits_file : str Name of the FITS",
"a NDarray to FITS file. Parameters ---------- data : ndarray Data to be",
"== \"linux\" or platform == \"linux2\": return \"linux\" elif platform == \"darwin\": return",
"name): \"\"\"Save an object to a cPickle/Pickle format binary file.\"\"\" output = open(name,",
"and 'BITPIX' in header: data_hdu.header = header else: data_hdu.header.extend(header) if os.path.islink(fits_file): os.unlink(fits_file) data_hdu.writeto(fits_file,",
"return def save_to_fits(data, fits_file, wcs=None, header=None, overwrite=True): \"\"\"Save a NDarray to FITS file.",
"not. Default: True \"\"\" if wcs is not None: wcs_header = wcs.to_header() data_hdu",
": astropy.wcs object, optional World coordinate system information. Default: None header : str,",
"data_hdu.header = header else: data_hdu.header.extend(header) if os.path.islink(fits_file): os.unlink(fits_file) data_hdu.writeto(fits_file, overwrite=overwrite) return def linux_or_mac():",
"unecessary files after ellipse run. Parameters ---------- folder : str Directory that keeps",
"prefix : str Prefix of the file. remove_bin : bool, optional Remove the",
"pickle.load(open(name, \"rb\")) def save_to_pickle(obj, name): \"\"\"Save an object to a cPickle/Pickle format binary",
"None header : str, optional Header information. Default: None overwrite : bool, optional",
"save_to_fits(data, fits_file, wcs=None, header=None, overwrite=True): \"\"\"Save a NDarray to FITS file. Parameters ----------",
"for now!\") def clean_after_ellipse(folder, prefix, remove_bin=False): \"\"\"Clean all the unecessary files after ellipse",
"[os.remove(img) for img in glob.glob(\"{}/{}*.fits\".format(folder, prefix))] _ = [os.remove(tab) for tab in glob.glob(\"{}/{}*.tab\".format(folder,",
"from Pickle file.\"\"\" return pickle.load(open(name, \"rb\")) def save_to_pickle(obj, name): \"\"\"Save an object to",
"ellipse run. Parameters ---------- folder : str Directory that keeps all the output",
"\"\"\" _ = [os.remove(par) for par in glob.glob(\"{}/{}*.par\".format(folder, prefix))] _ = [os.remove(pkl) for",
"in FITS file. fits_file : str Name of the FITS file. wcs :",
"prefix))] _ = [os.remove(img) for img in glob.glob(\"{}/{}*.fits\".format(folder, prefix))] _ = [os.remove(tab) for",
"overwrite=True): \"\"\"Save a NDarray to FITS file. Parameters ---------- data : ndarray Data",
"output files. prefix : str Prefix of the file. remove_bin : bool, optional",
": str Name of the FITS file. wcs : astropy.wcs object, optional World",
"None overwrite : bool, optional Overwrite existing file or not. Default: True \"\"\"",
"MacOSX for now!\") def clean_after_ellipse(folder, prefix, remove_bin=False): \"\"\"Clean all the unecessary files after",
"'wb') pickle.dump(obj, output, protocol=2) output.close() return def save_to_fits(data, fits_file, wcs=None, header=None, overwrite=True): \"\"\"Save",
"prefix, remove_bin=False): \"\"\"Clean all the unecessary files after ellipse run. Parameters ---------- folder",
"= [os.remove(tab) for tab in glob.glob(\"{}/{}*.tab\".format(folder, prefix))] if remove_bin: _ = [os.remove(bin) for",
"file. Parameters ---------- data : ndarray Data to be saved in FITS file.",
"file. fits_file : str Name of the FITS file. wcs : astropy.wcs object,",
"data from Pickle file.\"\"\" return pickle.load(open(name, \"rb\")) def save_to_pickle(obj, name): \"\"\"Save an object",
"= open(name, 'wb') pickle.dump(obj, output, protocol=2) output.close() return def save_to_fits(data, fits_file, wcs=None, header=None,",
"information. Default: None header : str, optional Header information. Default: None overwrite :",
"if platform == \"linux\" or platform == \"linux2\": return \"linux\" elif platform ==",
"'linux_or_mac', 'clean_after_ellipse', ] def read_from_pickle(name): \"\"\"Read the data from Pickle file.\"\"\" return pickle.load(open(name,",
"def clean_after_ellipse(folder, prefix, remove_bin=False): \"\"\"Clean all the unecessary files after ellipse run. Parameters",
"glob import pickle from astropy.io import fits __all__ = [ 'read_from_pickle', 'save_to_pickle', 'save_to_fits',",
"glob.glob(\"{}/{}*.pkl\".format(folder, prefix))] _ = [os.remove(img) for img in glob.glob(\"{}/{}*.fits\".format(folder, prefix))] _ = [os.remove(tab)",
"FITS file. wcs : astropy.wcs object, optional World coordinate system information. Default: None",
"is not None: if 'SIMPLE' in header and 'BITPIX' in header: data_hdu.header =",
"else: raise TypeError(\"# Sorry, only support Linux and MacOSX for now!\") def clean_after_ellipse(folder,",
"in glob.glob(\"{}/{}*.fits\".format(folder, prefix))] _ = [os.remove(tab) for tab in glob.glob(\"{}/{}*.tab\".format(folder, prefix))] if remove_bin:",
"= [os.remove(pkl) for pkl in glob.glob(\"{}/{}*.pkl\".format(folder, prefix))] _ = [os.remove(img) for img in",
"\"linux\" or \"macosx\". \"\"\" from sys import platform if platform == \"linux\" or",
"== \"darwin\": return \"macosx\" else: raise TypeError(\"# Sorry, only support Linux and MacOSX",
"return \"macosx\" else: raise TypeError(\"# Sorry, only support Linux and MacOSX for now!\")",
"[os.remove(pkl) for pkl in glob.glob(\"{}/{}*.pkl\".format(folder, prefix))] _ = [os.remove(img) for img in glob.glob(\"{}/{}*.fits\".format(folder,",
"or not. Default: True \"\"\" if wcs is not None: wcs_header = wcs.to_header()",
"'save_to_fits', 'linux_or_mac', 'clean_after_ellipse', ] def read_from_pickle(name): \"\"\"Read the data from Pickle file.\"\"\" return",
": str Directory that keeps all the output files. prefix : str Prefix",
"the output binary table or not. Default: False \"\"\" _ = [os.remove(par) for",
"pickle.dump(obj, output, protocol=2) output.close() return def save_to_fits(data, fits_file, wcs=None, header=None, overwrite=True): \"\"\"Save a",
"else: data_hdu = fits.PrimaryHDU(data) if header is not None: if 'SIMPLE' in header",
"not None: if 'SIMPLE' in header and 'BITPIX' in header: data_hdu.header = header",
"if 'SIMPLE' in header and 'BITPIX' in header: data_hdu.header = header else: data_hdu.header.extend(header)",
"else: data_hdu.header.extend(header) if os.path.islink(fits_file): os.unlink(fits_file) data_hdu.writeto(fits_file, overwrite=overwrite) return def linux_or_mac(): \"\"\"Check the current",
": str \"linux\" or \"macosx\". \"\"\" from sys import platform if platform ==",
"header: data_hdu.header = header else: data_hdu.header.extend(header) if os.path.islink(fits_file): os.unlink(fits_file) data_hdu.writeto(fits_file, overwrite=overwrite) return def",
"the output files. prefix : str Prefix of the file. remove_bin : bool,",
"] def read_from_pickle(name): \"\"\"Read the data from Pickle file.\"\"\" return pickle.load(open(name, \"rb\")) def",
"data : ndarray Data to be saved in FITS file. fits_file : str",
": bool, optional Overwrite existing file or not. Default: True \"\"\" if wcs",
"FITS file. fits_file : str Name of the FITS file. wcs : astropy.wcs",
"of the FITS file. wcs : astropy.wcs object, optional World coordinate system information.",
"all the unecessary files after ellipse run. Parameters ---------- folder : str Directory",
"_ = [os.remove(pkl) for pkl in glob.glob(\"{}/{}*.pkl\".format(folder, prefix))] _ = [os.remove(img) for img",
"\"linux2\": return \"linux\" elif platform == \"darwin\": return \"macosx\" else: raise TypeError(\"# Sorry,",
"'BITPIX' in header: data_hdu.header = header else: data_hdu.header.extend(header) if os.path.islink(fits_file): os.unlink(fits_file) data_hdu.writeto(fits_file, overwrite=overwrite)",
"pkl in glob.glob(\"{}/{}*.pkl\".format(folder, prefix))] _ = [os.remove(img) for img in glob.glob(\"{}/{}*.fits\".format(folder, prefix))] _",
"\"\"\" if wcs is not None: wcs_header = wcs.to_header() data_hdu = fits.PrimaryHDU(data, header=wcs_header)",
"output = open(name, 'wb') pickle.dump(obj, output, protocol=2) output.close() return def save_to_fits(data, fits_file, wcs=None,",
"overwrite=overwrite) return def linux_or_mac(): \"\"\"Check the current platform. Parameters ---------- Return ------ platform",
"or platform == \"linux2\": return \"linux\" elif platform == \"darwin\": return \"macosx\" else:",
"TypeError(\"# Sorry, only support Linux and MacOSX for now!\") def clean_after_ellipse(folder, prefix, remove_bin=False):",
"remove_bin=False): \"\"\"Clean all the unecessary files after ellipse run. Parameters ---------- folder :",
"= header else: data_hdu.header.extend(header) if os.path.islink(fits_file): os.unlink(fits_file) data_hdu.writeto(fits_file, overwrite=overwrite) return def linux_or_mac(): \"\"\"Check",
"or not. Default: False \"\"\" _ = [os.remove(par) for par in glob.glob(\"{}/{}*.par\".format(folder, prefix))]",
"if header is not None: if 'SIMPLE' in header and 'BITPIX' in header:",
"header is not None: if 'SIMPLE' in header and 'BITPIX' in header: data_hdu.header",
"\"macosx\". \"\"\" from sys import platform if platform == \"linux\" or platform ==",
"bool, optional Overwrite existing file or not. Default: True \"\"\" if wcs is",
"= wcs.to_header() data_hdu = fits.PrimaryHDU(data, header=wcs_header) else: data_hdu = fits.PrimaryHDU(data) if header is",
"binary table or not. Default: False \"\"\" _ = [os.remove(par) for par in",
"\"\"\"Clean all the unecessary files after ellipse run. Parameters ---------- folder : str",
"read_from_pickle(name): \"\"\"Read the data from Pickle file.\"\"\" return pickle.load(open(name, \"rb\")) def save_to_pickle(obj, name):",
"for tab in glob.glob(\"{}/{}*.tab\".format(folder, prefix))] if remove_bin: _ = [os.remove(bin) for bin in",
"NDarray to FITS file. Parameters ---------- data : ndarray Data to be saved",
"False \"\"\" _ = [os.remove(par) for par in glob.glob(\"{}/{}*.par\".format(folder, prefix))] _ = [os.remove(pkl)",
"\"\"\"Useful tools.\"\"\" import os import glob import pickle from astropy.io import fits __all__",
"coordinate system information. Default: None header : str, optional Header information. Default: None"
] |
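A quick usage sketch for the helpers above; the array contents, file names, and the use of the current directory are illustrative assumptions, not part of the original module.

import numpy as np

# Write a small image to FITS, round-trip a pickle, then clean up
# everything matching the hypothetical 'demo' prefix, binary tables included.
img = np.random.rand(32, 32)                      # assumed sample data
save_to_fits(img, 'demo.fits', overwrite=True)
save_to_pickle({'image': 'demo.fits'}, 'demo.pkl')
print(read_from_pickle('demo.pkl'))
clean_after_ellipse('.', 'demo', remove_bin=True)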
<filename>update-attempt-ids.py
# -*- coding: utf-8 -*-

from __future__ import unicode_literals

import django
django.setup()

from course.models import GradeChange

for gchange in GradeChange.objects.all():
    if gchange.flow_session is not None:
        gchange.attempt_id = "flow-session-%d" % gchange.flow_session.id
        gchange.save()
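The loop above fetches every GradeChange row and filters in Python. A minimal variant, assuming the same course.models.GradeChange model, that lets the database skip rows without a flow session:

# Same backfill, but exclude(flow_session=None) pushes the null check
# into SQL so rows that would not change are never loaded.
for gchange in GradeChange.objects.exclude(flow_session=None):
    gchange.attempt_id = "flow-session-%d" % gchange.flow_session.id
    gchange.save()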
import sys

from .ibdp_classes import Pseudocode


def run():
    def help():
        print(
            """
To use: python -m ibdp_classes [options] filename

Options:
    -md    Output markdown.
    -py    Output intermediate Python code.
"""
        )
        exit(0)

    if len(sys.argv) < 2:
        help()
    md = "-md" in sys.argv
    py = "-py" in sys.argv
    file_name = sys.argv[-1]
    try:
        with open(file_name) as f:
            lines = [line for line in f]
    except OSError:
        help()
    code = "".join(lines)
    pc = Pseudocode(code)
    if md:
        print("```")
        print(pc)
        print("```\n")
    if md and py:
        print("```python")
    if py:
        print(pc.python)
    if md and py:
        print("```\n")
    if py and not md:
        print("\n\n")
    if md:
        print("Output:\n\n```")
    print(pc())
    if md:
        print("```")


if __name__ == "__main__":
    run()
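A hedged sketch of driving run() in-process instead of from a shell; the pseudocode file name is an illustrative assumption:

import sys

# Emulate: python -m ibdp_classes -md example.pseudo
# run() reads options and the file name from sys.argv.
sys.argv = ["ibdp_classes", "-md", "example.pseudo"]
run()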
<filename>floor.py
"""
cordinate = [
    [1, 2, 1, 2, 1],
    [2, 0, 2, 0, 2],
    [1, 2, 1, 2, 1]
]

Before using this class, you must create the column and beam objects
with the column and beam classes.
"""


class floor():
    def __init__(self, column, columnNumber, beam, beamNumber, cordinate):
        self.columnNumber = columnNumber
        self.beamNumber = beamNumber
        self.column = column
        self.beam = beam
        self.totalVoiceColumn = self.column.volume * self.columnNumber
        self.totalVoiceBeam = self.beam.volume * self.beamNumber
        self.totalVoice = self.totalVoiceColumn + self.totalVoiceBeam
        self.cordinate = cordinate

    def showAll(self):
        print('everything')
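floor only assumes its column and beam arguments expose a .volume attribute; a minimal stand-in to exercise the volume bookkeeping (the Stub class and all numbers are assumptions for illustration):

# Hypothetical stand-in for the real column/beam classes.
class Stub:
    def __init__(self, volume):
        self.volume = volume

cordinate = [
    [1, 2, 1, 2, 1],
    [2, 0, 2, 0, 2],
    [1, 2, 1, 2, 1],
]
f = floor(Stub(0.9), 8, Stub(0.4), 10, cordinate)
print(f.totalVoice)  # 0.9 * 8 + 0.4 * 10 = 11.2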
"Online disinformation, or fake news intended to deceive, has emerged as a major societal problem. Currently, fake news articles are written by humans, but recently-introduced AI technology based on Neural Networks might enable adversaries to generate fake news. Our goal is to reliably detect this “neural fake news” so that its harm can be minimized."

from selenium import webdriver
from seleniumrequests import Chrome
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import time
import json
import argparse

import req

#initialization
human_data = []
machine_data = []
driver = webdriver.Firefox()

#command-line argument parsing
parser = argparse.ArgumentParser()
parser.add_argument('--model', type=str)
parser.add_argument('--file_name', type=str)
parser.add_argument('--save_human_file', type=str)
parser.add_argument('--save_machine_file', type=str)
args = parser.parse_args()
model = args.model
file_name = args.file_name
save_human_file = args.save_human_file
save_machine_file = args.save_machine_file
store_human_data = []
store_machine_data = []
#check_now = human_data
#driver.find_element_by_class_name("ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj").clear()
#driver.find_element_by_class_name("ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj").send_keys("Online disinformation, or fake news intended to deceive, has emerged as a major societal problem. Currently, fake news articles are written by humans, but recently-introduced AI technology based on Neural Networks might enable adversaries to generate fake news. Our goal is to reliably detect this “neural fake news” so that its harm can be minimized.")
#ans = driver.find_element_by_css_selector("button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default").submit()
#element =


def detectGrover(news, driver, store_human_data, store_machine_data):
    #for news in check_now:
    driver.find_element_by_css_selector("textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy").clear()
    driver.find_element_by_css_selector("textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy").send_keys(news.get('article'))
    ans = driver.find_element_by_css_selector("button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv").submit()
    #ant-btn sc-bdVaJa sc-jbKcbu iUrOzv
    try:
        element = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, "div.sc-dfVpRl.eIhhqn")))
        if element:
            print(element.text.split())
            if (news['label'] not in element.text.split()) and ((news['label'] + ".") not in element.text.split()[-1]):
                print(news['article'], element.text.split(), news['label'])
            else:
                if news['label'] == 'human':
                    store_human_data.append(news)
                else:
                    store_machine_data.append(news)
    except:
        print("Unresponsive!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
    driver.find_element_by_css_selector("button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv").submit()
    try:
        element = WebDriverWait(driver, 30).until(
            EC.presence_of_element_located((By.CSS_SELECTOR, "div.sc-dfVpRl.eIhhqn")))
        if element:
            if (news['label'] not in element.text.split()) and ((news['label'] + ".") not in element.text.split()[-1]):
                print(news['article'], element.text.split(), news['label'])
            else:
                if news['label'] == 'human':
                    store_human_data.append(news)
                else:
                    store_machine_data.append(news)
    except:
        print("Unresponsive!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")


def detectGPT2(news, driver, store_human_data, store_machine_data):
    if 'article' in news.keys():
        #print(news.keys())
        driver.find_element_by_id("textbox").clear()
        driver.find_element_by_id("textbox").send_keys(news['article'])
        temp = driver.find_element_by_id("real-percentage")
        time.sleep(5)
        temp = driver.find_element_by_id("real-percentage").text.split('%')
        if float(temp[0]) > 50:
            label = 'human'
        else:
            label = 'machine'
        #if label not in news['label']:
        #    print(news['article'], label,
        if label == news['label']:
            if label == 'human':
                store_human_data.append(news)
            else:
                store_machine_data.append(news)
    #driver.close()


def detectFakeBox(news, store_human_data, store_machine_data):
    maxtry = 10
    res = 0
    label = ""
    try:
        while maxtry > 0:
            res = req.sendRequest(news.get('article'))
            maxtry = maxtry - 1
    except:
        print("Internet Error!Sleep 3 sec!", res, maxtry)
        time.sleep(3)
    if res['content_decision'] == 'impartial' or ((res['content_decision'] == 'bias') and (res['content_score'] < 0.5)):
        label = 'human'
    else:
        label = 'machine'
    if label == news['label']:
        if label == 'human':
            store_human_data.append(news)
        else:
            store_machine_data.append(news)


#model load
if model == 'gpt2':
    driver.get("https://huggingface.co/openai-detector")
    #detectGPT2(human_data, driver)
elif model == 'fakebox':
    req.init()
else:
    print("Not supported as yet! TODO:CTRL, FakeBox")

#temporary
i = 0
count = 0

#input read
human_file = open(save_human_file, "a+")
machine_file = open(save_machine_file, "a+")
with open(file_name) as json_file:
    while True:
        line = json_file.readline()
        if len(line)!=0 and (model == 'groverAI'):
            #print(line)
            detectGrover(json.loads(line), driver, store_human_data, store_machine_data)
            count +=1
        elif len(line)!=0 and (model == 'gpt2'):
            len_human = len(store_human_data)
            len_machine = len(store_machine_data)
            detectGPT2(json.loads(line), driver, store_human_data, store_machine_data)
            if len_human < len(store_human_data):
                human_file.write(str(json.dumps(store_human_data[-1]))+'\n')
            elif len_machine < len(store_machine_data):
                machine_file.write(str(json.dumps(store_machine_data[-1]))+'\n')
        elif len(line)!=0 and (model == 'fakebox'):
            len_human = len(store_human_data)
            len_machine = len(store_machine_data)
            detectFakeBox(json.loads(line), store_human_data, store_machine_data)
            if len_human < len(store_human_data):
                human_file.write(str(json.dumps(store_human_data[-1]))+'\n')
            elif len_machine < len(store_machine_data):
                machine_file.write(str(json.dumps(store_machine_data[-1]))+'\n')
        else:
            break

json_file.close()
driver.close()
human_file.close()
machine_file.close()
'''
with open(save_human_file, "w") as json_file:
    for each in store_human_data:
        json_file.write(str(json.dumps(each))+'\n')
with open(save_machine_file, "w") as json_file:
    for each in store_machine_data:
        json_file.write(str(json.dumps(each))+'\n')
json_file.close()
'''
"in news['label']: # print(news['article'], label, news['label']) #else: if label == 'human': store_human_data.append(news) else:",
"store_machine_data) if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine < len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') elif len(line)!=0",
"news['label']) else: if news['label'] == 'human': store_human_data.append(news) else: store_machine_data.append(news) except: print(\"Unresponsive!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\") def detectGPT2(news,",
"res = 0 label = \"\" try: while maxtry > 0: res =",
"len(store_human_data) len_machine = len(store_machine_data) detectFakeBox(json.loads(line), store_human_data, store_machine_data) if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif",
"label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #driver.close() def detectFakeBox(news, store_human_data, store_machine_data): maxtry =",
"# print(news['article'], label, news['label']) #else: if label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #driver.close()",
"print(\"Internet Error!Sleep 3 sec!\", res, maxtry) time.sleep(3) if res: if res[\"content_decision\"] == 'impartial'",
"and ( (news['label'] + \".\") not in element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else: if",
"store_machine_data): maxtry = 10 res = 0 label = \"\" try: while maxtry",
"0 count = 0 #input read human_file = open(save_human_file, \"a+\") machine_file = open(save_machine_file,",
"len(line)!=0 and (model == 'fakebox'): len_human = len(store_human_data) len_machine = len(store_machine_data) detectFakeBox(json.loads(line), store_human_data,",
"= len(store_machine_data) detectGPT2(json.loads(line), driver, store_human_data, store_machine_data) if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine",
"res[\"content_decision\"] == 'impartial' or ((res['content_decision'] == 'bias') and (res['content_score'] < 0.5)): label =",
"print(\"Not supported as yet! TODO:CTRL, FakeBox\") #temporary i = 0 count = 0",
"ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() #ant-btn sc-bdVaJa sc-jbKcbu iUrOzv try: element = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\")))",
"= 'machine' #if label not in news['label']: # print(news['article'], label, news['label']) #else: if",
"this “neural fake news” so that its harm can be minimized.\" from selenium",
"else: store_machine_data.append(news) #driver.close() def detectFakeBox(news, store_human_data, store_machine_data): maxtry = 10 res = 0",
"TODO:CTRL, FakeBox\") #temporary i = 0 count = 0 #input read human_file =",
"while maxtry > 0: res = req.sendRequest(news.get('article')) maxtry = maxtry - 1 except:",
"- 1 except: print(\"Internet Error!Sleep 3 sec!\", res, maxtry) time.sleep(3) if res: if",
"WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.common.by import By import time",
"that its harm can be minimized.\") #ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default\").submit() #element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME,",
"detectGPT2(news, driver, store_human_data, store_machine_data): if 'article' in news.keys(): #print(news.keys()) driver.find_element_by_id(\"textbox\").clear() driver.find_element_by_id(\"textbox\").send_keys(news['article']) temp =",
"driver.find_element_by_id(\"textbox\").send_keys(news['article']) temp = driver.find_element_by_id(\"real-percentage\") time.sleep(5) temp = driver.find_element_by_id(\"real-percentage\").text.split('%') if float(temp[0]) > 50: label",
"req.sendRequest(news.get('article')) maxtry = maxtry - 1 except: print(\"Internet Error!Sleep 3 sec!\", res, maxtry)",
"count = 0 #input read human_file = open(save_human_file, \"a+\") machine_file = open(save_machine_file, \"a+\")",
"driver, store_human_data, store_machine_data): #for news in check_now: driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").clear() driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").send_keys(news.get('article')) ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() #ant-btn",
"store_human_data.append(news) else: store_machine_data.append(news) #model load if model == 'groverAI': driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data, driver) elif",
"Networks might enable adversaries to generate fake news. Our goal is to reliably",
"#ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default\").submit() #element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\"))) def detectGrover(news, driver, store_human_data, store_machine_data):",
"if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine < len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') elif len(line)!=0 and",
"open(save_human_file, \"w\") as json_file: for each in store_human_data: json_file.write(str(json.dumps(each))+'\\n') with open(save_machine_file, \"w\") as",
"= [] driver = webdriver.Firefox() #command-line argument parsing parser = argparse.ArgumentParser() parser.add_argument('--model', type=str)",
"= args.file_name save_human_file = args.save_human_file save_machine_file = args.save_machine_file store_human_data = [] store_machine_data =",
"'machine' if label == news['label']: if label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #model",
"from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.common.by import",
"(news['label'] not in element.text.split()) and ( (news['label'] + \".\") not in element.text.split()[-1]): print(news['article'],",
"= driver.find_element_by_id(\"real-percentage\").text.split('%') if float(temp[0]) > 50: label = 'human' else: label = 'machine'",
"human_data #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").clear() #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").send_keys(\"Online disinformation, or fake news intended to deceive, has emerged as",
"> 0: res = req.sendRequest(news.get('article')) maxtry = maxtry - 1 except: print(\"Internet Error!Sleep",
"len(store_machine_data) detectFakeBox(json.loads(line), store_human_data, store_machine_data) if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine < len(store_machine_data):",
"from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.common.by import By import time import",
"except: print(\"Unresponsive!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\") def detectGPT2(news, driver, store_human_data, store_machine_data): if 'article' in news.keys(): #print(news.keys()) driver.find_element_by_id(\"textbox\").clear()",
"= 'human' else: label = 'machine' #if label not in news['label']: # print(news['article'],",
"= open(save_human_file, \"a+\") machine_file = open(save_machine_file, \"a+\") with open(file_name) as json_file: while True:",
"driver) elif model == 'fakebox': req.init() else: print(\"Not supported as yet! TODO:CTRL, FakeBox\")",
"\"Online disinformation, or fake news intended to deceive, has emerged as a major",
"be minimized.\" from selenium import webdriver from seleniumrequests import Chrome from selenium.webdriver.support.ui import",
"sec!\", res, maxtry) time.sleep(3) if res: if res[\"content_decision\"] == 'impartial' or ((res['content_decision'] ==",
"= driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() try: element = WebDriverWait(driver, 30).until( EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element: if (news['label']",
"parsing parser = argparse.ArgumentParser() parser.add_argument('--model', type=str) parser.add_argument('--file_name', type=str) parser.add_argument('--save_human_file', type=str) parser.add_argument('--save_machine_file', type=str) args",
"webdriver.Firefox() #command-line argument parsing parser = argparse.ArgumentParser() parser.add_argument('--model', type=str) parser.add_argument('--file_name', type=str) parser.add_argument('--save_human_file', type=str)",
"in element.text.split()) and ((news['label'] + \".\") not in element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else:",
"< len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine < len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') else: break json_file.close() driver.close() human_file.close()",
"#driver.close() def detectFakeBox(news, store_human_data, store_machine_data): maxtry = 10 res = 0 label =",
"len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') else: break json_file.close() driver.close() human_file.close() machine_file.close() ''' with open(save_human_file, \"w\") as",
"if label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #driver.close() def detectFakeBox(news, store_human_data, store_machine_data): maxtry",
"50: label = 'human' else: label = 'machine' #if label not in news['label']:",
"label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #model load if model == 'groverAI': driver.get(\"https://grover.allenai.org/detect\")",
"driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").clear() driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").send_keys(news.get('article')) ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() #ant-btn sc-bdVaJa sc-jbKcbu iUrOzv try: element = WebDriverWait(driver,",
"= len(store_machine_data) detectFakeBox(json.loads(line), store_human_data, store_machine_data) if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine <",
"human_file.close() machine_file.close() ''' with open(save_human_file, \"w\") as json_file: for each in store_human_data: json_file.write(str(json.dumps(each))+'\\n')",
"fake news articles are written by humans, but recently-introduced AI technology based on",
"save_human_file = args.save_human_file save_machine_file = args.save_machine_file store_human_data = [] store_machine_data = [] #check_now",
"((res['content_decision'] == 'bias') and (res['content_score'] < 0.5)): label = 'human' else: label =",
"\"a+\") machine_file = open(save_machine_file, \"a+\") with open(file_name) as json_file: while True: line =",
"len(store_machine_data) detectGPT2(json.loads(line), driver, store_human_data, store_machine_data) if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine <",
"and (res['content_score'] < 0.5)): label = 'human' else: label = 'machine' if label",
"news intended to deceive, has emerged as a major societal problem. Currently, fake",
"store_machine_data): if 'article' in news.keys(): #print(news.keys()) driver.find_element_by_id(\"textbox\").clear() driver.find_element_by_id(\"textbox\").send_keys(news['article']) temp = driver.find_element_by_id(\"real-percentage\") time.sleep(5) temp",
"open(file_name) as json_file: while True: line = json_file.readline() if len(line)!=0 and (model ==",
"driver, store_human_data, store_machine_data) if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine < len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n')",
"adversaries to generate fake news. Our goal is to reliably detect this “neural",
"store_machine_data = [] #check_now = human_data #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").clear() #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").send_keys(\"Online disinformation, or fake news intended",
"expected_conditions as EC from selenium.webdriver.common.by import By import time import json import argparse",
"not in element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else: if news['label'] == 'human': store_human_data.append(news) else:",
"= req.sendRequest(news.get('article')) maxtry = maxtry - 1 except: print(\"Internet Error!Sleep 3 sec!\", res,",
"driver) elif model == 'gpt2': driver.get(\"https://huggingface.co/openai-detector\") #detectGPT2(human_data, driver) elif model == 'fakebox': req.init()",
"human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine < len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') elif len(line)!=0 and (model == 'fakebox'): len_human",
"maxtry = 10 res = 0 label = \"\" try: while maxtry >",
"def detectFakeBox(news, store_human_data, store_machine_data): maxtry = 10 res = 0 label = \"\"",
"store_human_data, store_machine_data): if 'article' in news.keys(): #print(news.keys()) driver.find_element_by_id(\"textbox\").clear() driver.find_element_by_id(\"textbox\").send_keys(news['article']) temp = driver.find_element_by_id(\"real-percentage\") time.sleep(5)",
"#model load if model == 'groverAI': driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data, driver) elif model == 'gpt2':",
"harm can be minimized.\") #ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default\").submit() #element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\"))) def",
"yet! TODO:CTRL, FakeBox\") #temporary i = 0 count = 0 #input read human_file",
"+ \".\") not in element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else: if news['label'] == 'human':",
"By import time import json import argparse import req #initialization human_data = []",
"intended to deceive, has emerged as a major societal problem. Currently, fake news",
"'machine' #if label not in news['label']: # print(news['article'], label, news['label']) #else: if label",
"= \"Online disinformation, or fake news intended to deceive, has emerged as a",
"== 'gpt2'): len_human = len(store_human_data) len_machine = len(store_machine_data) detectGPT2(json.loads(line), driver, store_human_data, store_machine_data) if",
"element.text.split()) and ((news['label'] + \".\") not in element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else: if",
"== 'fakebox'): len_human = len(store_human_data) len_machine = len(store_machine_data) detectFakeBox(json.loads(line), store_human_data, store_machine_data) if len_human",
"elif model == 'gpt2': driver.get(\"https://huggingface.co/openai-detector\") #detectGPT2(human_data, driver) elif model == 'fakebox': req.init() else:",
"json_file.readline() if len(line)!=0 and (model == 'groverAI'): #print(line) detectGrover(json.loads(line), driver, store_human_data, store_machine_data) count",
"else: store_machine_data.append(news) except: ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() try: element = WebDriverWait(driver, 30).until( EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\")))",
"element.text.split(), news['label']) else: if news['label'] == 'human': store_human_data.append(news) else: store_machine_data.append(news) except: ans =",
"'impartial' or ((res['content_decision'] == 'bias') and (res['content_score'] < 0.5)): label = 'human' else:",
"news['label'] == 'human': store_human_data.append(news) else: store_machine_data.append(news) except: ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() try: element =",
"not in news['label']: # print(news['article'], label, news['label']) #else: if label == 'human': store_human_data.append(news)",
"else: print(\"Not supported as yet! TODO:CTRL, FakeBox\") #temporary i = 0 count =",
"driver.find_element_by_id(\"real-percentage\") time.sleep(5) temp = driver.find_element_by_id(\"real-percentage\").text.split('%') if float(temp[0]) > 50: label = 'human' else:",
"if element: print(element.text.split()) if (news['label'] not in element.text.split()) and ((news['label'] + \".\") not",
"selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.common.by import By import time import json",
"= 0 label = \"\" try: while maxtry > 0: res = req.sendRequest(news.get('article'))",
"store_human_data, store_machine_data): #for news in check_now: driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").clear() driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").send_keys(news.get('article')) ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() #ant-btn sc-bdVaJa",
"or ((res['content_decision'] == 'bias') and (res['content_score'] < 0.5)): label = 'human' else: label",
"if news['label'] == 'human': store_human_data.append(news) else: store_machine_data.append(news) except: ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() try: element",
"detectFakeBox(news, store_human_data, store_machine_data): maxtry = 10 res = 0 label = \"\" try:",
"type=str) parser.add_argument('--save_machine_file', type=str) args = parser.parse_args() model = args.model file_name = args.file_name save_human_file",
"label, news['label']) #else: if label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #driver.close() def detectFakeBox(news,",
"10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\"))) def detectGrover(news, driver, store_human_data, store_machine_data): #for news in check_now: driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").clear() driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").send_keys(news.get('article'))",
"== 'human': store_human_data.append(news) else: store_machine_data.append(news) #model load if model == 'groverAI': driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data,",
"#initialization human_data = [] machine_data = [] driver = webdriver.Firefox() #command-line argument parsing",
"time.sleep(3) if res: if res[\"content_decision\"] == 'impartial' or ((res['content_decision'] == 'bias') and (res['content_score']",
"10 res = 0 label = \"\" try: while maxtry > 0: res",
"reliably detect this “neural fake news” so that its harm can be minimized.\"",
"AI technology based on Neural Networks might enable adversaries to generate fake news.",
"save_machine_file = args.save_machine_file store_human_data = [] store_machine_data = [] #check_now = human_data #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").clear()",
"major societal problem. Currently, fake news articles are written by humans, but recently-introduced",
"== news['label']: if label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #model load if model",
"argparse.ArgumentParser() parser.add_argument('--model', type=str) parser.add_argument('--file_name', type=str) parser.add_argument('--save_human_file', type=str) parser.add_argument('--save_machine_file', type=str) args = parser.parse_args() model",
"argument parsing parser = argparse.ArgumentParser() parser.add_argument('--model', type=str) parser.add_argument('--file_name', type=str) parser.add_argument('--save_human_file', type=str) parser.add_argument('--save_machine_file', type=str)",
"driver = webdriver.Firefox() #command-line argument parsing parser = argparse.ArgumentParser() parser.add_argument('--model', type=str) parser.add_argument('--file_name', type=str)",
"else: store_machine_data.append(news) except: print(\"Unresponsive!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\") def detectGPT2(news, driver, store_human_data, store_machine_data): if 'article' in news.keys():",
"if news['label'] == 'human': store_human_data.append(news) else: store_machine_data.append(news) except: print(\"Unresponsive!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\") def detectGPT2(news, driver, store_human_data,",
"(model == 'groverAI'): #print(line) detectGrover(json.loads(line), driver, store_human_data, store_machine_data) count +=1 elif len(line)!=0 and",
"store_human_data = [] store_machine_data = [] #check_now = human_data #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").clear() #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").send_keys(\"Online disinformation, or",
"with open(file_name) as json_file: while True: line = json_file.readline() if len(line)!=0 and (model",
"EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element: if (news['label'] not in element.text.split()) and ( (news['label'] +",
"if model == 'groverAI': driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data, driver) elif model == 'gpt2': driver.get(\"https://huggingface.co/openai-detector\") #detectGPT2(human_data,",
"can be minimized.\") #ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default\").submit() #element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\"))) def detectGrover(news,",
"minimized.\") #ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default\").submit() #element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\"))) def detectGrover(news, driver, store_human_data,",
"json_file: for each in store_human_data: json_file.write(str(json.dumps(each))+'\\n') with open(save_machine_file, \"w\") as json_file: for each",
"args = parser.parse_args() model = args.model file_name = args.file_name save_human_file = args.save_human_file save_machine_file",
"'human': store_human_data.append(news) else: store_machine_data.append(news) except: ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() try: element = WebDriverWait(driver, 30).until(",
"import argparse import req #initialization human_data = [] machine_data = [] driver =",
"driver, store_human_data, store_machine_data) count +=1 elif len(line)!=0 and (model == 'gpt2'): len_human =",
"time import json import argparse import req #initialization human_data = [] machine_data =",
"fake news. Our goal is to reliably detect this “neural fake news” so",
"'groverAI': driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data, driver) elif model == 'gpt2': driver.get(\"https://huggingface.co/openai-detector\") #detectGPT2(human_data, driver) elif model",
"= 0 #input read human_file = open(save_human_file, \"a+\") machine_file = open(save_machine_file, \"a+\") with",
"Currently, fake news articles are written by humans, but recently-introduced AI technology based",
"but recently-introduced AI technology based on Neural Networks might enable adversaries to generate",
"0.5)): label = 'human' else: label = 'machine' if label == news['label']: if",
"driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() #ant-btn sc-bdVaJa sc-jbKcbu iUrOzv try: element = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element:",
"res: if res[\"content_decision\"] == 'impartial' or ((res['content_decision'] == 'bias') and (res['content_score'] < 0.5)):",
"if res[\"content_decision\"] == 'impartial' or ((res['content_decision'] == 'bias') and (res['content_score'] < 0.5)): label",
"from selenium.webdriver.common.by import By import time import json import argparse import req #initialization",
"#print(line) detectGrover(json.loads(line), driver, store_human_data, store_machine_data) count +=1 elif len(line)!=0 and (model == 'gpt2'):",
"parser.add_argument('--model', type=str) parser.add_argument('--file_name', type=str) parser.add_argument('--save_human_file', type=str) parser.add_argument('--save_machine_file', type=str) args = parser.parse_args() model =",
"machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') elif len(line)!=0 and (model == 'fakebox'): len_human = len(store_human_data) len_machine = len(store_machine_data)",
"3 sec!\", res, maxtry) time.sleep(3) if res: if res[\"content_decision\"] == 'impartial' or ((res['content_decision']",
"\"div.sc-dfVpRl.eIhhqn\"))) if element: if (news['label'] not in element.text.split()) and ( (news['label'] + \".\")",
"if label == news['label']: if label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #model load",
"driver.close() human_file.close() machine_file.close() ''' with open(save_human_file, \"w\") as json_file: for each in store_human_data:",
"'fakebox'): len_human = len(store_human_data) len_machine = len(store_machine_data) detectFakeBox(json.loads(line), store_human_data, store_machine_data) if len_human <",
"WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element: print(element.text.split()) if (news['label'] not in element.text.split()) and ((news['label']",
"and (model == 'gpt2'): len_human = len(store_human_data) len_machine = len(store_machine_data) detectGPT2(json.loads(line), driver, store_human_data,",
"if element: if (news['label'] not in element.text.split()) and ( (news['label'] + \".\") not",
"== 'groverAI': driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data, driver) elif model == 'gpt2': driver.get(\"https://huggingface.co/openai-detector\") #detectGPT2(human_data, driver) elif",
"[] store_machine_data = [] #check_now = human_data #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").clear() #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").send_keys(\"Online disinformation, or fake news",
"#for news in check_now: driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").clear() driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").send_keys(news.get('article')) ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() #ant-btn sc-bdVaJa sc-jbKcbu iUrOzv",
"label = 'human' else: label = 'machine' if label == news['label']: if label",
"model == 'gpt2': driver.get(\"https://huggingface.co/openai-detector\") #detectGPT2(human_data, driver) elif model == 'fakebox': req.init() else: print(\"Not",
"technology based on Neural Networks might enable adversaries to generate fake news. Our",
"written by humans, but recently-introduced AI technology based on Neural Networks might enable",
"model == 'groverAI': driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data, driver) elif model == 'gpt2': driver.get(\"https://huggingface.co/openai-detector\") #detectGPT2(human_data, driver)",
"req #initialization human_data = [] machine_data = [] driver = webdriver.Firefox() #command-line argument",
"len_machine < len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') elif len(line)!=0 and (model == 'fakebox'): len_human = len(store_human_data)",
"res = req.sendRequest(news.get('article')) maxtry = maxtry - 1 except: print(\"Internet Error!Sleep 3 sec!\",",
"store_machine_data) if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine < len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') else: break",
"> 50: label = 'human' else: label = 'machine' #if label not in",
"as EC from selenium.webdriver.common.by import By import time import json import argparse import",
"= args.model file_name = args.file_name save_human_file = args.save_human_file save_machine_file = args.save_machine_file store_human_data =",
"= \"\" try: while maxtry > 0: res = req.sendRequest(news.get('article')) maxtry = maxtry",
"= len(store_human_data) len_machine = len(store_machine_data) detectGPT2(json.loads(line), driver, store_human_data, store_machine_data) if len_human < len(store_human_data):",
"= WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element: print(element.text.split()) if (news['label'] not in element.text.split()) and",
"if float(temp[0]) > 50: label = 'human' else: label = 'machine' #if label",
"'human': store_human_data.append(news) else: store_machine_data.append(news) #driver.close() def detectFakeBox(news, store_human_data, store_machine_data): maxtry = 10 res",
"in element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else: if news['label'] == 'human': store_human_data.append(news) else: store_machine_data.append(news)",
"selenium import webdriver from seleniumrequests import Chrome from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support",
"import req #initialization human_data = [] machine_data = [] driver = webdriver.Firefox() #command-line",
"goal is to reliably detect this “neural fake news” so that its harm",
"'human': store_human_data.append(news) else: store_machine_data.append(news) except: print(\"Unresponsive!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\") def detectGPT2(news, driver, store_human_data, store_machine_data): if 'article'",
"news = \"Online disinformation, or fake news intended to deceive, has emerged as",
"#temporary i = 0 count = 0 #input read human_file = open(save_human_file, \"a+\")",
"def detectGPT2(news, driver, store_human_data, store_machine_data): if 'article' in news.keys(): #print(news.keys()) driver.find_element_by_id(\"textbox\").clear() driver.find_element_by_id(\"textbox\").send_keys(news['article']) temp",
"= driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() #ant-btn sc-bdVaJa sc-jbKcbu iUrOzv try: element = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if",
"driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").send_keys(news.get('article')) ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() #ant-btn sc-bdVaJa sc-jbKcbu iUrOzv try: element = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR,",
"detect this “neural fake news” so that its harm can be minimized.\" from",
"json import argparse import req #initialization human_data = [] machine_data = [] driver",
"as json_file: while True: line = json_file.readline() if len(line)!=0 and (model == 'groverAI'):",
"\"div.sc-dfVpRl.eIhhqn\"))) if element: print(element.text.split()) if (news['label'] not in element.text.split()) and ((news['label'] + \".\")",
"label = 'machine' if label == news['label']: if label == 'human': store_human_data.append(news) else:",
"else: if news['label'] == 'human': store_human_data.append(news) else: store_machine_data.append(news) except: ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() try:",
"store_machine_data.append(news) #model load if model == 'groverAI': driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data, driver) elif model ==",
"== 'gpt2': driver.get(\"https://huggingface.co/openai-detector\") #detectGPT2(human_data, driver) elif model == 'fakebox': req.init() else: print(\"Not supported",
"= len(store_human_data) len_machine = len(store_machine_data) detectFakeBox(json.loads(line), store_human_data, store_machine_data) if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n')",
"== 'bias') and (res['content_score'] < 0.5)): label = 'human' else: label = 'machine'",
"its harm can be minimized.\") #ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default\").submit() #element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\")))",
"so that its harm can be minimized.\" from selenium import webdriver from seleniumrequests",
"== 'groverAI'): #print(line) detectGrover(json.loads(line), driver, store_human_data, store_machine_data) count +=1 elif len(line)!=0 and (model",
"line = json_file.readline() if len(line)!=0 and (model == 'groverAI'): #print(line) detectGrover(json.loads(line), driver, store_human_data,",
"news['label']) else: if news['label'] == 'human': store_human_data.append(news) else: store_machine_data.append(news) except: ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit()",
"except: ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() try: element = WebDriverWait(driver, 30).until( EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element:",
"human_file = open(save_human_file, \"a+\") machine_file = open(save_machine_file, \"a+\") with open(file_name) as json_file: while",
"file_name = args.file_name save_human_file = args.save_human_file save_machine_file = args.save_machine_file store_human_data = [] store_machine_data",
"len(store_human_data) len_machine = len(store_machine_data) detectGPT2(json.loads(line), driver, store_human_data, store_machine_data) if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n')",
"import Chrome from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from",
"== 'human': store_human_data.append(news) else: store_machine_data.append(news) except: ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() try: element = WebDriverWait(driver,",
"driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data, driver) elif model == 'gpt2': driver.get(\"https://huggingface.co/openai-detector\") #detectGPT2(human_data, driver) elif model ==",
"break json_file.close() driver.close() human_file.close() machine_file.close() ''' with open(save_human_file, \"w\") as json_file: for each",
"societal problem. Currently, fake news articles are written by humans, but recently-introduced AI",
"import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.common.by import By import",
"(news['label'] + \".\") not in element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else: if news['label'] ==",
"ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() try: element = WebDriverWait(driver, 30).until( EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element: if",
"reliably detect this “neural fake news” so that its harm can be minimized.\")",
"and (model == 'fakebox'): len_human = len(store_human_data) len_machine = len(store_machine_data) detectFakeBox(json.loads(line), store_human_data, store_machine_data)",
"= 'machine' if label == news['label']: if label == 'human': store_human_data.append(news) else: store_machine_data.append(news)",
"with open(save_human_file, \"w\") as json_file: for each in store_human_data: json_file.write(str(json.dumps(each))+'\\n') with open(save_machine_file, \"w\")",
"((news['label'] + \".\") not in element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else: if news['label'] ==",
"each in store_human_data: json_file.write(str(json.dumps(each))+'\\n') with open(save_machine_file, \"w\") as json_file: for each in store_machine_data:",
"#check_now = human_data #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").clear() #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").send_keys(\"Online disinformation, or fake news intended to deceive, has",
"to reliably detect this “neural fake news” so that its harm can be",
"1 except: print(\"Internet Error!Sleep 3 sec!\", res, maxtry) time.sleep(3) if res: if res[\"content_decision\"]",
"or fake news intended to deceive, has emerged as a major societal problem.",
"len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine < len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') elif len(line)!=0 and (model == 'fakebox'):",
"news” so that its harm can be minimized.\" from selenium import webdriver from",
"\"\" try: while maxtry > 0: res = req.sendRequest(news.get('article')) maxtry = maxtry -",
"res, maxtry) time.sleep(3) if res: if res[\"content_decision\"] == 'impartial' or ((res['content_decision'] == 'bias')",
"model == 'fakebox': req.init() else: print(\"Not supported as yet! TODO:CTRL, FakeBox\") #temporary i",
"print(element.text.split()) if (news['label'] not in element.text.split()) and ((news['label'] + \".\") not in element.text.split()[-1]):",
"= driver.find_element_by_id(\"real-percentage\") time.sleep(5) temp = driver.find_element_by_id(\"real-percentage\").text.split('%') if float(temp[0]) > 50: label = 'human'",
"from selenium import webdriver from seleniumrequests import Chrome from selenium.webdriver.support.ui import WebDriverWait from",
"'human': store_human_data.append(news) else: store_machine_data.append(news) #model load if model == 'groverAI': driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data, driver)",
"news['label']: # print(news['article'], label, news['label']) #else: if label == 'human': store_human_data.append(news) else: store_machine_data.append(news)",
"humans, but recently-introduced AI technology based on Neural Networks might enable adversaries to",
"to deceive, has emerged as a major societal problem. Currently, fake news articles",
"= [] #check_now = human_data #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").clear() #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").send_keys(\"Online disinformation, or fake news intended to",
"store_machine_data): #for news in check_now: driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").clear() driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").send_keys(news.get('article')) ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() #ant-btn sc-bdVaJa sc-jbKcbu",
"len_human = len(store_human_data) len_machine = len(store_machine_data) detectFakeBox(json.loads(line), store_human_data, store_machine_data) if len_human < len(store_human_data):",
"parser.add_argument('--save_machine_file', type=str) args = parser.parse_args() model = args.model file_name = args.file_name save_human_file =",
"\"a+\") with open(file_name) as json_file: while True: line = json_file.readline() if len(line)!=0 and",
"#else: if label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #driver.close() def detectFakeBox(news, store_human_data, store_machine_data):",
"json_file.close() driver.close() human_file.close() machine_file.close() ''' with open(save_human_file, \"w\") as json_file: for each in",
"'bias') and (res['content_score'] < 0.5)): label = 'human' else: label = 'machine' if",
"if res: if res[\"content_decision\"] == 'impartial' or ((res['content_decision'] == 'bias') and (res['content_score'] <",
"[] driver = webdriver.Firefox() #command-line argument parsing parser = argparse.ArgumentParser() parser.add_argument('--model', type=str) parser.add_argument('--file_name',",
"parser = argparse.ArgumentParser() parser.add_argument('--model', type=str) parser.add_argument('--file_name', type=str) parser.add_argument('--save_human_file', type=str) parser.add_argument('--save_machine_file', type=str) args =",
"len_human = len(store_human_data) len_machine = len(store_machine_data) detectGPT2(json.loads(line), driver, store_human_data, store_machine_data) if len_human <",
"driver, store_human_data, store_machine_data): if 'article' in news.keys(): #print(news.keys()) driver.find_element_by_id(\"textbox\").clear() driver.find_element_by_id(\"textbox\").send_keys(news['article']) temp = driver.find_element_by_id(\"real-percentage\")",
"len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') elif len(line)!=0 and (model == 'fakebox'): len_human = len(store_human_data) len_machine =",
"'gpt2'): len_human = len(store_human_data) len_machine = len(store_machine_data) detectGPT2(json.loads(line), driver, store_human_data, store_machine_data) if len_human",
"if (news['label'] not in element.text.split()) and ((news['label'] + \".\") not in element.text.split()[-1]): print(news['article'],",
"to generate fake news. Our goal is to reliably detect this “neural fake",
"by humans, but recently-introduced AI technology based on Neural Networks might enable adversaries",
"Error!Sleep 3 sec!\", res, maxtry) time.sleep(3) if res: if res[\"content_decision\"] == 'impartial' or",
"\"sc-kvZOFW.bpFYHv\"))) def detectGrover(news, driver, store_human_data, store_machine_data): #for news in check_now: driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").clear() driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").send_keys(news.get('article')) ans",
"#detectGrover(human_data, driver) elif model == 'gpt2': driver.get(\"https://huggingface.co/openai-detector\") #detectGPT2(human_data, driver) elif model == 'fakebox':",
"args.model file_name = args.file_name save_human_file = args.save_human_file save_machine_file = args.save_machine_file store_human_data = []",
"print(news['article'], label, news['label']) #else: if label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #driver.close() def",
"“neural fake news” so that its harm can be minimized.\" from selenium import",
"in store_human_data: json_file.write(str(json.dumps(each))+'\\n') with open(save_machine_file, \"w\") as json_file: for each in store_machine_data: json_file.write(str(json.dumps(each))+'\\n')",
"EC from selenium.webdriver.common.by import By import time import json import argparse import req",
"'groverAI'): #print(line) detectGrover(json.loads(line), driver, store_human_data, store_machine_data) count +=1 elif len(line)!=0 and (model ==",
"#element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\"))) def detectGrover(news, driver, store_human_data, store_machine_data): #for news in",
"( (news['label'] + \".\") not in element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else: if news['label']",
"news articles are written by humans, but recently-introduced AI technology based on Neural",
"maxtry = maxtry - 1 except: print(\"Internet Error!Sleep 3 sec!\", res, maxtry) time.sleep(3)",
"= webdriver.Firefox() #command-line argument parsing parser = argparse.ArgumentParser() parser.add_argument('--model', type=str) parser.add_argument('--file_name', type=str) parser.add_argument('--save_human_file',",
"type=str) args = parser.parse_args() model = args.model file_name = args.file_name save_human_file = args.save_human_file",
"else: break json_file.close() driver.close() human_file.close() machine_file.close() ''' with open(save_human_file, \"w\") as json_file: for",
"\".\") not in element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else: if news['label'] == 'human': store_human_data.append(news)",
"0 label = \"\" try: while maxtry > 0: res = req.sendRequest(news.get('article')) maxtry",
"else: label = 'machine' #if label not in news['label']: # print(news['article'], label, news['label'])",
"= args.save_human_file save_machine_file = args.save_machine_file store_human_data = [] store_machine_data = [] #check_now =",
"that its harm can be minimized.\" from selenium import webdriver from seleniumrequests import",
"label = \"\" try: while maxtry > 0: res = req.sendRequest(news.get('article')) maxtry =",
"elif model == 'fakebox': req.init() else: print(\"Not supported as yet! TODO:CTRL, FakeBox\") #temporary",
"i = 0 count = 0 #input read human_file = open(save_human_file, \"a+\") machine_file",
"= 0 count = 0 #input read human_file = open(save_human_file, \"a+\") machine_file =",
"and ((news['label'] + \".\") not in element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else: if news['label']",
"detectGrover(json.loads(line), driver, store_human_data, store_machine_data) count +=1 elif len(line)!=0 and (model == 'gpt2'): len_human",
"maxtry - 1 except: print(\"Internet Error!Sleep 3 sec!\", res, maxtry) time.sleep(3) if res:",
"is to reliably detect this “neural fake news” so that its harm can",
"time.sleep(5) temp = driver.find_element_by_id(\"real-percentage\").text.split('%') if float(temp[0]) > 50: label = 'human' else: label",
"len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine < len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') else: break json_file.close() driver.close()",
"driver.find_element_by_id(\"textbox\").clear() driver.find_element_by_id(\"textbox\").send_keys(news['article']) temp = driver.find_element_by_id(\"real-percentage\") time.sleep(5) temp = driver.find_element_by_id(\"real-percentage\").text.split('%') if float(temp[0]) > 50:",
"elif len_machine < len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') else: break json_file.close() driver.close() human_file.close() machine_file.close() ''' with",
"deceive, has emerged as a major societal problem. Currently, fake news articles are",
"driver.find_element_by_css_selector(\"button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default\").submit() #element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\"))) def detectGrover(news, driver, store_human_data, store_machine_data): #for news",
"#if label not in news['label']: # print(news['article'], label, news['label']) #else: if label ==",
"might enable adversaries to generate fake news. Our goal is to reliably detect",
"(res['content_score'] < 0.5)): label = 'human' else: label = 'machine' if label ==",
"element.text.split()[-1]): print(news['article'], element.text.split(), news['label']) else: if news['label'] == 'human': store_human_data.append(news) else: store_machine_data.append(news) except:",
"= WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\"))) def detectGrover(news, driver, store_human_data, store_machine_data): #for news in check_now:",
"#driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").send_keys(\"Online disinformation, or fake news intended to deceive, has emerged as a major",
"[] machine_data = [] driver = webdriver.Firefox() #command-line argument parsing parser = argparse.ArgumentParser()",
"len(line)!=0 and (model == 'groverAI'): #print(line) detectGrover(json.loads(line), driver, store_human_data, store_machine_data) count +=1 elif",
"== 'human': store_human_data.append(news) else: store_machine_data.append(news) #driver.close() def detectFakeBox(news, store_human_data, store_machine_data): maxtry = 10",
"label = 'human' else: label = 'machine' #if label not in news['label']: #",
"news['label']: if label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #model load if model ==",
"a major societal problem. Currently, fake news articles are written by humans, but",
"len(line)!=0 and (model == 'gpt2'): len_human = len(store_human_data) len_machine = len(store_machine_data) detectGPT2(json.loads(line), driver,",
"''' with open(save_human_file, \"w\") as json_file: for each in store_human_data: json_file.write(str(json.dumps(each))+'\\n') with open(save_machine_file,",
"\"w\") as json_file: for each in store_human_data: json_file.write(str(json.dumps(each))+'\\n') with open(save_machine_file, \"w\") as json_file:",
"= json_file.readline() if len(line)!=0 and (model == 'groverAI'): #print(line) detectGrover(json.loads(line), driver, store_human_data, store_machine_data)",
"harm can be minimized.\" from selenium import webdriver from seleniumrequests import Chrome from",
"'fakebox': req.init() else: print(\"Not supported as yet! TODO:CTRL, FakeBox\") #temporary i = 0",
"import time import json import argparse import req #initialization human_data = [] machine_data",
"#input read human_file = open(save_human_file, \"a+\") machine_file = open(save_machine_file, \"a+\") with open(file_name) as",
"json_file.write(str(json.dumps(each))+'\\n') with open(save_machine_file, \"w\") as json_file: for each in store_machine_data: json_file.write(str(json.dumps(each))+'\\n') json_file.close() '''",
"open(save_human_file, \"a+\") machine_file = open(save_machine_file, \"a+\") with open(file_name) as json_file: while True: line",
"human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine < len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') else: break json_file.close() driver.close() human_file.close() machine_file.close() '''",
"(model == 'gpt2'): len_human = len(store_human_data) len_machine = len(store_machine_data) detectGPT2(json.loads(line), driver, store_human_data, store_machine_data)",
"its harm can be minimized.\" from selenium import webdriver from seleniumrequests import Chrome",
"element = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element: print(element.text.split()) if (news['label'] not in element.text.split())",
"as yet! TODO:CTRL, FakeBox\") #temporary i = 0 count = 0 #input read",
"element: if (news['label'] not in element.text.split()) and ( (news['label'] + \".\") not in",
"label not in news['label']: # print(news['article'], label, news['label']) #else: if label == 'human':",
"and (model == 'groverAI'): #print(line) detectGrover(json.loads(line), driver, store_human_data, store_machine_data) count +=1 elif len(line)!=0",
"model = args.model file_name = args.file_name save_human_file = args.save_human_file save_machine_file = args.save_machine_file store_human_data",
"30).until( EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element: if (news['label'] not in element.text.split()) and ( (news['label']",
"store_human_data.append(news) else: store_machine_data.append(news) #driver.close() def detectFakeBox(news, store_human_data, store_machine_data): maxtry = 10 res =",
"import webdriver from seleniumrequests import Chrome from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import",
"parser.parse_args() model = args.model file_name = args.file_name save_human_file = args.save_human_file save_machine_file = args.save_machine_file",
"print(news['article'], element.text.split(), news['label']) else: if news['label'] == 'human': store_human_data.append(news) else: store_machine_data.append(news) except: ans",
"except: print(\"Internet Error!Sleep 3 sec!\", res, maxtry) time.sleep(3) if res: if res[\"content_decision\"] ==",
"check_now: driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").clear() driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").send_keys(news.get('article')) ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() #ant-btn sc-bdVaJa sc-jbKcbu iUrOzv try: element =",
"try: while maxtry > 0: res = req.sendRequest(news.get('article')) maxtry = maxtry - 1",
"disinformation, or fake news intended to deceive, has emerged as a major societal",
"Chrome from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.webdriver.common.by",
"from seleniumrequests import Chrome from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as",
"“neural fake news” so that its harm can be minimized.\") #ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default\").submit()",
"not in element.text.split()) and ( (news['label'] + \".\") not in element.text.split()[-1]): print(news['article'], element.text.split(),",
"0 #input read human_file = open(save_human_file, \"a+\") machine_file = open(save_machine_file, \"a+\") with open(file_name)",
"recently-introduced AI technology based on Neural Networks might enable adversaries to generate fake",
"= [] store_machine_data = [] #check_now = human_data #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").clear() #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").send_keys(\"Online disinformation, or fake",
"= driver.find_element_by_css_selector(\"button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default\").submit() #element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\"))) def detectGrover(news, driver, store_human_data, store_machine_data): #for",
"args.save_human_file save_machine_file = args.save_machine_file store_human_data = [] store_machine_data = [] #check_now = human_data",
"machine_file = open(save_machine_file, \"a+\") with open(file_name) as json_file: while True: line = json_file.readline()",
"type=str) parser.add_argument('--file_name', type=str) parser.add_argument('--save_human_file', type=str) parser.add_argument('--save_machine_file', type=str) args = parser.parse_args() model = args.model",
"WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\"))) def detectGrover(news, driver, store_human_data, store_machine_data): #for news in check_now: driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").clear()",
"news['label'] == 'human': store_human_data.append(news) else: store_machine_data.append(news) except: print(\"Unresponsive!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\") def detectGPT2(news, driver, store_human_data, store_machine_data):",
"while True: line = json_file.readline() if len(line)!=0 and (model == 'groverAI'): #print(line) detectGrover(json.loads(line),",
"< 0.5)): label = 'human' else: label = 'machine' if label == news['label']:",
"#command-line argument parsing parser = argparse.ArgumentParser() parser.add_argument('--model', type=str) parser.add_argument('--file_name', type=str) parser.add_argument('--save_human_file', type=str) parser.add_argument('--save_machine_file',",
"label = 'machine' #if label not in news['label']: # print(news['article'], label, news['label']) #else:",
"driver.find_element_by_id(\"real-percentage\").text.split('%') if float(temp[0]) > 50: label = 'human' else: label = 'machine' #if",
"store_machine_data.append(news) except: ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() try: element = WebDriverWait(driver, 30).until( EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if",
"detectGPT2(json.loads(line), driver, store_human_data, store_machine_data) if len_human < len(store_human_data): human_file.write(str(json.dumps(store_human_data[-1]))+'\\n') elif len_machine < len(store_machine_data):",
"import expected_conditions as EC from selenium.webdriver.common.by import By import time import json import",
"fake news intended to deceive, has emerged as a major societal problem. Currently,",
"+=1 elif len(line)!=0 and (model == 'gpt2'): len_human = len(store_human_data) len_machine = len(store_machine_data)",
"are written by humans, but recently-introduced AI technology based on Neural Networks might",
"== 'fakebox': req.init() else: print(\"Not supported as yet! TODO:CTRL, FakeBox\") #temporary i =",
"req.init() else: print(\"Not supported as yet! TODO:CTRL, FakeBox\") #temporary i = 0 count",
"maxtry) time.sleep(3) if res: if res[\"content_decision\"] == 'impartial' or ((res['content_decision'] == 'bias') and",
"be minimized.\") #ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default\").submit() #element = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, \"sc-kvZOFW.bpFYHv\"))) def detectGrover(news, driver,",
"< len(store_machine_data): machine_file.write(str(json.dumps(store_machine_data[-1]))+'\\n') else: break json_file.close() driver.close() human_file.close() machine_file.close() ''' with open(save_human_file, \"w\")",
"for each in store_human_data: json_file.write(str(json.dumps(each))+'\\n') with open(save_machine_file, \"w\") as json_file: for each in",
"load if model == 'groverAI': driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data, driver) elif model == 'gpt2': driver.get(\"https://huggingface.co/openai-detector\")",
"#driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").clear() #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").send_keys(\"Online disinformation, or fake news intended to deceive, has emerged as a",
"def detectGrover(news, driver, store_human_data, store_machine_data): #for news in check_now: driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").clear() driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").send_keys(news.get('article')) ans =",
"else: store_machine_data.append(news) #model load if model == 'groverAI': driver.get(\"https://grover.allenai.org/detect\") #detectGrover(human_data, driver) elif model",
"generate fake news. Our goal is to reliably detect this “neural fake news”",
"= args.save_machine_file store_human_data = [] store_machine_data = [] #check_now = human_data #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").clear() #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").send_keys(\"Online",
"[] #check_now = human_data #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").clear() #driver.find_element_by_class_name(\"ant-input.sc-htpNat.sc-ksYbfQ.iuRnVj\").send_keys(\"Online disinformation, or fake news intended to deceive,",
"so that its harm can be minimized.\") #ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bwzfXH.sc-jDwBTQ.kNoRcT.ant-btn-default\").submit() #element = WebDriverWait(driver,",
"try: element = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element: print(element.text.split()) if (news['label'] not in",
"parser.add_argument('--save_human_file', type=str) parser.add_argument('--save_machine_file', type=str) args = parser.parse_args() model = args.model file_name = args.file_name",
"if len(line)!=0 and (model == 'groverAI'): #print(line) detectGrover(json.loads(line), driver, store_human_data, store_machine_data) count +=1",
"not in element.text.split()) and ((news['label'] + \".\") not in element.text.split()[-1]): print(news['article'], element.text.split(), news['label'])",
"read human_file = open(save_human_file, \"a+\") machine_file = open(save_machine_file, \"a+\") with open(file_name) as json_file:",
"= 10 res = 0 label = \"\" try: while maxtry > 0:",
"WebDriverWait(driver, 30).until( EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element: if (news['label'] not in element.text.split()) and (",
"news in check_now: driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").clear() driver.find_element_by_css_selector(\"textarea.ant-input.sc-dxgOiQ.sc-kTUwUJ.gEHnFy\").send_keys(news.get('article')) ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() #ant-btn sc-bdVaJa sc-jbKcbu iUrOzv try:",
"if label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #model load if model == 'groverAI':",
"args.file_name save_human_file = args.save_human_file save_machine_file = args.save_machine_file store_human_data = [] store_machine_data = []",
"news['label']) #else: if label == 'human': store_human_data.append(news) else: store_machine_data.append(news) #driver.close() def detectFakeBox(news, store_human_data,",
"Our goal is to reliably detect this “neural fake news” so that its",
"store_human_data.append(news) else: store_machine_data.append(news) except: ans = driver.find_element_by_css_selector(\"button.ant-btn.sc-bdVaJa.sc-jbKcbu.iUrOzv\").submit() try: element = WebDriverWait(driver, 30).until( EC.presence_of_element_located((By.CSS_SELECTOR,",
"import By import time import json import argparse import req #initialization human_data =",
"sc-jbKcbu iUrOzv try: element = WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, \"div.sc-dfVpRl.eIhhqn\"))) if element: print(element.text.split()) if (news['label']",
"news.keys(): #print(news.keys()) driver.find_element_by_id(\"textbox\").clear() driver.find_element_by_id(\"textbox\").send_keys(news['article']) temp = driver.find_element_by_id(\"real-percentage\") time.sleep(5) temp = driver.find_element_by_id(\"real-percentage\").text.split('%') if float(temp[0])",
"= argparse.ArgumentParser() parser.add_argument('--model', type=str) parser.add_argument('--file_name', type=str) parser.add_argument('--save_human_file', type=str) parser.add_argument('--save_machine_file', type=str) args = parser.parse_args()"
] |
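The main loop above interleaves detection and writing, so an interrupted run keeps everything classified so far. A minimal standalone sketch of that incremental JSONL pattern with the Selenium parts factored out; stream_and_route and classify are illustrative names, not part of the original script:

import json

def stream_and_route(in_path, human_path, machine_path, classify):
    """Read one JSON record per line and append it to the matching output
    file as soon as it is classified, so a crash loses at most one record.
    `classify` is any callable returning 'human' or 'machine'."""
    with open(in_path) as src, \
         open(human_path, "a") as human_out, \
         open(machine_path, "a") as machine_out:
        for line in src:
            line = line.strip()
            if not line:
                continue
            news = json.loads(line)
            out = human_out if classify(news) == "human" else machine_out
            out.write(json.dumps(news) + "\n")
            out.flush()  # keep the file current even if the browser later hangs

# usage sketch: route on the gold label carried in each record
# stream_and_route("data.jsonl", "human.jsonl", "machine.jsonl",
#                  classify=lambda news: news["label"])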
from __future__ import unicode_literals

from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from rest_framework.routers import SimpleRouter
from django.views import defaults as default_views

from scholars.courses.viewsets import CourseViewSet, SlideViewSet, CategoryViewSet, LanguageViewSet, TimezoneViewSet, \
    SlideReviewViewSet
from scholars.users.viewsets import UserViewSet

router = SimpleRouter()
router.register(r'users', UserViewSet)
router.register(r'timezones', TimezoneViewSet, base_name='timezones')
router.register(r'languages', LanguageViewSet, base_name='languages')
router.register(r'categories', CategoryViewSet)
router.register(r'courses', CourseViewSet)
router.register(r'slides', SlideViewSet)
router.register(r'reviews', SlideReviewViewSet)

urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    url(r'^api/', include('scholars.authentication.urls')),
    url(r'^api/auth/', include('rest_framework_social_oauth2.urls')),
    url(r'^api/', include(router.urls)),
    # the 'api-root' from django rest-framework's default router
    # http://www.django-rest-framework.org/api-guide/routers/#defaultrouter
    # url(r'^$', RedirectView.as_view(url=reverse_lazy('api-root'), permanent=False)),
    url(r'^$', TemplateView.as_view(template_name='index.html')),
]

urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

if settings.DEBUG:
    # This allows the error pages to be debugged during development: just
    # visit these URLs in a browser to see how the error pages look.
    urlpatterns += [
        url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
        url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
        url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
        url(r'^500/$', default_views.server_error),
    ]
    if 'debug_toolbar' in settings.INSTALLED_APPS:
        import debug_toolbar
        urlpatterns = [
            url(r'^__debug__/', include(debug_toolbar.urls)),
        ] + urlpatterns
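This URLconf targets Django 1.x-era APIs: django.conf.urls.url() and the DRF router keyword base_name. On newer Django (2.0+) and DRF (3.9+) the same table would be spelled with re_path() and basename. A minimal sketch of that translation for one route, assuming the rest of the project is unchanged:

# Newer spellings: django.urls.re_path replaces django.conf.urls.url,
# and DRF renamed the router kwarg base_name -> basename.
from django.urls import include, re_path
from rest_framework.routers import SimpleRouter

from scholars.users.viewsets import UserViewSet

router = SimpleRouter()
router.register(r'users', UserViewSet)  # basename is inferred from the queryset
# router.register(r'timezones', TimezoneViewSet, basename='timezones')

urlpatterns = [
    re_path(r'^api/', include(router.urls)),
]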
# -*- coding: utf-8 -*-
# Filename: my_crawl.py
# Function: a small crawler for rental listings
# Author: <EMAIL>
# Weibo: OceanBase晓楚
# WeChat: hustos
from bs4 import BeautifulSoup
import re
import sys
import urllib
import time
import random

reload(sys)
sys.setdefaultencoding("GBK")

# Other boards can be crawled by uncommenting the lines below.
# second-hand housing
# board = 'OurHouse'
# second-hand market, main board
# board = 'SecondMarket'
# rentals
board = 'Career_Upgrade'

keywords = []
matched = []
final = []

# for kw in open('/home/wwwroot/rent/keywords.txt').readlines():
#     keywords.append(kw.strip())
# print keywords[0]

# soup = BeautifulSoup(open('pg2.html'), "html5lib")
for page in range(1, 10):
    url = 'http://m.newsmth.net/board/%s?p=%s' % (board, page)
    data = urllib.urlopen(url).read()
    # print data
    soup = BeautifulSoup(data, "html5lib", from_encoding="utf8")
    for a in soup.find_all(href=re.compile("\/article\/" + board)):
        item = a.encode('utf-8')
        print item
        for kw in keywords:
            if item.find(kw) >= 0:
                matched.append(item)
    time.sleep(5 + 10 * random.random())  # pause between pages

for item in matched:
    if item not in final:
        final.append(item)

html = "<html><head><meta charset='UTF-8' /><title>租房</title><base href='http://m.newsmth.net/' /></head><body>"
html += "<br/>".join(final)
html += "<p>last update at %s </p><p><a href='http://m.newsmth.net/board/%s'>水木社区</a></p>" % (
    time.strftime('%Y-%m-%d %X', time.localtime()), board)
html += "</body></html>"

# output = open('/home/wwwroot/rent/index.html', 'w')
# output.write(html)
# output.close()

# notify: ping the user once the crawl finishes
# notifyUrl = "http://m.xxx.cn/rent"
# data = urllib.urlopen(notifyUrl).read()
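The crawler above is Python 2 (print statements, urllib.urlopen, the reload(sys)/setdefaultencoding hack). A rough sketch of the same fetch-and-match loop under Python 3, assuming the same mobile board pages and the html5lib parser; in Python 3 strings are Unicode, so both the encoding hack and the manual .encode('utf-8') disappear:

import random
import re
import time
from urllib.request import urlopen

from bs4 import BeautifulSoup

board = 'Career_Upgrade'
keywords = []          # e.g. loaded from a keywords file
matched = []

for page in range(1, 10):
    url = 'http://m.newsmth.net/board/%s?p=%s' % (board, page)
    data = urlopen(url).read()
    soup = BeautifulSoup(data, 'html5lib')
    for a in soup.find_all(href=re.compile(r'/article/' + board)):
        item = str(a)  # the tag's HTML as a Unicode string
        if any(kw in item for kw in keywords):
            matched.append(item)
    time.sleep(5 + 10 * random.random())  # stay polite between pages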
from six import string_types  # assumed source; only "import string_types" is recovered
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from scipy.integrate import trapz
from explore.utils import Proportions

try:
    import statsmodels.nonparametric.api as smnp
    _has_statsmodels = True
except ImportError:
    _has_statsmodels = False


def _univariate_kde(data, shade=False, vertical=False, kernel='gau',
                    bw="scott", gridsize=100, cut=3, clip=None, legend=True,
                    ax=None, cumulative=False, **kwargs):
    """Compute a univariate kernel density estimate.

    Parameters
    ----------
    shade : bool, optional
        If True, shade in the area under the KDE curve (or draw with filled
        contours when data is bivariate).
    vertical : bool, optional
        If True, density is on x-axis.
    kernel : {'gau' | 'cos' | 'biw' | 'epa' | 'tri' | 'triw'}, optional
        Code for shape of the kernel to fit with. Bivariate KDE can only use
        gaussian kernel.
    bw : {'scott' | 'silverman' | scalar | pair of scalars}, optional
        Name of reference method to determine kernel size, scalar factor, or
        scalar for each dimension of the bivariate plot. Note that the
        underlying computational libraries have different interpretations
        for this parameter: statsmodels uses it directly, but scipy treats
        it as a scaling factor for the standard deviation of the data.
    gridsize : int, optional
        Number of discrete points in the evaluation grid.
    clip : pair of scalars, or pair of pair of scalars, optional
        Lower and upper bounds for datapoints used to fit KDE. Can provide
        a pair of (low, high) bounds for bivariate plots.
    legend : bool, optional
        If True, add a legend or label the axes when possible.
    cumulative : bool, optional
        If True, draw the cumulative distribution estimated by the kde.
    ax : matplotlib axes, optional
        Axes to plot on, otherwise uses current axes.
    vertical : bool, optional
        If True, density is on x-axis.
    kwargs : key, value pairings
        Other keyword arguments are passed to ``plt.plot()`` or
        ``plt.contour{f}`` depending on whether a univariate or bivariate
        plot is being drawn.
    """
    # body reconstructed following the seaborn function this module adapts
    if clip is None:
        clip = (-np.inf, np.inf)

    # Calculate the KDE
    if np.nan_to_num(data.var()) == 0:
        # Don't try to compute KDE on singular data
        x, y = np.array([]), np.array([])  # (exact handling not recovered)
    elif _has_statsmodels:
        x, y = _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut,
                                           clip, cumulative=cumulative)
    else:
        # Fall back to scipy if missing statsmodels
        if kernel != "gau":
            kernel = "gau"
        if cumulative:
            raise ImportError("Cumulative distributions are currently "
                              "only implemented in statsmodels. "
                              "Please install statsmodels.")
        x, y = _scipy_univariate_kde(data, bw, gridsize, cut, clip)

    return x, y


def _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip,
                                cumulative=False):
    """Compute a univariate kernel density estimate using statsmodels."""
    fft = kernel == "gau"
    kde = smnp.KDEUnivariate(data)
    kde.fit(kernel, bw, fft, gridsize=gridsize, cut=cut, clip=clip)
    if cumulative:
        grid, y = kde.support, kde.cdf
    else:
        grid, y = kde.support, kde.density
    return grid, y


def _scipy_univariate_kde(data, bw, gridsize, cut, clip):
    """Compute a univariate kernel density estimate using scipy."""
    kde = stats.gaussian_kde(data, bw_method=bw)  # construction not recovered; standard scipy call
    if isinstance(bw, string_types):
        bw = "scotts" if bw == "scott" else bw
        bw = getattr(kde, "%s_factor" % bw)() * np.std(data)
    grid = _kde_support(data, bw, gridsize, cut, clip)
    y = kde(grid)
    return grid, y


def _kde_support(data, bw, gridsize, cut, clip):
    # def line not recovered; name assumed from the seaborn helper with this body
    support_min = max(data.min() - bw * cut, clip[0])
    support_max = min(data.max() + bw * cut, clip[1])
    return np.linspace(support_min, support_max, gridsize)


def get_class_kdes(values, classes, ensure_norm=True, **kde_kws):
    # signature partly reconstructed; only "(values, classes," and "**kde_kws)" are recovered
    """KDEs for values with associated classes.

    Computes the KDE of each class then weights each KDE by the number of
    points in each class. Also compute the overall KDE.

    Returns
    -------
    cl_kdes : dict
        KDE for each class, keyed by the class labels.
    overall_kde : dict
        Overall KDE (i.e. ignoring class labels).
    """
    # TODO: do we really need ensure_norm
    overall_grid, overall_y = _univariate_kde(values, **kde_kws)
    if ensure_norm:
        overall_y = norm_kde(grid=overall_grid, y=overall_y)
    overall_kde = {'grid': overall_grid, 'y': overall_y}

    # per-class loop reconstructed; the exact iteration is not recovered
    cl_props = Proportions(classes)  # assumed use of the imported Proportions
    cl_kdes = {}
    for cl in np.unique(classes):
        cl_grid, cl_y = _univariate_kde(values[classes == cl], **kde_kws)
        if ensure_norm:
            cl_y = norm_kde(grid=cl_grid, y=cl_y)
        # weight area under KDE by number of samples
        cl_y *= cl_props[cl]
        cl_kdes[cl] = {'grid': cl_grid, 'y': cl_y}
    return cl_kdes, overall_kde


def norm_kde(grid, y):
    tot = trapz(y=y, x=grid)
    return y / tot


def _univariate_kdeplot(x, y, shade=False, vertical=False, legend=True,
                        ax=None, **kwargs):
    # def line not recovered; name and signature assumed from the seaborn
    # plotting function whose body the recovered fragments match
    # Make sure the density is nonnegative
    y = np.amax(np.c_[np.zeros_like(y), y], axis=1)

    # Flip the data if the plot should be on the y axis
    if vertical:
        x, y = y, x

    # Check if a label was specified in the call
    label = kwargs.pop("label", None)

    # Otherwise check if ... (not recovered)

    legend = label is not None and legend
    label = "_nolegend_" if label is None else label

    # Use the active color cycle to find the plot color
    facecolor = kwargs.pop("facecolor", None)
    line, = ax.plot(x, y, **kwargs)  # color probe reconstructed from seaborn
    color = line.get_color()
    line.remove()
    kwargs.pop("color", None)
    facecolor = color if facecolor is None else facecolor

    # Draw the KDE plot and, optionally, shade
    ax.plot(x, y, color=color, label=label, **kwargs)
    shade_kws = dict(
        facecolor=facecolor,
        alpha=kwargs.get("alpha", 0.25),
        clip_on=kwargs.get("clip_on", True),
        zorder=kwargs.get("zorder", 1),
    )
    if shade:
        if vertical:
            ax.fill_betweenx(y, 0, x, **shade_kws)
        else:
            ax.fill_between(x, 0, y, **shade_kws)

    # Draw the legend here
    handles, labels = ax.get_legend_handles_labels()
    if legend and handles:
        ax.legend(loc="best")

    return ax


def _univariate_conditional_kdeplot(values, classes, cl_labels=None,
                                    cl_palette=None,  # assumed from the cl_palette reference below
                                    include_overall=True, shade=True,
                                    vertical=False, legend=True, ax=None,
                                    kde_kws={}, kde_plt_kws={}):
    cl_kdes, overall_kde = get_class_kdes(values, classes, **kde_kws)
    # in case 'overall' is one of the class labels ... (exact handling not recovered)
    overall_name = 'overall'
    if include_overall:
        cl_kdes[overall_name] = overall_kde

    # plot the KDE for each class
    for cl in cl_kdes.keys():
        _kwargs = kde_plt_kws.copy()
        _kwargs['shade'] = shade
        x = cl_kdes[cl]['grid']
        y = cl_kdes[cl]['y']
        if cl_palette is not None and cl in cl_palette:  # middle of this guard not recovered
            _kwargs['color'] = cl_palette[cl]            # assumed
        if cl_labels is not None and cl in cl_labels:    # assumed guard
            _kwargs['label'] = cl_labels[cl]
        else:
            _kwargs['label'] = cl
        if cl == overall_name:
            ...  # (remainder of the function not recovered in this span)
"do we really need ensure_norm overall_grid, overall_y = _univariate_kde(values, **kde_kws) if ensure_norm: overall_y",
"KDE of univariate data. shade : bool, optional If True, shade in the",
"cl in cl_palette: _kwargs['color'] = cl_palette[cl] if cl_labels is not None and cl",
"estimated by the kde. ax : matplotlib axes, optional Axes to plot on,",
"cycle to find the plot color facecolor = kwargs.pop(\"facecolor\", None) line, = ax.plot(x,",
"shade in the area under the KDE curve (or draw with filled contours",
"= _kde_support(data, bw, gridsize, cut, clip) y = kde(grid) return grid, y def",
"= overall_kde # plot the KDE for each class for cl in cl_kdes.keys():",
"line, = ax.plot(x, y, **kwargs) color = line.get_color() line.remove() kwargs.pop(\"color\", None) facecolor =",
"datapoints used to fit KDE. Can provide a pair of (low, high) bounds",
"are class labels. overall_kde: dict Overall KDE (i.e. ignoring class labels) \"\"\" #",
"'epa' | 'tri' | 'triw' }, optional Code for shape of kernel to",
"if bw != \"scott\": # scipy default msg = (\"Ignoring bandwidth choice, \"",
"the clipping if clip is None: clip = (-np.inf, np.inf) # Calculate the",
"density estimate.\"\"\" support_min = max(data.min() - bw * cut, clip[0]) support_max = min(data.max()",
"If True, density is on x-axis. kernel : {'gau' | 'cos' | 'biw'",
"= cl_kdes[cl]['y'] if cl_palette is not None and cl in cl_palette: _kwargs['color'] =",
"only use gaussian kernel. bw : {'scott' | 'silverman' | scalar | pair",
"statsmodels. \" \"Please install statsmodels.\") x, y = _scipy_univariate_kde(data, bw, gridsize, cut, clip)",
"= _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=cumulative) else: # Fall back to",
"with associated classes. Computes the KDE of each class then weights each KDE",
"trapz(y=y, x=grid) return y / tot def _univariate_kdeplot(x, y, shade=True, vertical=False, legend=True, ax=None,",
"density axis minimum to 0 if vertical: ax.set_xlim(0, auto=None) else: ax.set_ylim(0, auto=None) #",
"the KDE of each class then weights each KDE by the number of",
"axis minimum to 0 if vertical: ax.set_xlim(0, auto=None) else: ax.set_ylim(0, auto=None) # Draw",
"optional Axes to plot on, otherwise uses current axes. kwargs : key, value",
"_kde_support(data, bw, gridsize, cut, clip) y = kde(grid) return grid, y def _kde_support(data,",
"_univariate_kde(values, **kde_kws) if ensure_norm: overall_y = norm_kde(grid=overall_grid, y=overall_y) overall_kde = {'grid': overall_grid, 'y':",
"or bivariate plot is being drawn. Output ------ x: array-like, (n_grid_points, ) The",
"each class. Also compute the overall KDE. Output ------ cl_kdes, overall_kde cl_kdes: dict",
"**kwargs) color = line.get_color() line.remove() kwargs.pop(\"color\", None) facecolor = color if facecolor is",
"def _univariate_conditional_kdeplot(values, classes, cl_labels=None, cl_palette=None, include_overall=True, shade=True, vertical=False, legend=True, ax=None, kde_kws={}, kde_plt_kws={}): cl_kdes,",
"kde_kws={}, kde_plt_kws={}): cl_kdes, overall_kde = get_class_kdes(values, classes, **kde_kws) # in case 'overall' is",
"clip[0]) support_max = min(data.max() + bw * cut, clip[1]) return np.linspace(support_min, support_max, gridsize)",
"clip=None, legend=True, ax=None, cumulative=False, **kwargs): \"\"\" Computes the KDE of univariate data. shade",
"classes == cl cl_values = values[cl_mask] cl_grid, cl_y = _univariate_kde(cl_values, **kde_kws) if ensure_norm:",
"label was specified in the call label = kwargs.pop(\"label\", None) # Otherwise check",
"np.amax(np.c_[np.zeros_like(y), y], axis=1) return x, y def _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip,",
"fit with. Bivariate KDE can only use gaussian kernel. bw : {'scott' |",
"0, y, **shade_kws) # Set the density axis minimum to 0 if vertical:",
"check if the data object has a name if label is None and",
"zorder=kwargs.get(\"zorder\", 1), ) if shade: if vertical: ax.fill_betweenx(y, 0, x, **shade_kws) else: ax.fill_between(x,",
"msg = (\"Ignoring bandwidth choice, \" \"please upgrade scipy to use a different",
"of discrete points in the evaluation grid. cut : scalar, optional Draw the",
"and handles: ax.legend(loc=\"best\") return ax def _univariate_conditional_kdeplot(values, classes, cl_labels=None, cl_palette=None, include_overall=True, shade=True, vertical=False,",
"values of the KDE. \"\"\" # Sort out the clipping if clip is",
"\"\"\"Establish support for a kernel density estimate.\"\"\" support_min = max(data.min() - bw *",
"bivariate plots. legend : bool, optional If True, add a legend or label",
"numpy as np import warnings from scipy import stats from six import string_types",
"in the evaluation grid. cut : scalar, optional Draw the estimate to cut",
"legend = label is not None and legend label = \"_nolegend_\" if label",
"color=color, label=label, **kwargs) shade_kws = dict( facecolor=facecolor, alpha=kwargs.get(\"alpha\", 0.25), clip_on=kwargs.get(\"clip_on\", True), zorder=kwargs.get(\"zorder\", 1),",
"\"Please install statsmodels.\") x, y = _scipy_univariate_kde(data, bw, gridsize, cut, clip) # Make",
"is evaluated. y: array-like, (n_grid_points, ) The values of the KDE. \"\"\" #",
"on singular data msg = \"Data must have variance to compute a kernel",
"other than `gau` requires statsmodels.\" warnings.warn(msg, UserWarning) if cumulative: raise ImportError(\"Cumulative distributions are",
"in cl_labels: _kwargs['label'] = cl_labels[cl] else: _kwargs['label'] = cl if cl == overall_name:",
"class labels) \"\"\" # TODO: do we really need ensure_norm overall_grid, overall_y =",
"x, y = _scipy_univariate_kde(data, bw, gridsize, cut, clip) # Make sure the density",
"the estimate to cut * bw from the extreme data points. clip :",
"overall KDE. Output ------ cl_kdes, overall_kde cl_kdes: dict KDE for each class. Keys",
"ax : matplotlib axes, optional Axes to plot on, otherwise uses current axes.",
"a kernel density estimate.\"\"\" support_min = max(data.min() - bw * cut, clip[0]) support_max",
"here handles, labels = ax.get_legend_handles_labels() if legend and handles: ax.legend(loc=\"best\") return ax def",
"if cl_palette is not None and cl in cl_palette: _kwargs['color'] = cl_palette[cl] if",
"y = _scipy_univariate_kde(data, bw, gridsize, cut, clip) # Make sure the density is",
"shade=False, vertical=False, kernel='gau', bw=\"scott\", gridsize=100, cut=3, clip=None, legend=True, ax=None, cumulative=False, **kwargs): \"\"\" Computes",
"# Calculate the KDE if np.nan_to_num(data.var()) == 0: # Don't try to compute",
"import stats from six import string_types import matplotlib.pyplot as plt from scipy.integrate import",
"bivariate plot. Note that the underlying computational libraries have different interperetations for this",
"else label # Use the active color cycle to find the plot color",
"evaluation grid. cut : scalar, optional Draw the estimate to cut * bw",
"if we're going to add a legend legend = label is not None",
"kde_plt_kws={}): cl_kdes, overall_kde = get_class_kdes(values, classes, **kde_kws) # in case 'overall' is one",
"the evaluation grid. cut : scalar, optional Draw the estimate to cut *",
"associated classes. Computes the KDE of each class then weights each KDE by",
"array-like, (n_grid_points, ) The values of the KDE. \"\"\" # Sort out the",
"keyword arguments are passed to ``plt.plot()`` or ``plt.contour{f}`` depending on whether a univariate",
"number of samples cl_y *= cl_props[cl] cl_kdes[cl] = {'grid': cl_grid, 'y': cl_y} return",
"np.unique(classes): cl_mask = classes == cl cl_values = values[cl_mask] cl_grid, cl_y = _univariate_kde(cl_values,",
"the data. gridsize : int, optional Number of discrete points in the evaluation",
"{'scott' | 'silverman' | scalar | pair of scalars }, optional Name of",
": {'scott' | 'silverman' | scalar | pair of scalars }, optional Name",
"= {'grid': overall_grid, 'y': overall_y} cl_props = Proportions(classes) cl_kdes = {} for cl",
"else: overall_name = 'overall' cl_kdes[overall_name] = overall_kde # plot the KDE for each",
"clip) y = kde(grid) return grid, y def _kde_support(data, bw, gridsize='default', cut=3, clip=None):",
"legend : bool, optional If True, add a legend or label the axes",
"of kernel to fit with. Bivariate KDE can only use gaussian kernel. bw",
"bool, optional If True, density is on x-axis. kernel : {'gau' | 'cos'",
"kernel density estimate.\"\"\" support_min = max(data.min() - bw * cut, clip[0]) support_max =",
"gridsize : int, optional Number of discrete points in the evaluation grid. cut",
"optional Code for shape of kernel to fit with. Bivariate KDE can only",
"gridsize, cut, clip): \"\"\"Compute a univariate kernel density estimate using scipy.\"\"\" try: kde",
"``plt.contour{f}`` depending on whether a univariate or bivariate plot is being drawn. Output",
"\" \"only implemented in statsmodels. \" \"Please install statsmodels.\") x, y = _scipy_univariate_kde(data,",
"kde is evaluated. y: array-like, (n_grid_points, ) The values of the KDE. \"\"\"",
"dict KDE for each class. Keys are class labels. overall_kde: dict Overall KDE",
"ensure_norm: cl_y = norm_kde(grid=cl_grid, y=cl_y) # weight area under KDE by number of",
"| 'cos' | 'biw' | 'epa' | 'tri' | 'triw' }, optional Code",
"else: ax.set_ylim(0, auto=None) # Draw the legend here handles, labels = ax.get_legend_handles_labels() if",
"kwargs.pop(\"facecolor\", None) line, = ax.plot(x, y, **kwargs) color = line.get_color() line.remove() kwargs.pop(\"color\", None)",
"implemented in statsmodels. \" \"Please install statsmodels.\") x, y = _scipy_univariate_kde(data, bw, gridsize,",
"np.inf) # Calculate the KDE if np.nan_to_num(data.var()) == 0: # Don't try to",
"from the extreme data points. clip : pair of scalars, or pair of",
"Note that the underlying computational libraries have different interperetations for this parameter: ``statsmodels``",
"Can provide a pair of (low, high) bounds for bivariate plots. legend :",
"cl_values = values[cl_mask] cl_grid, cl_y = _univariate_kde(cl_values, **kde_kws) if ensure_norm: cl_y = norm_kde(grid=cl_grid,",
"_kwargs = kde_plt_kws.copy() _kwargs['shade'] = shade x = cl_kdes[cl]['grid'] y = cl_kdes[cl]['y'] if",
"= '--' # _kwargs['alpha'] = .2 _kwargs['zorder'] = 1 _kwargs['label'] = None #",
"| pair of scalars }, optional Name of reference method to determine kernel",
"True except ImportError: _has_statsmodels = False def _univariate_kde(data, shade=False, vertical=False, kernel='gau', bw=\"scott\", gridsize=100,",
"try: kde = stats.gaussian_kde(data, bw_method=bw) except TypeError: kde = stats.gaussian_kde(data) if bw !=",
"if shade: if vertical: ax.fill_betweenx(y, 0, x, **shade_kws) else: ax.fill_between(x, 0, y, **shade_kws)",
"y = _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=cumulative) else: # Fall back",
"\"gau\" msg = \"Kernel other than `gau` requires statsmodels.\" warnings.warn(msg, UserWarning) if cumulative:",
"axis if vertical: x, y = y, x # Check if a label",
"x, **shade_kws) else: ax.fill_between(x, 0, y, **shade_kws) # Set the density axis minimum",
"np.unique(classes): overall_name = ''.join(np.unique(classes)) else: overall_name = 'overall' cl_kdes[overall_name] = overall_kde # plot",
"== overall_name: if not include_overall: continue _kwargs['ls'] = '--' # _kwargs['alpha'] = .2",
"values where the kde is evaluated. y: array-like, (n_grid_points, ) The values of",
"gridsize=gridsize, cut=cut, clip=clip) if cumulative: grid, y = kde.support, kde.cdf else: grid, y",
"grid, y def _scipy_univariate_kde(data, bw, gridsize, cut, clip): \"\"\"Compute a univariate kernel density",
"Don't try to compute KDE on singular data msg = \"Data must have",
"where the kde is evaluated. y: array-like, (n_grid_points, ) The values of the",
"to use a different bandwidth.\") warnings.warn(msg, UserWarning) if isinstance(bw, string_types): bw = \"scotts\"",
"clip, cumulative=cumulative) else: # Fall back to scipy if missing statsmodels if kernel",
"choice, \" \"please upgrade scipy to use a different bandwidth.\") warnings.warn(msg, UserWarning) if",
"Fall back to scipy if missing statsmodels if kernel != \"gau\": kernel =",
"msg = \"Data must have variance to compute a kernel density estimate.\" warnings.warn(msg,",
"UserWarning) x, y = np.array([]), np.array([]) elif _has_statsmodels: # Prefer using statsmodels for",
"bool, optional If True, add a legend or label the axes when possible.",
"kernel, bw, gridsize, cut, clip, cumulative=cumulative) else: # Fall back to scipy if",
"factor for the standard deviation of the data. gridsize : int, optional Number",
"line.get_color() line.remove() kwargs.pop(\"color\", None) facecolor = color if facecolor is None else facecolor",
"= label is not None and legend label = \"_nolegend_\" if label is",
"optional Number of discrete points in the evaluation grid. cut : scalar, optional",
"Computes the KDE of univariate data. shade : bool, optional If True, shade",
"extreme data points. clip : pair of scalars, or pair of pair of",
"| scalar | pair of scalars }, optional Name of reference method to",
"def _kde_support(data, bw, gridsize='default', cut=3, clip=None): \"\"\"Establish support for a kernel density estimate.\"\"\"",
"``scipy`` treats it as a scaling factor for the standard deviation of the",
"_kwargs['color'] = cl_palette[cl] if cl_labels is not None and cl in cl_labels: _kwargs['label']",
"if isinstance(bw, string_types): bw = \"scotts\" if bw == \"scott\" else bw bw",
"# _kwargs['alpha'] = .2 _kwargs['zorder'] = 1 _kwargs['label'] = None # 'overall' _kwargs['color']",
"overall_name = ''.join(np.unique(classes)) else: overall_name = 'overall' cl_kdes[overall_name] = overall_kde # plot the",
"bw from the extreme data points. clip : pair of scalars, or pair",
"area under the KDE curve (or draw with filled contours when data is",
"sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) # Flip the",
"_kwargs['ls'] = '--' # _kwargs['alpha'] = .2 _kwargs['zorder'] = 1 _kwargs['label'] = None",
"cut=cut, clip=clip) if cumulative: grid, y = kde.support, kde.cdf else: grid, y =",
"ax.legend(loc=\"best\") return ax def _univariate_conditional_kdeplot(values, classes, cl_labels=None, cl_palette=None, include_overall=True, shade=True, vertical=False, legend=True, ax=None,",
"not None and cl in cl_palette: _kwargs['color'] = cl_palette[cl] if cl_labels is not",
"gaussian kernel. bw : {'scott' | 'silverman' | scalar | pair of scalars",
"a univariate or bivariate plot is being drawn. Output ------ x: array-like, (n_grid_points,",
"'cos' | 'biw' | 'epa' | 'tri' | 'triw' }, optional Code for",
"= cl_kdes[cl]['grid'] y = cl_kdes[cl]['y'] if cl_palette is not None and cl in",
"the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) return x, y def",
"matplotlib.pyplot as plt from scipy.integrate import trapz from explore.utils import Proportions try: import",
"None else facecolor # Draw the KDE plot and, optionally, shade ax.plot(x, y,",
"univariate data. shade : bool, optional If True, shade in the area under",
"optional Draw the estimate to cut * bw from the extreme data points.",
"if vertical: ax.set_xlim(0, auto=None) else: ax.set_ylim(0, auto=None) # Draw the legend here handles,",
"= dict( facecolor=facecolor, alpha=kwargs.get(\"alpha\", 0.25), clip_on=kwargs.get(\"clip_on\", True), zorder=kwargs.get(\"zorder\", 1), ) if shade: if",
"Name of reference method to determine kernel size, scalar factor, or scalar for",
"whether a univariate or bivariate plot is being drawn. Output ------ x: array-like,",
"cumulative distribution estimated by the kde. ax : matplotlib axes, optional Axes to",
"for kernel flexibility x, y = _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=cumulative)",
"if ensure_norm: cl_y = norm_kde(grid=cl_grid, y=cl_y) # weight area under KDE by number",
"1), ) if shade: if vertical: ax.fill_betweenx(y, 0, x, **shade_kws) else: ax.fill_between(x, 0,",
"= trapz(y=y, x=grid) return y / tot def _univariate_kdeplot(x, y, shade=True, vertical=False, legend=True,",
"# Draw the legend here handles, labels = ax.get_legend_handles_labels() if legend and handles:",
"is not None and cl in cl_labels: _kwargs['label'] = cl_labels[cl] else: _kwargs['label'] =",
"= smnp.KDEUnivariate(data) kde.fit(kernel, bw, fft, gridsize=gridsize, cut=cut, clip=clip) if cumulative: grid, y =",
"UserWarning) if cumulative: raise ImportError(\"Cumulative distributions are currently \" \"only implemented in statsmodels.",
"the plot should be on the y axis if vertical: x, y =",
"labels. overall_kde: dict Overall KDE (i.e. ignoring class labels) \"\"\" # TODO: do",
"}, optional Name of reference method to determine kernel size, scalar factor, or",
"= cl_labels[cl] else: _kwargs['label'] = cl if cl == overall_name: if not include_overall:",
"+ bw * cut, clip[1]) return np.linspace(support_min, support_max, gridsize) def get_class_kdes(values, classes, ensure_norm=True,",
"use gaussian kernel. bw : {'scott' | 'silverman' | scalar | pair of",
"Make sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) # Flip",
"cl_kdes.keys(): _kwargs = kde_plt_kws.copy() _kwargs['shade'] = shade x = cl_kdes[cl]['grid'] y = cl_kdes[cl]['y']",
"the legend here handles, labels = ax.get_legend_handles_labels() if legend and handles: ax.legend(loc=\"best\") return",
"in statsmodels. \" \"Please install statsmodels.\") x, y = _scipy_univariate_kde(data, bw, gridsize, cut,",
"``plt.plot()`` or ``plt.contour{f}`` depending on whether a univariate or bivariate plot is being",
"None and legend label = \"_nolegend_\" if label is None else label #",
"handles, labels = ax.get_legend_handles_labels() if legend and handles: ax.legend(loc=\"best\") return ax def _univariate_conditional_kdeplot(values,",
"scalars, optional Lower and upper bounds for datapoints used to fit KDE. Can",
"plots. legend : bool, optional If True, add a legend or label the",
"estimate using scipy.\"\"\" try: kde = stats.gaussian_kde(data, bw_method=bw) except TypeError: kde = stats.gaussian_kde(data)",
"cl_kdes = {} for cl in np.unique(classes): cl_mask = classes == cl cl_values",
"overall_kde def norm_kde(grid, y): tot = trapz(y=y, x=grid) return y / tot def",
"shade=True, vertical=False, legend=True, ax=None, **kwargs): \"\"\"Plot a univariate kernel density estimate on one",
"are passed to ``plt.plot()`` or ``plt.contour{f}`` depending on whether a univariate or bivariate",
"support for a kernel density estimate.\"\"\" support_min = max(data.min() - bw * cut,",
"plt from scipy.integrate import trapz from explore.utils import Proportions try: import statsmodels.nonparametric.api as",
"(or draw with filled contours when data is bivariate). vertical : bool, optional",
"cut : scalar, optional Draw the estimate to cut * bw from the",
"fft, gridsize=gridsize, cut=cut, clip=clip) if cumulative: grid, y = kde.support, kde.cdf else: grid,",
"if 'overall' in np.unique(classes): overall_name = ''.join(np.unique(classes)) else: overall_name = 'overall' cl_kdes[overall_name] =",
"overall_kde = {'grid': overall_grid, 'y': overall_y} cl_props = Proportions(classes) cl_kdes = {} for",
"label # Use the active color cycle to find the plot color facecolor",
"values[cl_mask] cl_grid, cl_y = _univariate_kde(cl_values, **kde_kws) if ensure_norm: cl_y = norm_kde(grid=cl_grid, y=cl_y) #",
"return y / tot def _univariate_kdeplot(x, y, shade=True, vertical=False, legend=True, ax=None, **kwargs): \"\"\"Plot",
"np.array([]), np.array([]) elif _has_statsmodels: # Prefer using statsmodels for kernel flexibility x, y",
"| 'epa' | 'tri' | 'triw' }, optional Code for shape of kernel",
"ax.set_ylim(0, auto=None) # Draw the legend here handles, labels = ax.get_legend_handles_labels() if legend",
"with. Bivariate KDE can only use gaussian kernel. bw : {'scott' | 'silverman'",
"overall_y = norm_kde(grid=overall_grid, y=overall_y) overall_kde = {'grid': overall_grid, 'y': overall_y} cl_props = Proportions(classes)",
"Check if a label was specified in the call label = kwargs.pop(\"label\", None)",
"y, **shade_kws) # Set the density axis minimum to 0 if vertical: ax.set_xlim(0,",
"np.amax(np.c_[np.zeros_like(y), y], axis=1) # Flip the data if the plot should be on",
"classes, cl_labels=None, cl_palette=None, include_overall=True, shade=True, vertical=False, legend=True, ax=None, kde_kws={}, kde_plt_kws={}): cl_kdes, overall_kde =",
"y / tot def _univariate_kdeplot(x, y, shade=True, vertical=False, legend=True, ax=None, **kwargs): \"\"\"Plot a",
"warnings.warn(msg, UserWarning) if isinstance(bw, string_types): bw = \"scotts\" if bw == \"scott\" else",
"cl_y = _univariate_kde(cl_values, **kde_kws) if ensure_norm: cl_y = norm_kde(grid=cl_grid, y=cl_y) # weight area",
"active color cycle to find the plot color facecolor = kwargs.pop(\"facecolor\", None) line,",
"not None and cl in cl_labels: _kwargs['label'] = cl_labels[cl] else: _kwargs['label'] = cl",
"cut, clip[0]) support_max = min(data.max() + bw * cut, clip[1]) return np.linspace(support_min, support_max,",
"that the underlying computational libraries have different interperetations for this parameter: ``statsmodels`` uses",
"different bandwidth.\") warnings.warn(msg, UserWarning) if isinstance(bw, string_types): bw = \"scotts\" if bw ==",
"density estimate.\" warnings.warn(msg, UserWarning) x, y = np.array([]), np.array([]) elif _has_statsmodels: # Prefer",
"KDE for each class. Keys are class labels. overall_kde: dict Overall KDE (i.e.",
"dict( facecolor=facecolor, alpha=kwargs.get(\"alpha\", 0.25), clip_on=kwargs.get(\"clip_on\", True), zorder=kwargs.get(\"zorder\", 1), ) if shade: if vertical:",
"'overall' is one of the classes if 'overall' in np.unique(classes): overall_name = ''.join(np.unique(classes))",
"kernel != \"gau\": kernel = \"gau\" msg = \"Kernel other than `gau` requires",
"label is not None and legend label = \"_nolegend_\" if label is None",
"warnings from scipy import stats from six import string_types import matplotlib.pyplot as plt",
"\"Data must have variance to compute a kernel density estimate.\" warnings.warn(msg, UserWarning) x,",
"overall_name: if not include_overall: continue _kwargs['ls'] = '--' # _kwargs['alpha'] = .2 _kwargs['zorder']",
"import matplotlib.pyplot as plt from scipy.integrate import trapz from explore.utils import Proportions try:",
"class for cl in cl_kdes.keys(): _kwargs = kde_plt_kws.copy() _kwargs['shade'] = shade x =",
"six import string_types import matplotlib.pyplot as plt from scipy.integrate import trapz from explore.utils",
"each dimension of the bivariate plot. Note that the underlying computational libraries have",
"\"_nolegend_\" if label is None else label # Use the active color cycle",
"key, value pairings Other keyword arguments are passed to ``plt.plot()`` or ``plt.contour{f}`` depending",
": pair of scalars, or pair of pair of scalars, optional Lower and",
"optional If True, add a legend or label the axes when possible. cumulative",
"if legend and handles: ax.legend(loc=\"best\") return ax def _univariate_conditional_kdeplot(values, classes, cl_labels=None, cl_palette=None, include_overall=True,",
"if missing statsmodels if kernel != \"gau\": kernel = \"gau\" msg = \"Kernel",
"**kde_kws) if ensure_norm: overall_y = norm_kde(grid=overall_grid, y=overall_y) overall_kde = {'grid': overall_grid, 'y': overall_y}",
"if vertical: ax.fill_betweenx(y, 0, x, **shade_kws) else: ax.fill_between(x, 0, y, **shade_kws) # Set",
"depending on whether a univariate or bivariate plot is being drawn. Output ------",
"support_max, gridsize) def get_class_kdes(values, classes, ensure_norm=True, **kde_kws): \"\"\" KDEs for values with associated",
"it directly, but ``scipy`` treats it as a scaling factor for the standard",
"return np.linspace(support_min, support_max, gridsize) def get_class_kdes(values, classes, ensure_norm=True, **kde_kws): \"\"\" KDEs for values",
"hasattr(x, \"name\"): label = x.name # Decide if we're going to add a",
"_kwargs['zorder'] = 1 _kwargs['label'] = None # 'overall' _kwargs['color'] = 'gray' _kwargs['shade'] =",
"the kde is evaluated. y: array-like, (n_grid_points, ) The values of the KDE.",
"line.remove() kwargs.pop(\"color\", None) facecolor = color if facecolor is None else facecolor #",
"bw * cut, clip[0]) support_max = min(data.max() + bw * cut, clip[1]) return",
": bool, optional If True, shade in the area under the KDE curve",
"clip[1]) return np.linspace(support_min, support_max, gridsize) def get_class_kdes(values, classes, ensure_norm=True, **kde_kws): \"\"\" KDEs for",
"None) line, = ax.plot(x, y, **kwargs) color = line.get_color() line.remove() kwargs.pop(\"color\", None) facecolor",
"== 0: # Don't try to compute KDE on singular data msg =",
"gridsize='default', cut=3, clip=None): \"\"\"Establish support for a kernel density estimate.\"\"\" support_min = max(data.min()",
"by the number of points in each class. Also compute the overall KDE.",
"None) # Otherwise check if the data object has a name if label",
"'overall' in np.unique(classes): overall_name = ''.join(np.unique(classes)) else: overall_name = 'overall' cl_kdes[overall_name] = overall_kde",
"= get_class_kdes(values, classes, **kde_kws) # in case 'overall' is one of the classes",
"bandwidth choice, \" \"please upgrade scipy to use a different bandwidth.\") warnings.warn(msg, UserWarning)",
"by number of samples cl_y *= cl_props[cl] cl_kdes[cl] = {'grid': cl_grid, 'y': cl_y}",
"classes, **kde_kws) # in case 'overall' is one of the classes if 'overall'",
"density is on x-axis. kernel : {'gau' | 'cos' | 'biw' | 'epa'",
"label = \"_nolegend_\" if label is None else label # Use the active",
"kwargs : key, value pairings Other keyword arguments are passed to ``plt.plot()`` or",
"fit KDE. Can provide a pair of (low, high) bounds for bivariate plots.",
"# Make sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) return",
"discrete points in the evaluation grid. cut : scalar, optional Draw the estimate",
"y = kde.support, kde.density return grid, y def _scipy_univariate_kde(data, bw, gridsize, cut, clip):",
"import warnings from scipy import stats from six import string_types import matplotlib.pyplot as",
"= \"scotts\" if bw == \"scott\" else bw bw = getattr(kde, \"%s_factor\" %",
"_univariate_kde(cl_values, **kde_kws) if ensure_norm: cl_y = norm_kde(grid=cl_grid, y=cl_y) # weight area under KDE",
"Otherwise check if the data object has a name if label is None",
"vertical: ax.fill_betweenx(y, 0, x, **shade_kws) else: ax.fill_between(x, 0, y, **shade_kws) # Set the",
"ax.fill_betweenx(y, 0, x, **shade_kws) else: ax.fill_between(x, 0, y, **shade_kws) # Set the density",
"cl_palette=None, include_overall=True, shade=True, vertical=False, legend=True, ax=None, kde_kws={}, kde_plt_kws={}): cl_kdes, overall_kde = get_class_kdes(values, classes,",
"as smnp _has_statsmodels = True except ImportError: _has_statsmodels = False def _univariate_kde(data, shade=False,",
"y, color=color, label=label, **kwargs) shade_kws = dict( facecolor=facecolor, alpha=kwargs.get(\"alpha\", 0.25), clip_on=kwargs.get(\"clip_on\", True), zorder=kwargs.get(\"zorder\",",
"is bivariate). vertical : bool, optional If True, density is on x-axis. kernel",
"compute a kernel density estimate.\" warnings.warn(msg, UserWarning) x, y = np.array([]), np.array([]) elif",
"number of points in each class. Also compute the overall KDE. Output ------",
"_univariate_kdeplot(x, y, shade=True, vertical=False, legend=True, ax=None, **kwargs): \"\"\"Plot a univariate kernel density estimate",
"else: # Fall back to scipy if missing statsmodels if kernel != \"gau\":",
"compute the overall KDE. Output ------ cl_kdes, overall_kde cl_kdes: dict KDE for each",
"if a label was specified in the call label = kwargs.pop(\"label\", None) #",
"is None else label # Use the active color cycle to find the",
"ImportError: _has_statsmodels = False def _univariate_kde(data, shade=False, vertical=False, kernel='gau', bw=\"scott\", gridsize=100, cut=3, clip=None,",
"interperetations for this parameter: ``statsmodels`` uses it directly, but ``scipy`` treats it as",
"y = kde.support, kde.cdf else: grid, y = kde.support, kde.density return grid, y",
"clip): \"\"\"Compute a univariate kernel density estimate using scipy.\"\"\" try: kde = stats.gaussian_kde(data,",
"ax.get_legend_handles_labels() if legend and handles: ax.legend(loc=\"best\") return ax def _univariate_conditional_kdeplot(values, classes, cl_labels=None, cl_palette=None,",
"cl == overall_name: if not include_overall: continue _kwargs['ls'] = '--' # _kwargs['alpha'] =",
"0: # Don't try to compute KDE on singular data msg = \"Data",
"bw * cut, clip[1]) return np.linspace(support_min, support_max, gridsize) def get_class_kdes(values, classes, ensure_norm=True, **kde_kws):",
"to fit with. Bivariate KDE can only use gaussian kernel. bw : {'scott'",
"bw, fft, gridsize=gridsize, cut=cut, clip=clip) if cumulative: grid, y = kde.support, kde.cdf else:",
"- bw * cut, clip[0]) support_max = min(data.max() + bw * cut, clip[1])",
"of scalars, optional Lower and upper bounds for datapoints used to fit KDE.",
"!= \"scott\": # scipy default msg = (\"Ignoring bandwidth choice, \" \"please upgrade",
"out the clipping if clip is None: clip = (-np.inf, np.inf) # Calculate",
"axes. kwargs : key, value pairings Other keyword arguments are passed to ``plt.plot()``",
"def _univariate_kde(data, shade=False, vertical=False, kernel='gau', bw=\"scott\", gridsize=100, cut=3, clip=None, legend=True, ax=None, cumulative=False, **kwargs):",
"it as a scaling factor for the standard deviation of the data. gridsize",
"plt.gca() # Make sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1)",
"underlying computational libraries have different interperetations for this parameter: ``statsmodels`` uses it directly,",
"cut, clip) y = kde(grid) return grid, y def _kde_support(data, bw, gridsize='default', cut=3,",
"if vertical: x, y = y, x # Check if a label was",
"optionally, shade ax.plot(x, y, color=color, label=label, **kwargs) shade_kws = dict( facecolor=facecolor, alpha=kwargs.get(\"alpha\", 0.25),",
"going to add a legend legend = label is not None and legend",
"one of the classes if 'overall' in np.unique(classes): overall_name = ''.join(np.unique(classes)) else: overall_name",
"KDE. Output ------ cl_kdes, overall_kde cl_kdes: dict KDE for each class. Keys are",
"(-np.inf, np.inf) # Calculate the KDE if np.nan_to_num(data.var()) == 0: # Don't try",
"def _univariate_kdeplot(x, y, shade=True, vertical=False, legend=True, ax=None, **kwargs): \"\"\"Plot a univariate kernel density",
"**shade_kws) else: ax.fill_between(x, 0, y, **shade_kws) # Set the density axis minimum to",
"y = cl_kdes[cl]['y'] if cl_palette is not None and cl in cl_palette: _kwargs['color']",
"classes. Computes the KDE of each class then weights each KDE by the",
"if cl_labels is not None and cl in cl_labels: _kwargs['label'] = cl_labels[cl] else:",
"return ax def _univariate_conditional_kdeplot(values, classes, cl_labels=None, cl_palette=None, include_overall=True, shade=True, vertical=False, legend=True, ax=None, kde_kws={},",
"= 1 _kwargs['label'] = None # 'overall' _kwargs['color'] = 'gray' _kwargs['shade'] = False",
") if shade: if vertical: ax.fill_betweenx(y, 0, x, **shade_kws) else: ax.fill_between(x, 0, y,",
"| 'triw' }, optional Code for shape of kernel to fit with. Bivariate",
"= classes == cl cl_values = values[cl_mask] cl_grid, cl_y = _univariate_kde(cl_values, **kde_kws) if",
"KDE for each class for cl in cl_kdes.keys(): _kwargs = kde_plt_kws.copy() _kwargs['shade'] =",
"compute KDE on singular data msg = \"Data must have variance to compute",
"overall_y = _univariate_kde(values, **kde_kws) if ensure_norm: overall_y = norm_kde(grid=overall_grid, y=overall_y) overall_kde = {'grid':",
"cumulative=False, **kwargs): \"\"\" Computes the KDE of univariate data. shade : bool, optional",
"vertical : bool, optional If True, density is on x-axis. kernel : {'gau'",
"overall_y} cl_props = Proportions(classes) cl_kdes = {} for cl in np.unique(classes): cl_mask =",
"optional If True, density is on x-axis. kernel : {'gau' | 'cos' |",
"bw bw = getattr(kde, \"%s_factor\" % bw)() * np.std(data) grid = _kde_support(data, bw,",
"a univariate kernel density estimate on one of the axes.\"\"\" if ax is",
"for datapoints used to fit KDE. Can provide a pair of (low, high)",
"grid, y = kde.support, kde.density return grid, y def _scipy_univariate_kde(data, bw, gridsize, cut,",
"x # Check if a label was specified in the call label =",
"gridsize) def get_class_kdes(values, classes, ensure_norm=True, **kde_kws): \"\"\" KDEs for values with associated classes.",
"True), zorder=kwargs.get(\"zorder\", 1), ) if shade: if vertical: ax.fill_betweenx(y, 0, x, **shade_kws) else:",
"= True except ImportError: _has_statsmodels = False def _univariate_kde(data, shade=False, vertical=False, kernel='gau', bw=\"scott\",",
"ax.fill_between(x, 0, y, **shade_kws) # Set the density axis minimum to 0 if",
"cl_kdes: dict KDE for each class. Keys are class labels. overall_kde: dict Overall",
"auto=None) # Draw the legend here handles, labels = ax.get_legend_handles_labels() if legend and",
"overall_name = 'overall' cl_kdes[overall_name] = overall_kde # plot the KDE for each class",
"* cut, clip[1]) return np.linspace(support_min, support_max, gridsize) def get_class_kdes(values, classes, ensure_norm=True, **kde_kws): \"\"\"",
"Draw the estimate to cut * bw from the extreme data points. clip",
"of scalars, or pair of pair of scalars, optional Lower and upper bounds",
"_statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=False): \"\"\"Compute a univariate kernel density estimate",
"this parameter: ``statsmodels`` uses it directly, but ``scipy`` treats it as a scaling",
"bw_method=bw) except TypeError: kde = stats.gaussian_kde(data) if bw != \"scott\": # scipy default",
"the area under the KDE curve (or draw with filled contours when data",
"data if the plot should be on the y axis if vertical: x,",
"for shape of kernel to fit with. Bivariate KDE can only use gaussian",
"in np.unique(classes): cl_mask = classes == cl cl_values = values[cl_mask] cl_grid, cl_y =",
"if the plot should be on the y axis if vertical: x, y",
"kwargs.pop(\"label\", None) # Otherwise check if the data object has a name if",
"points in the evaluation grid. cut : scalar, optional Draw the estimate to",
"clipping if clip is None: clip = (-np.inf, np.inf) # Calculate the KDE",
"**kwargs) shade_kws = dict( facecolor=facecolor, alpha=kwargs.get(\"alpha\", 0.25), clip_on=kwargs.get(\"clip_on\", True), zorder=kwargs.get(\"zorder\", 1), ) if",
"on one of the axes.\"\"\" if ax is None: ax = plt.gca() #",
"# Set the density axis minimum to 0 if vertical: ax.set_xlim(0, auto=None) else:",
"kde.support, kde.density return grid, y def _scipy_univariate_kde(data, bw, gridsize, cut, clip): \"\"\"Compute a",
"If True, shade in the area under the KDE curve (or draw with",
"otherwise uses current axes. kwargs : key, value pairings Other keyword arguments are",
"variance to compute a kernel density estimate.\" warnings.warn(msg, UserWarning) x, y = np.array([]),",
"\"please upgrade scipy to use a different bandwidth.\") warnings.warn(msg, UserWarning) if isinstance(bw, string_types):",
"cumulative : bool, optional If True, draw the cumulative distribution estimated by the",
"when data is bivariate). vertical : bool, optional If True, density is on",
"need ensure_norm overall_grid, overall_y = _univariate_kde(values, **kde_kws) if ensure_norm: overall_y = norm_kde(grid=overall_grid, y=overall_y)",
"ax = plt.gca() # Make sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y),",
": scalar, optional Draw the estimate to cut * bw from the extreme",
"facecolor=facecolor, alpha=kwargs.get(\"alpha\", 0.25), clip_on=kwargs.get(\"clip_on\", True), zorder=kwargs.get(\"zorder\", 1), ) if shade: if vertical: ax.fill_betweenx(y,",
"classes if 'overall' in np.unique(classes): overall_name = ''.join(np.unique(classes)) else: overall_name = 'overall' cl_kdes[overall_name]",
"from scipy import stats from six import string_types import matplotlib.pyplot as plt from",
"optional If True, shade in the area under the KDE curve (or draw",
"np.array([]) elif _has_statsmodels: # Prefer using statsmodels for kernel flexibility x, y =",
"ax=None, cumulative=False, **kwargs): \"\"\" Computes the KDE of univariate data. shade : bool,",
"# Sort out the clipping if clip is None: clip = (-np.inf, np.inf)",
"alpha=kwargs.get(\"alpha\", 0.25), clip_on=kwargs.get(\"clip_on\", True), zorder=kwargs.get(\"zorder\", 1), ) if shade: if vertical: ax.fill_betweenx(y, 0,",
"# 'overall' _kwargs['color'] = 'gray' _kwargs['shade'] = False _univariate_kdeplot(x=x, y=y, vertical=vertical, legend=legend, ax=ax,",
"= np.array([]), np.array([]) elif _has_statsmodels: # Prefer using statsmodels for kernel flexibility x,",
"default msg = (\"Ignoring bandwidth choice, \" \"please upgrade scipy to use a",
"return grid, y def _scipy_univariate_kde(data, bw, gridsize, cut, clip): \"\"\"Compute a univariate kernel",
"statsmodels.\" warnings.warn(msg, UserWarning) if cumulative: raise ImportError(\"Cumulative distributions are currently \" \"only implemented",
"as np import warnings from scipy import stats from six import string_types import",
"from six import string_types import matplotlib.pyplot as plt from scipy.integrate import trapz from",
"by the kde. ax : matplotlib axes, optional Axes to plot on, otherwise",
"the KDE curve (or draw with filled contours when data is bivariate). vertical",
"None) facecolor = color if facecolor is None else facecolor # Draw the",
"= kwargs.pop(\"facecolor\", None) line, = ax.plot(x, y, **kwargs) color = line.get_color() line.remove() kwargs.pop(\"color\",",
"density estimate using statsmodels.\"\"\" fft = kernel == \"gau\" kde = smnp.KDEUnivariate(data) kde.fit(kernel,",
"= None # 'overall' _kwargs['color'] = 'gray' _kwargs['shade'] = False _univariate_kdeplot(x=x, y=y, vertical=vertical,",
"scipy.integrate import trapz from explore.utils import Proportions try: import statsmodels.nonparametric.api as smnp _has_statsmodels",
"------ x: array-like, (n_grid_points, ) The grid of values where the kde is",
"cl in cl_kdes.keys(): _kwargs = kde_plt_kws.copy() _kwargs['shade'] = shade x = cl_kdes[cl]['grid'] y",
"overall_kde cl_kdes: dict KDE for each class. Keys are class labels. overall_kde: dict",
"Code for shape of kernel to fit with. Bivariate KDE can only use",
"plot color facecolor = kwargs.pop(\"facecolor\", None) line, = ax.plot(x, y, **kwargs) color =",
"cut=3, clip=None, legend=True, ax=None, cumulative=False, **kwargs): \"\"\" Computes the KDE of univariate data.",
"size, scalar factor, or scalar for each dimension of the bivariate plot. Note",
"under the KDE curve (or draw with filled contours when data is bivariate).",
"bw, gridsize, cut, clip) # Make sure the density is nonnegative y =",
"kernel density estimate.\" warnings.warn(msg, UserWarning) x, y = np.array([]), np.array([]) elif _has_statsmodels: #",
"string_types): bw = \"scotts\" if bw == \"scott\" else bw bw = getattr(kde,",
"a label was specified in the call label = kwargs.pop(\"label\", None) # Otherwise",
"if facecolor is None else facecolor # Draw the KDE plot and, optionally,",
"'triw' }, optional Code for shape of kernel to fit with. Bivariate KDE",
"has a name if label is None and hasattr(x, \"name\"): label = x.name",
"_kwargs['label'] = cl if cl == overall_name: if not include_overall: continue _kwargs['ls'] =",
"Also compute the overall KDE. Output ------ cl_kdes, overall_kde cl_kdes: dict KDE for",
"(n_grid_points, ) The values of the KDE. \"\"\" # Sort out the clipping",
"``statsmodels`` uses it directly, but ``scipy`` treats it as a scaling factor for",
"\"\"\" # TODO: do we really need ensure_norm overall_grid, overall_y = _univariate_kde(values, **kde_kws)",
"{'grid': cl_grid, 'y': cl_y} return cl_kdes, overall_kde def norm_kde(grid, y): tot = trapz(y=y,",
"shade=True, vertical=False, legend=True, ax=None, kde_kws={}, kde_plt_kws={}): cl_kdes, overall_kde = get_class_kdes(values, classes, **kde_kws) #",
"to fit KDE. Can provide a pair of (low, high) bounds for bivariate",
"to compute a kernel density estimate.\" warnings.warn(msg, UserWarning) x, y = np.array([]), np.array([])",
"= y, x # Check if a label was specified in the call",
"clip=clip) if cumulative: grid, y = kde.support, kde.cdf else: grid, y = kde.support,",
"continue _kwargs['ls'] = '--' # _kwargs['alpha'] = .2 _kwargs['zorder'] = 1 _kwargs['label'] =",
"from scipy.integrate import trapz from explore.utils import Proportions try: import statsmodels.nonparametric.api as smnp",
"= plt.gca() # Make sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y],",
"ax is None: ax = plt.gca() # Make sure the density is nonnegative",
"of points in each class. Also compute the overall KDE. Output ------ cl_kdes,",
"vertical: ax.set_xlim(0, auto=None) else: ax.set_ylim(0, auto=None) # Draw the legend here handles, labels",
"ensure_norm overall_grid, overall_y = _univariate_kde(values, **kde_kws) if ensure_norm: overall_y = norm_kde(grid=overall_grid, y=overall_y) overall_kde",
"`gau` requires statsmodels.\" warnings.warn(msg, UserWarning) if cumulative: raise ImportError(\"Cumulative distributions are currently \"",
"kde = smnp.KDEUnivariate(data) kde.fit(kernel, bw, fft, gridsize=gridsize, cut=cut, clip=clip) if cumulative: grid, y",
"in cl_kdes.keys(): _kwargs = kde_plt_kws.copy() _kwargs['shade'] = shade x = cl_kdes[cl]['grid'] y =",
"cut, clip, cumulative=False): \"\"\"Compute a univariate kernel density estimate using statsmodels.\"\"\" fft =",
"KDE. \"\"\" # Sort out the clipping if clip is None: clip =",
"have variance to compute a kernel density estimate.\" warnings.warn(msg, UserWarning) x, y =",
"on whether a univariate or bivariate plot is being drawn. Output ------ x:",
"= _univariate_kde(values, **kde_kws) if ensure_norm: overall_y = norm_kde(grid=overall_grid, y=overall_y) overall_kde = {'grid': overall_grid,",
"in the call label = kwargs.pop(\"label\", None) # Otherwise check if the data",
"optional Name of reference method to determine kernel size, scalar factor, or scalar",
"= max(data.min() - bw * cut, clip[0]) support_max = min(data.max() + bw *",
"True, draw the cumulative distribution estimated by the kde. ax : matplotlib axes,",
".2 _kwargs['zorder'] = 1 _kwargs['label'] = None # 'overall' _kwargs['color'] = 'gray' _kwargs['shade']",
"Axes to plot on, otherwise uses current axes. kwargs : key, value pairings",
"= kde(grid) return grid, y def _kde_support(data, bw, gridsize='default', cut=3, clip=None): \"\"\"Establish support",
"cl_labels=None, cl_palette=None, include_overall=True, shade=True, vertical=False, legend=True, ax=None, kde_kws={}, kde_plt_kws={}): cl_kdes, overall_kde = get_class_kdes(values,",
"\"\"\"Compute a univariate kernel density estimate using scipy.\"\"\" try: kde = stats.gaussian_kde(data, bw_method=bw)",
"# Draw the KDE plot and, optionally, shade ax.plot(x, y, color=color, label=label, **kwargs)",
"include_overall: continue _kwargs['ls'] = '--' # _kwargs['alpha'] = .2 _kwargs['zorder'] = 1 _kwargs['label']",
"= \"_nolegend_\" if label is None else label # Use the active color",
"= kde.support, kde.density return grid, y def _scipy_univariate_kde(data, bw, gridsize, cut, clip): \"\"\"Compute",
"Make sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) return x,",
"directly, but ``scipy`` treats it as a scaling factor for the standard deviation",
"from explore.utils import Proportions try: import statsmodels.nonparametric.api as smnp _has_statsmodels = True except",
"if clip is None: clip = (-np.inf, np.inf) # Calculate the KDE if",
"_scipy_univariate_kde(data, bw, gridsize, cut, clip) # Make sure the density is nonnegative y",
"requires statsmodels.\" warnings.warn(msg, UserWarning) if cumulative: raise ImportError(\"Cumulative distributions are currently \" \"only",
"a different bandwidth.\") warnings.warn(msg, UserWarning) if isinstance(bw, string_types): bw = \"scotts\" if bw",
"for a kernel density estimate.\"\"\" support_min = max(data.min() - bw * cut, clip[0])",
"# Otherwise check if the data object has a name if label is",
"drawn. Output ------ x: array-like, (n_grid_points, ) The grid of values where the",
"bw = \"scotts\" if bw == \"scott\" else bw bw = getattr(kde, \"%s_factor\"",
"Decide if we're going to add a legend legend = label is not",
"determine kernel size, scalar factor, or scalar for each dimension of the bivariate",
"def _scipy_univariate_kde(data, bw, gridsize, cut, clip): \"\"\"Compute a univariate kernel density estimate using",
"nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) return x, y def _statsmodels_univariate_kde(data, kernel, bw,",
"the y axis if vertical: x, y = y, x # Check if",
"= \"gau\" msg = \"Kernel other than `gau` requires statsmodels.\" warnings.warn(msg, UserWarning) if",
": bool, optional If True, density is on x-axis. kernel : {'gau' |",
"True, add a legend or label the axes when possible. cumulative : bool,",
"statsmodels.\"\"\" fft = kernel == \"gau\" kde = smnp.KDEUnivariate(data) kde.fit(kernel, bw, fft, gridsize=gridsize,",
"cl_y} return cl_kdes, overall_kde def norm_kde(grid, y): tot = trapz(y=y, x=grid) return y",
"the axes.\"\"\" if ax is None: ax = plt.gca() # Make sure the",
"y], axis=1) # Flip the data if the plot should be on the",
"Draw the KDE plot and, optionally, shade ax.plot(x, y, color=color, label=label, **kwargs) shade_kws",
"clip=None): \"\"\"Establish support for a kernel density estimate.\"\"\" support_min = max(data.min() - bw",
"of scalars }, optional Name of reference method to determine kernel size, scalar",
"\"\"\"Compute a univariate kernel density estimate using statsmodels.\"\"\" fft = kernel == \"gau\"",
"smnp.KDEUnivariate(data) kde.fit(kernel, bw, fft, gridsize=gridsize, cut=cut, clip=clip) if cumulative: grid, y = kde.support,",
"shade : bool, optional If True, shade in the area under the KDE",
"bounds for bivariate plots. legend : bool, optional If True, add a legend",
"color cycle to find the plot color facecolor = kwargs.pop(\"facecolor\", None) line, =",
"in the area under the KDE curve (or draw with filled contours when",
"| 'tri' | 'triw' }, optional Code for shape of kernel to fit",
"optional Lower and upper bounds for datapoints used to fit KDE. Can provide",
"== cl cl_values = values[cl_mask] cl_grid, cl_y = _univariate_kde(cl_values, **kde_kws) if ensure_norm: cl_y",
"msg = \"Kernel other than `gau` requires statsmodels.\" warnings.warn(msg, UserWarning) if cumulative: raise",
"Computes the KDE of each class then weights each KDE by the number",
"_kwargs['label'] = None # 'overall' _kwargs['color'] = 'gray' _kwargs['shade'] = False _univariate_kdeplot(x=x, y=y,",
"for this parameter: ``statsmodels`` uses it directly, but ``scipy`` treats it as a",
"distribution estimated by the kde. ax : matplotlib axes, optional Axes to plot",
"under KDE by number of samples cl_y *= cl_props[cl] cl_kdes[cl] = {'grid': cl_grid,",
"we're going to add a legend legend = label is not None and",
"cl_labels[cl] else: _kwargs['label'] = cl if cl == overall_name: if not include_overall: continue",
"deviation of the data. gridsize : int, optional Number of discrete points in",
"kernel, bw, gridsize, cut, clip, cumulative=False): \"\"\"Compute a univariate kernel density estimate using",
"current axes. kwargs : key, value pairings Other keyword arguments are passed to",
"for values with associated classes. Computes the KDE of each class then weights",
"getattr(kde, \"%s_factor\" % bw)() * np.std(data) grid = _kde_support(data, bw, gridsize, cut, clip)",
"the KDE for each class for cl in cl_kdes.keys(): _kwargs = kde_plt_kws.copy() _kwargs['shade']",
"statsmodels for kernel flexibility x, y = _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip,",
"flexibility x, y = _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=cumulative) else: #",
"else bw bw = getattr(kde, \"%s_factor\" % bw)() * np.std(data) grid = _kde_support(data,",
"to find the plot color facecolor = kwargs.pop(\"facecolor\", None) line, = ax.plot(x, y,",
"= getattr(kde, \"%s_factor\" % bw)() * np.std(data) grid = _kde_support(data, bw, gridsize, cut,",
"facecolor # Draw the KDE plot and, optionally, shade ax.plot(x, y, color=color, label=label,",
"cl_palette: _kwargs['color'] = cl_palette[cl] if cl_labels is not None and cl in cl_labels:",
"the active color cycle to find the plot color facecolor = kwargs.pop(\"facecolor\", None)",
"and legend label = \"_nolegend_\" if label is None else label # Use",
"should be on the y axis if vertical: x, y = y, x",
"plot on, otherwise uses current axes. kwargs : key, value pairings Other keyword",
"bw=\"scott\", gridsize=100, cut=3, clip=None, legend=True, ax=None, cumulative=False, **kwargs): \"\"\" Computes the KDE of",
"# scipy default msg = (\"Ignoring bandwidth choice, \" \"please upgrade scipy to",
"KDE on singular data msg = \"Data must have variance to compute a",
"{'grid': overall_grid, 'y': overall_y} cl_props = Proportions(classes) cl_kdes = {} for cl in",
"= cl_palette[cl] if cl_labels is not None and cl in cl_labels: _kwargs['label'] =",
"import Proportions try: import statsmodels.nonparametric.api as smnp _has_statsmodels = True except ImportError: _has_statsmodels",
"= False def _univariate_kde(data, shade=False, vertical=False, kernel='gau', bw=\"scott\", gridsize=100, cut=3, clip=None, legend=True, ax=None,",
"points. clip : pair of scalars, or pair of pair of scalars, optional",
"None and cl in cl_palette: _kwargs['color'] = cl_palette[cl] if cl_labels is not None",
"Overall KDE (i.e. ignoring class labels) \"\"\" # TODO: do we really need",
"x.name # Decide if we're going to add a legend legend = label",
"a univariate kernel density estimate using statsmodels.\"\"\" fft = kernel == \"gau\" kde",
"else: _kwargs['label'] = cl if cl == overall_name: if not include_overall: continue _kwargs['ls']",
"except TypeError: kde = stats.gaussian_kde(data) if bw != \"scott\": # scipy default msg",
"is None: ax = plt.gca() # Make sure the density is nonnegative y",
"ensure_norm=True, **kde_kws): \"\"\" KDEs for values with associated classes. Computes the KDE of",
"_univariate_conditional_kdeplot(values, classes, cl_labels=None, cl_palette=None, include_overall=True, shade=True, vertical=False, legend=True, ax=None, kde_kws={}, kde_plt_kws={}): cl_kdes, overall_kde",
"The values of the KDE. \"\"\" # Sort out the clipping if clip",
"using scipy.\"\"\" try: kde = stats.gaussian_kde(data, bw_method=bw) except TypeError: kde = stats.gaussian_kde(data) if",
"if cumulative: grid, y = kde.support, kde.cdf else: grid, y = kde.support, kde.density",
"weights each KDE by the number of points in each class. Also compute",
"being drawn. Output ------ x: array-like, (n_grid_points, ) The grid of values where",
"scipy import stats from six import string_types import matplotlib.pyplot as plt from scipy.integrate",
"use a different bandwidth.\") warnings.warn(msg, UserWarning) if isinstance(bw, string_types): bw = \"scotts\" if",
"class then weights each KDE by the number of points in each class.",
"to ``plt.plot()`` or ``plt.contour{f}`` depending on whether a univariate or bivariate plot is",
"scalar factor, or scalar for each dimension of the bivariate plot. Note that",
"legend=True, ax=None, cumulative=False, **kwargs): \"\"\" Computes the KDE of univariate data. shade :",
"Keys are class labels. overall_kde: dict Overall KDE (i.e. ignoring class labels) \"\"\"",
"(n_grid_points, ) The grid of values where the kde is evaluated. y: array-like,",
"the KDE of univariate data. shade : bool, optional If True, shade in",
"| 'silverman' | scalar | pair of scalars }, optional Name of reference",
"points in each class. Also compute the overall KDE. Output ------ cl_kdes, overall_kde",
"Flip the data if the plot should be on the y axis if",
"else: ax.fill_between(x, 0, y, **shade_kws) # Set the density axis minimum to 0",
"handles: ax.legend(loc=\"best\") return ax def _univariate_conditional_kdeplot(values, classes, cl_labels=None, cl_palette=None, include_overall=True, shade=True, vertical=False, legend=True,",
"in each class. Also compute the overall KDE. Output ------ cl_kdes, overall_kde cl_kdes:",
"is on x-axis. kernel : {'gau' | 'cos' | 'biw' | 'epa' |",
"Output ------ cl_kdes, overall_kde cl_kdes: dict KDE for each class. Keys are class",
"}, optional Code for shape of kernel to fit with. Bivariate KDE can",
"else: grid, y = kde.support, kde.density return grid, y def _scipy_univariate_kde(data, bw, gridsize,",
"np import warnings from scipy import stats from six import string_types import matplotlib.pyplot",
"kde = stats.gaussian_kde(data, bw_method=bw) except TypeError: kde = stats.gaussian_kde(data) if bw != \"scott\":",
"the overall KDE. Output ------ cl_kdes, overall_kde cl_kdes: dict KDE for each class.",
"color = line.get_color() line.remove() kwargs.pop(\"color\", None) facecolor = color if facecolor is None",
"the KDE plot and, optionally, shade ax.plot(x, y, color=color, label=label, **kwargs) shade_kws =",
"0.25), clip_on=kwargs.get(\"clip_on\", True), zorder=kwargs.get(\"zorder\", 1), ) if shade: if vertical: ax.fill_betweenx(y, 0, x,",
"is not None and cl in cl_palette: _kwargs['color'] = cl_palette[cl] if cl_labels is",
"isinstance(bw, string_types): bw = \"scotts\" if bw == \"scott\" else bw bw =",
"overall_grid, overall_y = _univariate_kde(values, **kde_kws) if ensure_norm: overall_y = norm_kde(grid=overall_grid, y=overall_y) overall_kde =",
"# Don't try to compute KDE on singular data msg = \"Data must",
"computational libraries have different interperetations for this parameter: ``statsmodels`` uses it directly, but",
"to scipy if missing statsmodels if kernel != \"gau\": kernel = \"gau\" msg",
"Number of discrete points in the evaluation grid. cut : scalar, optional Draw",
"\"%s_factor\" % bw)() * np.std(data) grid = _kde_support(data, bw, gridsize, cut, clip) y",
"x: array-like, (n_grid_points, ) The grid of values where the kde is evaluated.",
"None else label # Use the active color cycle to find the plot",
"ax def _univariate_conditional_kdeplot(values, classes, cl_labels=None, cl_palette=None, include_overall=True, shade=True, vertical=False, legend=True, ax=None, kde_kws={}, kde_plt_kws={}):",
"**kde_kws) if ensure_norm: cl_y = norm_kde(grid=cl_grid, y=cl_y) # weight area under KDE by",
"density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) return x, y def _statsmodels_univariate_kde(data,",
"to 0 if vertical: ax.set_xlim(0, auto=None) else: ax.set_ylim(0, auto=None) # Draw the legend",
"a kernel density estimate.\" warnings.warn(msg, UserWarning) x, y = np.array([]), np.array([]) elif _has_statsmodels:",
"estimate.\" warnings.warn(msg, UserWarning) x, y = np.array([]), np.array([]) elif _has_statsmodels: # Prefer using",
"ax.set_xlim(0, auto=None) else: ax.set_ylim(0, auto=None) # Draw the legend here handles, labels =",
"univariate or bivariate plot is being drawn. Output ------ x: array-like, (n_grid_points, )",
"cl_palette[cl] if cl_labels is not None and cl in cl_labels: _kwargs['label'] = cl_labels[cl]",
"'overall' _kwargs['color'] = 'gray' _kwargs['shade'] = False _univariate_kdeplot(x=x, y=y, vertical=vertical, legend=legend, ax=ax, **_kwargs)",
"provide a pair of (low, high) bounds for bivariate plots. legend : bool,",
"\"gau\" kde = smnp.KDEUnivariate(data) kde.fit(kernel, bw, fft, gridsize=gridsize, cut=cut, clip=clip) if cumulative: grid,",
"shape of kernel to fit with. Bivariate KDE can only use gaussian kernel.",
"clip) # Make sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1)",
"label the axes when possible. cumulative : bool, optional If True, draw the",
"\"\"\" KDEs for values with associated classes. Computes the KDE of each class",
"axes when possible. cumulative : bool, optional If True, draw the cumulative distribution",
"label = kwargs.pop(\"label\", None) # Otherwise check if the data object has a",
"in np.unique(classes): overall_name = ''.join(np.unique(classes)) else: overall_name = 'overall' cl_kdes[overall_name] = overall_kde #",
"KDE curve (or draw with filled contours when data is bivariate). vertical :",
"= kde.support, kde.cdf else: grid, y = kde.support, kde.density return grid, y def",
"of univariate data. shade : bool, optional If True, shade in the area",
"int, optional Number of discrete points in the evaluation grid. cut : scalar,",
"if the data object has a name if label is None and hasattr(x,",
"ensure_norm: overall_y = norm_kde(grid=overall_grid, y=overall_y) overall_kde = {'grid': overall_grid, 'y': overall_y} cl_props =",
"for each class for cl in cl_kdes.keys(): _kwargs = kde_plt_kws.copy() _kwargs['shade'] = shade",
"_statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=cumulative) else: # Fall back to scipy",
"bw, gridsize='default', cut=3, clip=None): \"\"\"Establish support for a kernel density estimate.\"\"\" support_min =",
"density estimate using scipy.\"\"\" try: kde = stats.gaussian_kde(data, bw_method=bw) except TypeError: kde =",
"axis=1) # Flip the data if the plot should be on the y",
"np.std(data) grid = _kde_support(data, bw, gridsize, cut, clip) y = kde(grid) return grid,",
"curve (or draw with filled contours when data is bivariate). vertical : bool,",
"the plot color facecolor = kwargs.pop(\"facecolor\", None) line, = ax.plot(x, y, **kwargs) color",
"warnings.warn(msg, UserWarning) x, y = np.array([]), np.array([]) elif _has_statsmodels: # Prefer using statsmodels",
"if cumulative: raise ImportError(\"Cumulative distributions are currently \" \"only implemented in statsmodels. \"",
"cl_kdes[cl]['grid'] y = cl_kdes[cl]['y'] if cl_palette is not None and cl in cl_palette:",
"possible. cumulative : bool, optional If True, draw the cumulative distribution estimated by",
"= shade x = cl_kdes[cl]['grid'] y = cl_kdes[cl]['y'] if cl_palette is not None",
"back to scipy if missing statsmodels if kernel != \"gau\": kernel = \"gau\"",
"a legend legend = label is not None and legend label = \"_nolegend_\"",
"evaluated. y: array-like, (n_grid_points, ) The values of the KDE. \"\"\" # Sort",
"y = np.array([]), np.array([]) elif _has_statsmodels: # Prefer using statsmodels for kernel flexibility",
"for cl in np.unique(classes): cl_mask = classes == cl cl_values = values[cl_mask] cl_grid,",
"kde = stats.gaussian_kde(data) if bw != \"scott\": # scipy default msg = (\"Ignoring",
"Draw the legend here handles, labels = ax.get_legend_handles_labels() if legend and handles: ax.legend(loc=\"best\")",
"dict Overall KDE (i.e. ignoring class labels) \"\"\" # TODO: do we really",
"stats.gaussian_kde(data) if bw != \"scott\": # scipy default msg = (\"Ignoring bandwidth choice,",
"to compute KDE on singular data msg = \"Data must have variance to",
"standard deviation of the data. gridsize : int, optional Number of discrete points",
"KDE by the number of points in each class. Also compute the overall",
"* np.std(data) grid = _kde_support(data, bw, gridsize, cut, clip) y = kde(grid) return",
"cl_kdes[cl]['y'] if cl_palette is not None and cl in cl_palette: _kwargs['color'] = cl_palette[cl]",
"warnings.warn(msg, UserWarning) if cumulative: raise ImportError(\"Cumulative distributions are currently \" \"only implemented in",
"scalars }, optional Name of reference method to determine kernel size, scalar factor,",
"support_max = min(data.max() + bw * cut, clip[1]) return np.linspace(support_min, support_max, gridsize) def",
"using statsmodels.\"\"\" fft = kernel == \"gau\" kde = smnp.KDEUnivariate(data) kde.fit(kernel, bw, fft,",
"try to compute KDE on singular data msg = \"Data must have variance",
"to add a legend legend = label is not None and legend label",
"bw, gridsize, cut, clip, cumulative=False): \"\"\"Compute a univariate kernel density estimate using statsmodels.\"\"\"",
"and hasattr(x, \"name\"): label = x.name # Decide if we're going to add",
"array-like, (n_grid_points, ) The grid of values where the kde is evaluated. y:",
"is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) # Flip the data if the",
"filled contours when data is bivariate). vertical : bool, optional If True, density",
"or scalar for each dimension of the bivariate plot. Note that the underlying",
"univariate kernel density estimate using scipy.\"\"\" try: kde = stats.gaussian_kde(data, bw_method=bw) except TypeError:",
"*= cl_props[cl] cl_kdes[cl] = {'grid': cl_grid, 'y': cl_y} return cl_kdes, overall_kde def norm_kde(grid,",
"on x-axis. kernel : {'gau' | 'cos' | 'biw' | 'epa' | 'tri'",
"y], axis=1) return x, y def _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=False):",
"vertical=False, legend=True, ax=None, kde_kws={}, kde_plt_kws={}): cl_kdes, overall_kde = get_class_kdes(values, classes, **kde_kws) # in",
"cl_grid, cl_y = _univariate_kde(cl_values, **kde_kws) if ensure_norm: cl_y = norm_kde(grid=cl_grid, y=cl_y) # weight",
"cl_y *= cl_props[cl] cl_kdes[cl] = {'grid': cl_grid, 'y': cl_y} return cl_kdes, overall_kde def",
"of the axes.\"\"\" if ax is None: ax = plt.gca() # Make sure",
"y, shade=True, vertical=False, legend=True, ax=None, **kwargs): \"\"\"Plot a univariate kernel density estimate on",
"cl in np.unique(classes): cl_mask = classes == cl cl_values = values[cl_mask] cl_grid, cl_y",
"ax=None, kde_kws={}, kde_plt_kws={}): cl_kdes, overall_kde = get_class_kdes(values, classes, **kde_kws) # in case 'overall'",
"dimension of the bivariate plot. Note that the underlying computational libraries have different",
"high) bounds for bivariate plots. legend : bool, optional If True, add a",
"= \"Data must have variance to compute a kernel density estimate.\" warnings.warn(msg, UserWarning)",
"factor, or scalar for each dimension of the bivariate plot. Note that the",
"each class. Keys are class labels. overall_kde: dict Overall KDE (i.e. ignoring class",
"= 'overall' cl_kdes[overall_name] = overall_kde # plot the KDE for each class for",
"= (\"Ignoring bandwidth choice, \" \"please upgrade scipy to use a different bandwidth.\")",
"add a legend legend = label is not None and legend label =",
"ignoring class labels) \"\"\" # TODO: do we really need ensure_norm overall_grid, overall_y",
"distributions are currently \" \"only implemented in statsmodels. \" \"Please install statsmodels.\") x,",
"= _scipy_univariate_kde(data, bw, gridsize, cut, clip) # Make sure the density is nonnegative",
"= stats.gaussian_kde(data, bw_method=bw) except TypeError: kde = stats.gaussian_kde(data) if bw != \"scott\": #",
"a univariate kernel density estimate using scipy.\"\"\" try: kde = stats.gaussian_kde(data, bw_method=bw) except",
"legend label = \"_nolegend_\" if label is None else label # Use the",
"Output ------ x: array-like, (n_grid_points, ) The grid of values where the kde",
"= np.amax(np.c_[np.zeros_like(y), y], axis=1) return x, y def _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut,",
"cut, clip[1]) return np.linspace(support_min, support_max, gridsize) def get_class_kdes(values, classes, ensure_norm=True, **kde_kws): \"\"\" KDEs",
"is one of the classes if 'overall' in np.unique(classes): overall_name = ''.join(np.unique(classes)) else:",
"a scaling factor for the standard deviation of the data. gridsize : int,",
"classes, ensure_norm=True, **kde_kws): \"\"\" KDEs for values with associated classes. Computes the KDE",
"!= \"gau\": kernel = \"gau\" msg = \"Kernel other than `gau` requires statsmodels.\"",
"y = np.amax(np.c_[np.zeros_like(y), y], axis=1) # Flip the data if the plot should",
"shade ax.plot(x, y, color=color, label=label, **kwargs) shade_kws = dict( facecolor=facecolor, alpha=kwargs.get(\"alpha\", 0.25), clip_on=kwargs.get(\"clip_on\",",
"the classes if 'overall' in np.unique(classes): overall_name = ''.join(np.unique(classes)) else: overall_name = 'overall'",
"libraries have different interperetations for this parameter: ``statsmodels`` uses it directly, but ``scipy``",
"y = np.amax(np.c_[np.zeros_like(y), y], axis=1) return x, y def _statsmodels_univariate_kde(data, kernel, bw, gridsize,",
"data points. clip : pair of scalars, or pair of pair of scalars,",
"of values where the kde is evaluated. y: array-like, (n_grid_points, ) The values",
"statsmodels.nonparametric.api as smnp _has_statsmodels = True except ImportError: _has_statsmodels = False def _univariate_kde(data,",
"cut, clip, cumulative=cumulative) else: # Fall back to scipy if missing statsmodels if",
"parameter: ``statsmodels`` uses it directly, but ``scipy`` treats it as a scaling factor",
"if bw == \"scott\" else bw bw = getattr(kde, \"%s_factor\" % bw)() *",
"contours when data is bivariate). vertical : bool, optional If True, density is",
"norm_kde(grid=overall_grid, y=overall_y) overall_kde = {'grid': overall_grid, 'y': overall_y} cl_props = Proportions(classes) cl_kdes =",
"cl_palette is not None and cl in cl_palette: _kwargs['color'] = cl_palette[cl] if cl_labels",
"bw : {'scott' | 'silverman' | scalar | pair of scalars }, optional",
"value pairings Other keyword arguments are passed to ``plt.plot()`` or ``plt.contour{f}`` depending on",
"= norm_kde(grid=overall_grid, y=overall_y) overall_kde = {'grid': overall_grid, 'y': overall_y} cl_props = Proportions(classes) cl_kdes",
"= stats.gaussian_kde(data) if bw != \"scott\": # scipy default msg = (\"Ignoring bandwidth",
"ax=None, **kwargs): \"\"\"Plot a univariate kernel density estimate on one of the axes.\"\"\"",
"y, **kwargs) color = line.get_color() line.remove() kwargs.pop(\"color\", None) facecolor = color if facecolor",
"get_class_kdes(values, classes, **kde_kws) # in case 'overall' is one of the classes if",
"= ''.join(np.unique(classes)) else: overall_name = 'overall' cl_kdes[overall_name] = overall_kde # plot the KDE",
"each class for cl in cl_kdes.keys(): _kwargs = kde_plt_kws.copy() _kwargs['shade'] = shade x",
"True, density is on x-axis. kernel : {'gau' | 'cos' | 'biw' |",
"ImportError(\"Cumulative distributions are currently \" \"only implemented in statsmodels. \" \"Please install statsmodels.\")",
"legend and handles: ax.legend(loc=\"best\") return ax def _univariate_conditional_kdeplot(values, classes, cl_labels=None, cl_palette=None, include_overall=True, shade=True,",
"_kwargs['shade'] = shade x = cl_kdes[cl]['grid'] y = cl_kdes[cl]['y'] if cl_palette is not",
"the call label = kwargs.pop(\"label\", None) # Otherwise check if the data object",
": bool, optional If True, draw the cumulative distribution estimated by the kde.",
"= values[cl_mask] cl_grid, cl_y = _univariate_kde(cl_values, **kde_kws) if ensure_norm: cl_y = norm_kde(grid=cl_grid, y=cl_y)",
"arguments are passed to ``plt.plot()`` or ``plt.contour{f}`` depending on whether a univariate or",
"shade_kws = dict( facecolor=facecolor, alpha=kwargs.get(\"alpha\", 0.25), clip_on=kwargs.get(\"clip_on\", True), zorder=kwargs.get(\"zorder\", 1), ) if shade:",
"# plot the KDE for each class for cl in cl_kdes.keys(): _kwargs =",
"bw)() * np.std(data) grid = _kde_support(data, bw, gridsize, cut, clip) y = kde(grid)",
"kernel flexibility x, y = _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=cumulative) else:",
"labels = ax.get_legend_handles_labels() if legend and handles: ax.legend(loc=\"best\") return ax def _univariate_conditional_kdeplot(values, classes,",
"pair of (low, high) bounds for bivariate plots. legend : bool, optional If",
"clip, cumulative=False): \"\"\"Compute a univariate kernel density estimate using statsmodels.\"\"\" fft = kernel",
"== \"scott\" else bw bw = getattr(kde, \"%s_factor\" % bw)() * np.std(data) grid",
"optional If True, draw the cumulative distribution estimated by the kde. ax :",
"1 _kwargs['label'] = None # 'overall' _kwargs['color'] = 'gray' _kwargs['shade'] = False _univariate_kdeplot(x=x,",
"y def _kde_support(data, bw, gridsize='default', cut=3, clip=None): \"\"\"Establish support for a kernel density",
"\"scott\": # scipy default msg = (\"Ignoring bandwidth choice, \" \"please upgrade scipy",
"call label = kwargs.pop(\"label\", None) # Otherwise check if the data object has",
"grid = _kde_support(data, bw, gridsize, cut, clip) y = kde(grid) return grid, y",
"class. Also compute the overall KDE. Output ------ cl_kdes, overall_kde cl_kdes: dict KDE",
"cl_props = Proportions(classes) cl_kdes = {} for cl in np.unique(classes): cl_mask = classes",
"scalars, or pair of pair of scalars, optional Lower and upper bounds for",
"bivariate plot is being drawn. Output ------ x: array-like, (n_grid_points, ) The grid",
"Set the density axis minimum to 0 if vertical: ax.set_xlim(0, auto=None) else: ax.set_ylim(0,",
"label is None else label # Use the active color cycle to find",
"find the plot color facecolor = kwargs.pop(\"facecolor\", None) line, = ax.plot(x, y, **kwargs)",
"draw the cumulative distribution estimated by the kde. ax : matplotlib axes, optional",
"None # 'overall' _kwargs['color'] = 'gray' _kwargs['shade'] = False _univariate_kdeplot(x=x, y=y, vertical=vertical, legend=legend,",
"for each class. Keys are class labels. overall_kde: dict Overall KDE (i.e. ignoring",
"tot def _univariate_kdeplot(x, y, shade=True, vertical=False, legend=True, ax=None, **kwargs): \"\"\"Plot a univariate kernel",
"Prefer using statsmodels for kernel flexibility x, y = _statsmodels_univariate_kde(data, kernel, bw, gridsize,",
"\"gau\": kernel = \"gau\" msg = \"Kernel other than `gau` requires statsmodels.\" warnings.warn(msg,",
"overall_kde: dict Overall KDE (i.e. ignoring class labels) \"\"\" # TODO: do we",
"kernel density estimate using scipy.\"\"\" try: kde = stats.gaussian_kde(data, bw_method=bw) except TypeError: kde",
"\"scotts\" if bw == \"scott\" else bw bw = getattr(kde, \"%s_factor\" % bw)()",
"KDEs for values with associated classes. Computes the KDE of each class then",
"return grid, y def _kde_support(data, bw, gridsize='default', cut=3, clip=None): \"\"\"Establish support for a",
"in cl_palette: _kwargs['color'] = cl_palette[cl] if cl_labels is not None and cl in",
"the kde. ax : matplotlib axes, optional Axes to plot on, otherwise uses",
"the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) # Flip the data",
"True, shade in the area under the KDE curve (or draw with filled",
"overall_kde = get_class_kdes(values, classes, **kde_kws) # in case 'overall' is one of the",
"= min(data.max() + bw * cut, clip[1]) return np.linspace(support_min, support_max, gridsize) def get_class_kdes(values,",
"values with associated classes. Computes the KDE of each class then weights each",
"= {'grid': cl_grid, 'y': cl_y} return cl_kdes, overall_kde def norm_kde(grid, y): tot =",
"install statsmodels.\") x, y = _scipy_univariate_kde(data, bw, gridsize, cut, clip) # Make sure",
"elif _has_statsmodels: # Prefer using statsmodels for kernel flexibility x, y = _statsmodels_univariate_kde(data,",
"Lower and upper bounds for datapoints used to fit KDE. Can provide a",
"gridsize, cut, clip) # Make sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y),",
"estimate to cut * bw from the extreme data points. clip : pair",
"cl cl_values = values[cl_mask] cl_grid, cl_y = _univariate_kde(cl_values, **kde_kws) if ensure_norm: cl_y =",
"uses it directly, but ``scipy`` treats it as a scaling factor for the",
"singular data msg = \"Data must have variance to compute a kernel density",
"if label is None else label # Use the active color cycle to",
"as plt from scipy.integrate import trapz from explore.utils import Proportions try: import statsmodels.nonparametric.api",
"legend or label the axes when possible. cumulative : bool, optional If True,",
"the density axis minimum to 0 if vertical: ax.set_xlim(0, auto=None) else: ax.set_ylim(0, auto=None)",
"**kwargs): \"\"\"Plot a univariate kernel density estimate on one of the axes.\"\"\" if",
"is None else facecolor # Draw the KDE plot and, optionally, shade ax.plot(x,",
"kernel density estimate using statsmodels.\"\"\" fft = kernel == \"gau\" kde = smnp.KDEUnivariate(data)",
"if kernel != \"gau\": kernel = \"gau\" msg = \"Kernel other than `gau`",
"'overall' cl_kdes[overall_name] = overall_kde # plot the KDE for each class for cl",
"_kwargs['alpha'] = .2 _kwargs['zorder'] = 1 _kwargs['label'] = None # 'overall' _kwargs['color'] =",
"0, x, **shade_kws) else: ax.fill_between(x, 0, y, **shade_kws) # Set the density axis",
"but ``scipy`` treats it as a scaling factor for the standard deviation of",
"# weight area under KDE by number of samples cl_y *= cl_props[cl] cl_kdes[cl]",
"pair of scalars }, optional Name of reference method to determine kernel size,",
"# TODO: do we really need ensure_norm overall_grid, overall_y = _univariate_kde(values, **kde_kws) if",
"if not include_overall: continue _kwargs['ls'] = '--' # _kwargs['alpha'] = .2 _kwargs['zorder'] =",
"missing statsmodels if kernel != \"gau\": kernel = \"gau\" msg = \"Kernel other",
"is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) return x, y def _statsmodels_univariate_kde(data, kernel,",
"% bw)() * np.std(data) grid = _kde_support(data, bw, gridsize, cut, clip) y =",
"KDE (i.e. ignoring class labels) \"\"\" # TODO: do we really need ensure_norm",
"color facecolor = kwargs.pop(\"facecolor\", None) line, = ax.plot(x, y, **kwargs) color = line.get_color()",
"None and cl in cl_labels: _kwargs['label'] = cl_labels[cl] else: _kwargs['label'] = cl if",
"upper bounds for datapoints used to fit KDE. Can provide a pair of",
"scalar, optional Draw the estimate to cut * bw from the extreme data",
"is None and hasattr(x, \"name\"): label = x.name # Decide if we're going",
"------ cl_kdes, overall_kde cl_kdes: dict KDE for each class. Keys are class labels.",
"method to determine kernel size, scalar factor, or scalar for each dimension of",
"of each class then weights each KDE by the number of points in",
"y def _scipy_univariate_kde(data, bw, gridsize, cut, clip): \"\"\"Compute a univariate kernel density estimate",
"vertical=False, legend=True, ax=None, **kwargs): \"\"\"Plot a univariate kernel density estimate on one of",
"tot = trapz(y=y, x=grid) return y / tot def _univariate_kdeplot(x, y, shade=True, vertical=False,",
"np.linspace(support_min, support_max, gridsize) def get_class_kdes(values, classes, ensure_norm=True, **kde_kws): \"\"\" KDEs for values with",
"legend legend = label is not None and legend label = \"_nolegend_\" if",
"cl_grid, 'y': cl_y} return cl_kdes, overall_kde def norm_kde(grid, y): tot = trapz(y=y, x=grid)",
"# Flip the data if the plot should be on the y axis",
"# Decide if we're going to add a legend legend = label is",
"norm_kde(grid=cl_grid, y=cl_y) # weight area under KDE by number of samples cl_y *=",
"or label the axes when possible. cumulative : bool, optional If True, draw",
"labels) \"\"\" # TODO: do we really need ensure_norm overall_grid, overall_y = _univariate_kde(values,",
": {'gau' | 'cos' | 'biw' | 'epa' | 'tri' | 'triw' },",
"# Check if a label was specified in the call label = kwargs.pop(\"label\",",
"and, optionally, shade ax.plot(x, y, color=color, label=label, **kwargs) shade_kws = dict( facecolor=facecolor, alpha=kwargs.get(\"alpha\",",
"bivariate). vertical : bool, optional If True, density is on x-axis. kernel :",
"the bivariate plot. Note that the underlying computational libraries have different interperetations for",
"density estimate on one of the axes.\"\"\" if ax is None: ax =",
"if label is None and hasattr(x, \"name\"): label = x.name # Decide if",
"* bw from the extreme data points. clip : pair of scalars, or",
"\" \"Please install statsmodels.\") x, y = _scipy_univariate_kde(data, bw, gridsize, cut, clip) #",
"'y': cl_y} return cl_kdes, overall_kde def norm_kde(grid, y): tot = trapz(y=y, x=grid) return",
"kernel : {'gau' | 'cos' | 'biw' | 'epa' | 'tri' | 'triw'",
"def _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=False): \"\"\"Compute a univariate kernel density",
"# Prefer using statsmodels for kernel flexibility x, y = _statsmodels_univariate_kde(data, kernel, bw,",
"cl_mask = classes == cl cl_values = values[cl_mask] cl_grid, cl_y = _univariate_kde(cl_values, **kde_kws)",
"== \"gau\" kde = smnp.KDEUnivariate(data) kde.fit(kernel, bw, fft, gridsize=gridsize, cut=cut, clip=clip) if cumulative:",
"have different interperetations for this parameter: ``statsmodels`` uses it directly, but ``scipy`` treats",
"= ax.plot(x, y, **kwargs) color = line.get_color() line.remove() kwargs.pop(\"color\", None) facecolor = color",
"scalar for each dimension of the bivariate plot. Note that the underlying computational",
"add a legend or label the axes when possible. cumulative : bool, optional",
"of the KDE. \"\"\" # Sort out the clipping if clip is None:",
"bw, gridsize, cut, clip): \"\"\"Compute a univariate kernel density estimate using scipy.\"\"\" try:",
"uses current axes. kwargs : key, value pairings Other keyword arguments are passed",
"np.nan_to_num(data.var()) == 0: # Don't try to compute KDE on singular data msg",
"on the y axis if vertical: x, y = y, x # Check",
"# Make sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) #",
"the data object has a name if label is None and hasattr(x, \"name\"):",
"vertical=False, kernel='gau', bw=\"scott\", gridsize=100, cut=3, clip=None, legend=True, ax=None, cumulative=False, **kwargs): \"\"\" Computes the",
"of reference method to determine kernel size, scalar factor, or scalar for each",
"= kernel == \"gau\" kde = smnp.KDEUnivariate(data) kde.fit(kernel, bw, fft, gridsize=gridsize, cut=cut, clip=clip)",
"cl_props[cl] cl_kdes[cl] = {'grid': cl_grid, 'y': cl_y} return cl_kdes, overall_kde def norm_kde(grid, y):",
"shade x = cl_kdes[cl]['grid'] y = cl_kdes[cl]['y'] if cl_palette is not None and",
"| 'biw' | 'epa' | 'tri' | 'triw' }, optional Code for shape",
"of (low, high) bounds for bivariate plots. legend : bool, optional If True,",
": bool, optional If True, add a legend or label the axes when",
"sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) return x, y",
"= {} for cl in np.unique(classes): cl_mask = classes == cl cl_values =",
"vertical: x, y = y, x # Check if a label was specified",
"= _univariate_kde(cl_values, **kde_kws) if ensure_norm: cl_y = norm_kde(grid=cl_grid, y=cl_y) # weight area under",
") The grid of values where the kde is evaluated. y: array-like, (n_grid_points,",
"support_min = max(data.min() - bw * cut, clip[0]) support_max = min(data.max() + bw",
"scalar | pair of scalars }, optional Name of reference method to determine",
"cut * bw from the extreme data points. clip : pair of scalars,",
"plot. Note that the underlying computational libraries have different interperetations for this parameter:",
"cl in cl_labels: _kwargs['label'] = cl_labels[cl] else: _kwargs['label'] = cl if cl ==",
"case 'overall' is one of the classes if 'overall' in np.unique(classes): overall_name =",
"the KDE. \"\"\" # Sort out the clipping if clip is None: clip",
"estimate using statsmodels.\"\"\" fft = kernel == \"gau\" kde = smnp.KDEUnivariate(data) kde.fit(kernel, bw,",
"* cut, clip[0]) support_max = min(data.max() + bw * cut, clip[1]) return np.linspace(support_min,",
"{'gau' | 'cos' | 'biw' | 'epa' | 'tri' | 'triw' }, optional",
"'y': overall_y} cl_props = Proportions(classes) cl_kdes = {} for cl in np.unique(classes): cl_mask",
"= cl if cl == overall_name: if not include_overall: continue _kwargs['ls'] = '--'",
"cumulative: grid, y = kde.support, kde.cdf else: grid, y = kde.support, kde.density return",
"cut, clip) # Make sure the density is nonnegative y = np.amax(np.c_[np.zeros_like(y), y],",
"UserWarning) if isinstance(bw, string_types): bw = \"scotts\" if bw == \"scott\" else bw",
"using statsmodels for kernel flexibility x, y = _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut,",
"axis=1) return x, y def _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=False): \"\"\"Compute",
"a legend or label the axes when possible. cumulative : bool, optional If",
"get_class_kdes(values, classes, ensure_norm=True, **kde_kws): \"\"\" KDEs for values with associated classes. Computes the",
"'--' # _kwargs['alpha'] = .2 _kwargs['zorder'] = 1 _kwargs['label'] = None # 'overall'",
"cut, clip): \"\"\"Compute a univariate kernel density estimate using scipy.\"\"\" try: kde =",
"grid, y = kde.support, kde.cdf else: grid, y = kde.support, kde.density return grid,",
"really need ensure_norm overall_grid, overall_y = _univariate_kde(values, **kde_kws) if ensure_norm: overall_y = norm_kde(grid=overall_grid,",
"pair of scalars, or pair of pair of scalars, optional Lower and upper",
"kernel to fit with. Bivariate KDE can only use gaussian kernel. bw :",
"estimate.\"\"\" support_min = max(data.min() - bw * cut, clip[0]) support_max = min(data.max() +",
"TypeError: kde = stats.gaussian_kde(data) if bw != \"scott\": # scipy default msg =",
"and upper bounds for datapoints used to fit KDE. Can provide a pair",
"{} for cl in np.unique(classes): cl_mask = classes == cl cl_values = values[cl_mask]",
"\"name\"): label = x.name # Decide if we're going to add a legend",
"The grid of values where the kde is evaluated. y: array-like, (n_grid_points, )",
"False def _univariate_kde(data, shade=False, vertical=False, kernel='gau', bw=\"scott\", gridsize=100, cut=3, clip=None, legend=True, ax=None, cumulative=False,",
"Calculate the KDE if np.nan_to_num(data.var()) == 0: # Don't try to compute KDE",
"upgrade scipy to use a different bandwidth.\") warnings.warn(msg, UserWarning) if isinstance(bw, string_types): bw",
"legend=True, ax=None, **kwargs): \"\"\"Plot a univariate kernel density estimate on one of the",
"class. Keys are class labels. overall_kde: dict Overall KDE (i.e. ignoring class labels)",
"facecolor = color if facecolor is None else facecolor # Draw the KDE",
"bool, optional If True, draw the cumulative distribution estimated by the kde. ax",
"Other keyword arguments are passed to ``plt.plot()`` or ``plt.contour{f}`` depending on whether a",
"of the data. gridsize : int, optional Number of discrete points in the",
"bounds for datapoints used to fit KDE. Can provide a pair of (low,",
"kernel == \"gau\" kde = smnp.KDEUnivariate(data) kde.fit(kernel, bw, fft, gridsize=gridsize, cut=cut, clip=clip) if",
"each class then weights each KDE by the number of points in each",
"None: ax = plt.gca() # Make sure the density is nonnegative y =",
"if ax is None: ax = plt.gca() # Make sure the density is",
"if cl == overall_name: if not include_overall: continue _kwargs['ls'] = '--' # _kwargs['alpha']",
"return cl_kdes, overall_kde def norm_kde(grid, y): tot = trapz(y=y, x=grid) return y /",
"ax.plot(x, y, **kwargs) color = line.get_color() line.remove() kwargs.pop(\"color\", None) facecolor = color if",
"# in case 'overall' is one of the classes if 'overall' in np.unique(classes):",
"of samples cl_y *= cl_props[cl] cl_kdes[cl] = {'grid': cl_grid, 'y': cl_y} return cl_kdes,",
"name if label is None and hasattr(x, \"name\"): label = x.name # Decide",
"specified in the call label = kwargs.pop(\"label\", None) # Otherwise check if the",
"explore.utils import Proportions try: import statsmodels.nonparametric.api as smnp _has_statsmodels = True except ImportError:",
"shade: if vertical: ax.fill_betweenx(y, 0, x, **shade_kws) else: ax.fill_between(x, 0, y, **shade_kws) #",
"**shade_kws) # Set the density axis minimum to 0 if vertical: ax.set_xlim(0, auto=None)",
"**kde_kws) # in case 'overall' is one of the classes if 'overall' in",
"and cl in cl_palette: _kwargs['color'] = cl_palette[cl] if cl_labels is not None and",
"label is None and hasattr(x, \"name\"): label = x.name # Decide if we're",
"raise ImportError(\"Cumulative distributions are currently \" \"only implemented in statsmodels. \" \"Please install",
": int, optional Number of discrete points in the evaluation grid. cut :",
"weight area under KDE by number of samples cl_y *= cl_props[cl] cl_kdes[cl] =",
"is None: clip = (-np.inf, np.inf) # Calculate the KDE if np.nan_to_num(data.var()) ==",
"stats from six import string_types import matplotlib.pyplot as plt from scipy.integrate import trapz",
"data object has a name if label is None and hasattr(x, \"name\"): label",
"Bivariate KDE can only use gaussian kernel. bw : {'scott' | 'silverman' |",
"to determine kernel size, scalar factor, or scalar for each dimension of the",
"return x, y def _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=False): \"\"\"Compute a",
"grid. cut : scalar, optional Draw the estimate to cut * bw from",
"bw, gridsize, cut, clip) y = kde(grid) return grid, y def _kde_support(data, bw,",
"y): tot = trapz(y=y, x=grid) return y / tot def _univariate_kdeplot(x, y, shade=True,",
"plot is being drawn. Output ------ x: array-like, (n_grid_points, ) The grid of",
"treats it as a scaling factor for the standard deviation of the data.",
"estimate on one of the axes.\"\"\" if ax is None: ax = plt.gca()",
"as a scaling factor for the standard deviation of the data. gridsize :",
"clip is None: clip = (-np.inf, np.inf) # Calculate the KDE if np.nan_to_num(data.var())",
"bandwidth.\") warnings.warn(msg, UserWarning) if isinstance(bw, string_types): bw = \"scotts\" if bw == \"scott\"",
"_has_statsmodels: # Prefer using statsmodels for kernel flexibility x, y = _statsmodels_univariate_kde(data, kernel,",
"kde_plt_kws.copy() _kwargs['shade'] = shade x = cl_kdes[cl]['grid'] y = cl_kdes[cl]['y'] if cl_palette is",
"def get_class_kdes(values, classes, ensure_norm=True, **kde_kws): \"\"\" KDEs for values with associated classes. Computes",
"x-axis. kernel : {'gau' | 'cos' | 'biw' | 'epa' | 'tri' |",
"pair of pair of scalars, optional Lower and upper bounds for datapoints used",
"kde.fit(kernel, bw, fft, gridsize=gridsize, cut=cut, clip=clip) if cumulative: grid, y = kde.support, kde.cdf",
"y=cl_y) # weight area under KDE by number of samples cl_y *= cl_props[cl]",
"kde.density return grid, y def _scipy_univariate_kde(data, bw, gridsize, cut, clip): \"\"\"Compute a univariate",
"\"\"\"Plot a univariate kernel density estimate on one of the axes.\"\"\" if ax",
"= x.name # Decide if we're going to add a legend legend =",
"(\"Ignoring bandwidth choice, \" \"please upgrade scipy to use a different bandwidth.\") warnings.warn(msg,",
"pair of scalars, optional Lower and upper bounds for datapoints used to fit",
"Proportions try: import statsmodels.nonparametric.api as smnp _has_statsmodels = True except ImportError: _has_statsmodels =",
"label=label, **kwargs) shade_kws = dict( facecolor=facecolor, alpha=kwargs.get(\"alpha\", 0.25), clip_on=kwargs.get(\"clip_on\", True), zorder=kwargs.get(\"zorder\", 1), )",
"in case 'overall' is one of the classes if 'overall' in np.unique(classes): overall_name",
"TODO: do we really need ensure_norm overall_grid, overall_y = _univariate_kde(values, **kde_kws) if ensure_norm:",
"**kwargs): \"\"\" Computes the KDE of univariate data. shade : bool, optional If",
"the number of points in each class. Also compute the overall KDE. Output",
"= (-np.inf, np.inf) # Calculate the KDE if np.nan_to_num(data.var()) == 0: # Don't",
"to cut * bw from the extreme data points. clip : pair of",
"'tri' | 'triw' }, optional Code for shape of kernel to fit with.",
"kernel. bw : {'scott' | 'silverman' | scalar | pair of scalars },",
"KDE. Can provide a pair of (low, high) bounds for bivariate plots. legend",
"object has a name if label is None and hasattr(x, \"name\"): label =",
"cl_kdes[overall_name] = overall_kde # plot the KDE for each class for cl in",
"gridsize, cut, clip) y = kde(grid) return grid, y def _kde_support(data, bw, gridsize='default',",
"= norm_kde(grid=cl_grid, y=cl_y) # weight area under KDE by number of samples cl_y",
"y def _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=False): \"\"\"Compute a univariate kernel",
"try: import statsmodels.nonparametric.api as smnp _has_statsmodels = True except ImportError: _has_statsmodels = False",
"= \"Kernel other than `gau` requires statsmodels.\" warnings.warn(msg, UserWarning) if cumulative: raise ImportError(\"Cumulative",
"\"Kernel other than `gau` requires statsmodels.\" warnings.warn(msg, UserWarning) if cumulative: raise ImportError(\"Cumulative distributions",
"grid of values where the kde is evaluated. y: array-like, (n_grid_points, ) The",
"axes, optional Axes to plot on, otherwise uses current axes. kwargs : key,",
"Sort out the clipping if clip is None: clip = (-np.inf, np.inf) #",
"if np.nan_to_num(data.var()) == 0: # Don't try to compute KDE on singular data",
"one of the axes.\"\"\" if ax is None: ax = plt.gca() # Make",
"bool, optional If True, shade in the area under the KDE curve (or",
"cl if cl == overall_name: if not include_overall: continue _kwargs['ls'] = '--' #",
"of the classes if 'overall' in np.unique(classes): overall_name = ''.join(np.unique(classes)) else: overall_name =",
"kernel size, scalar factor, or scalar for each dimension of the bivariate plot.",
"draw with filled contours when data is bivariate). vertical : bool, optional If",
"cl_kdes, overall_kde cl_kdes: dict KDE for each class. Keys are class labels. overall_kde:",
"= kwargs.pop(\"label\", None) # Otherwise check if the data object has a name",
"= color if facecolor is None else facecolor # Draw the KDE plot",
"facecolor is None else facecolor # Draw the KDE plot and, optionally, shade",
"# Use the active color cycle to find the plot color facecolor =",
"the cumulative distribution estimated by the kde. ax : matplotlib axes, optional Axes",
"samples cl_y *= cl_props[cl] cl_kdes[cl] = {'grid': cl_grid, 'y': cl_y} return cl_kdes, overall_kde",
"kwargs.pop(\"color\", None) facecolor = color if facecolor is None else facecolor # Draw",
"color if facecolor is None else facecolor # Draw the KDE plot and,",
"bw, gridsize, cut, clip, cumulative=cumulative) else: # Fall back to scipy if missing",
"statsmodels.\") x, y = _scipy_univariate_kde(data, bw, gridsize, cut, clip) # Make sure the",
"univariate kernel density estimate using statsmodels.\"\"\" fft = kernel == \"gau\" kde =",
"= np.amax(np.c_[np.zeros_like(y), y], axis=1) # Flip the data if the plot should be",
"for each dimension of the bivariate plot. Note that the underlying computational libraries",
"than `gau` requires statsmodels.\" warnings.warn(msg, UserWarning) if cumulative: raise ImportError(\"Cumulative distributions are currently",
"None and hasattr(x, \"name\"): label = x.name # Decide if we're going to",
"label = x.name # Decide if we're going to add a legend legend",
"cumulative=False): \"\"\"Compute a univariate kernel density estimate using statsmodels.\"\"\" fft = kernel ==",
"auto=None) else: ax.set_ylim(0, auto=None) # Draw the legend here handles, labels = ax.get_legend_handles_labels()",
"scipy if missing statsmodels if kernel != \"gau\": kernel = \"gau\" msg =",
"except ImportError: _has_statsmodels = False def _univariate_kde(data, shade=False, vertical=False, kernel='gau', bw=\"scott\", gridsize=100, cut=3,",
"= kde_plt_kws.copy() _kwargs['shade'] = shade x = cl_kdes[cl]['grid'] y = cl_kdes[cl]['y'] if cl_palette",
") The values of the KDE. \"\"\" # Sort out the clipping if",
"statsmodels if kernel != \"gau\": kernel = \"gau\" msg = \"Kernel other than",
"kernel = \"gau\" msg = \"Kernel other than `gau` requires statsmodels.\" warnings.warn(msg, UserWarning)",
"kde(grid) return grid, y def _kde_support(data, bw, gridsize='default', cut=3, clip=None): \"\"\"Establish support for",
"if ensure_norm: overall_y = norm_kde(grid=overall_grid, y=overall_y) overall_kde = {'grid': overall_grid, 'y': overall_y} cl_props",
"y=overall_y) overall_kde = {'grid': overall_grid, 'y': overall_y} cl_props = Proportions(classes) cl_kdes = {}",
"y: array-like, (n_grid_points, ) The values of the KDE. \"\"\" # Sort out",
"is being drawn. Output ------ x: array-like, (n_grid_points, ) The grid of values",
"/ tot def _univariate_kdeplot(x, y, shade=True, vertical=False, legend=True, ax=None, **kwargs): \"\"\"Plot a univariate",
"x, y = y, x # Check if a label was specified in",
"minimum to 0 if vertical: ax.set_xlim(0, auto=None) else: ax.set_ylim(0, auto=None) # Draw the",
"not include_overall: continue _kwargs['ls'] = '--' # _kwargs['alpha'] = .2 _kwargs['zorder'] = 1",
"to plot on, otherwise uses current axes. kwargs : key, value pairings Other",
"KDE by number of samples cl_y *= cl_props[cl] cl_kdes[cl] = {'grid': cl_grid, 'y':",
"''.join(np.unique(classes)) else: overall_name = 'overall' cl_kdes[overall_name] = overall_kde # plot the KDE for",
"the underlying computational libraries have different interperetations for this parameter: ``statsmodels`` uses it",
"kernel density estimate on one of the axes.\"\"\" if ax is None: ax",
"= Proportions(classes) cl_kdes = {} for cl in np.unique(classes): cl_mask = classes ==",
"are currently \" \"only implemented in statsmodels. \" \"Please install statsmodels.\") x, y",
"plot the KDE for each class for cl in cl_kdes.keys(): _kwargs = kde_plt_kws.copy()",
"data msg = \"Data must have variance to compute a kernel density estimate.\"",
"scipy.\"\"\" try: kde = stats.gaussian_kde(data, bw_method=bw) except TypeError: kde = stats.gaussian_kde(data) if bw",
"bw != \"scott\": # scipy default msg = (\"Ignoring bandwidth choice, \" \"please",
"_kwargs['label'] = cl_labels[cl] else: _kwargs['label'] = cl if cl == overall_name: if not",
"a pair of (low, high) bounds for bivariate plots. legend : bool, optional",
"cut=3, clip=None): \"\"\"Establish support for a kernel density estimate.\"\"\" support_min = max(data.min() -",
"can only use gaussian kernel. bw : {'scott' | 'silverman' | scalar |",
"different interperetations for this parameter: ``statsmodels`` uses it directly, but ``scipy`` treats it",
"scipy default msg = (\"Ignoring bandwidth choice, \" \"please upgrade scipy to use",
"of the bivariate plot. Note that the underlying computational libraries have different interperetations",
"nonnegative y = np.amax(np.c_[np.zeros_like(y), y], axis=1) # Flip the data if the plot",
"KDE plot and, optionally, shade ax.plot(x, y, color=color, label=label, **kwargs) shade_kws = dict(",
"and cl in cl_labels: _kwargs['label'] = cl_labels[cl] else: _kwargs['label'] = cl if cl",
"_has_statsmodels = False def _univariate_kde(data, shade=False, vertical=False, kernel='gau', bw=\"scott\", gridsize=100, cut=3, clip=None, legend=True,",
"axes.\"\"\" if ax is None: ax = plt.gca() # Make sure the density",
"bw = getattr(kde, \"%s_factor\" % bw)() * np.std(data) grid = _kde_support(data, bw, gridsize,",
"def norm_kde(grid, y): tot = trapz(y=y, x=grid) return y / tot def _univariate_kdeplot(x,",
"of pair of scalars, optional Lower and upper bounds for datapoints used to",
"plot should be on the y axis if vertical: x, y = y,",
"legend here handles, labels = ax.get_legend_handles_labels() if legend and handles: ax.legend(loc=\"best\") return ax",
"import numpy as np import warnings from scipy import stats from six import",
"the axes when possible. cumulative : bool, optional If True, draw the cumulative",
"stats.gaussian_kde(data, bw_method=bw) except TypeError: kde = stats.gaussian_kde(data) if bw != \"scott\": # scipy",
"currently \" \"only implemented in statsmodels. \" \"Please install statsmodels.\") x, y =",
"passed to ``plt.plot()`` or ``plt.contour{f}`` depending on whether a univariate or bivariate plot",
"scipy to use a different bandwidth.\") warnings.warn(msg, UserWarning) if isinstance(bw, string_types): bw =",
"\"\"\" # Sort out the clipping if clip is None: clip = (-np.inf,",
"0 if vertical: ax.set_xlim(0, auto=None) else: ax.set_ylim(0, auto=None) # Draw the legend here",
"norm_kde(grid, y): tot = trapz(y=y, x=grid) return y / tot def _univariate_kdeplot(x, y,",
"smnp _has_statsmodels = True except ImportError: _has_statsmodels = False def _univariate_kde(data, shade=False, vertical=False,",
"\"\"\" Computes the KDE of univariate data. shade : bool, optional If True,",
"pairings Other keyword arguments are passed to ``plt.plot()`` or ``plt.contour{f}`` depending on whether",
"gridsize, cut, clip, cumulative=cumulative) else: # Fall back to scipy if missing statsmodels",
"None: clip = (-np.inf, np.inf) # Calculate the KDE if np.nan_to_num(data.var()) == 0:",
"import trapz from explore.utils import Proportions try: import statsmodels.nonparametric.api as smnp _has_statsmodels =",
": matplotlib axes, optional Axes to plot on, otherwise uses current axes. kwargs",
"_kde_support(data, bw, gridsize='default', cut=3, clip=None): \"\"\"Establish support for a kernel density estimate.\"\"\" support_min",
"a name if label is None and hasattr(x, \"name\"): label = x.name #",
"plot and, optionally, shade ax.plot(x, y, color=color, label=label, **kwargs) shade_kws = dict( facecolor=facecolor,",
"legend=True, ax=None, kde_kws={}, kde_plt_kws={}): cl_kdes, overall_kde = get_class_kdes(values, classes, **kde_kws) # in case",
"scaling factor for the standard deviation of the data. gridsize : int, optional",
"bw == \"scott\" else bw bw = getattr(kde, \"%s_factor\" % bw)() * np.std(data)",
"_has_statsmodels = True except ImportError: _has_statsmodels = False def _univariate_kde(data, shade=False, vertical=False, kernel='gau',",
"KDE if np.nan_to_num(data.var()) == 0: # Don't try to compute KDE on singular",
"overall_grid, 'y': overall_y} cl_props = Proportions(classes) cl_kdes = {} for cl in np.unique(classes):",
"x=grid) return y / tot def _univariate_kdeplot(x, y, shade=True, vertical=False, legend=True, ax=None, **kwargs):",
"facecolor = kwargs.pop(\"facecolor\", None) line, = ax.plot(x, y, **kwargs) color = line.get_color() line.remove()",
"reference method to determine kernel size, scalar factor, or scalar for each dimension",
"data. shade : bool, optional If True, shade in the area under the",
"then weights each KDE by the number of points in each class. Also",
"= .2 _kwargs['zorder'] = 1 _kwargs['label'] = None # 'overall' _kwargs['color'] = 'gray'",
"= line.get_color() line.remove() kwargs.pop(\"color\", None) facecolor = color if facecolor is None else",
"y = kde(grid) return grid, y def _kde_support(data, bw, gridsize='default', cut=3, clip=None): \"\"\"Establish",
"clip = (-np.inf, np.inf) # Calculate the KDE if np.nan_to_num(data.var()) == 0: #",
"y axis if vertical: x, y = y, x # Check if a",
"gridsize=100, cut=3, clip=None, legend=True, ax=None, cumulative=False, **kwargs): \"\"\" Computes the KDE of univariate",
"on, otherwise uses current axes. kwargs : key, value pairings Other keyword arguments",
"x, y = _statsmodels_univariate_kde(data, kernel, bw, gridsize, cut, clip, cumulative=cumulative) else: # Fall",
"kernel='gau', bw=\"scott\", gridsize=100, cut=3, clip=None, legend=True, ax=None, cumulative=False, **kwargs): \"\"\" Computes the KDE",
"used to fit KDE. Can provide a pair of (low, high) bounds for",
"kde. ax : matplotlib axes, optional Axes to plot on, otherwise uses current",
"matplotlib axes, optional Axes to plot on, otherwise uses current axes. kwargs :",
"must have variance to compute a kernel density estimate.\" warnings.warn(msg, UserWarning) x, y",
"\" \"please upgrade scipy to use a different bandwidth.\") warnings.warn(msg, UserWarning) if isinstance(bw,",
"(low, high) bounds for bivariate plots. legend : bool, optional If True, add",
"cumulative: raise ImportError(\"Cumulative distributions are currently \" \"only implemented in statsmodels. \" \"Please",
"cl_labels is not None and cl in cl_labels: _kwargs['label'] = cl_labels[cl] else: _kwargs['label']",
"the extreme data points. clip : pair of scalars, or pair of pair",
"x, y = np.array([]), np.array([]) elif _has_statsmodels: # Prefer using statsmodels for kernel",
"the standard deviation of the data. gridsize : int, optional Number of discrete",
"cumulative=cumulative) else: # Fall back to scipy if missing statsmodels if kernel !=",
"grid, y def _kde_support(data, bw, gridsize='default', cut=3, clip=None): \"\"\"Establish support for a kernel",
"gridsize, cut, clip, cumulative=False): \"\"\"Compute a univariate kernel density estimate using statsmodels.\"\"\" fft",
"Proportions(classes) cl_kdes = {} for cl in np.unique(classes): cl_mask = classes == cl",
"y = y, x # Check if a label was specified in the",
"when possible. cumulative : bool, optional If True, draw the cumulative distribution estimated",
"univariate kernel density estimate on one of the axes.\"\"\" if ax is None:",
": key, value pairings Other keyword arguments are passed to ``plt.plot()`` or ``plt.contour{f}``",
"with filled contours when data is bivariate). vertical : bool, optional If True,"
] |
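# --- Usage sketch (added; not part of the original module) ---
# A minimal, hypothetical example of driving the functions above. It assumes
# the external `explore.utils.Proportions` dependency is available and behaves
# like a mapping from class label to class frequency (which is how
# `get_class_kdes` indexes it). The synthetic data and the palette are
# illustrative choices, not taken from the source.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    values = np.concatenate([rng.normal(0, 1, 200), rng.normal(3, 0.5, 100)])
    classes = np.array(['a'] * 200 + ['b'] * 100)

    # Each class curve is normalized then scaled by its class proportion,
    # so the class curves sum (approximately) to the overall density.
    cl_kdes, overall_kde = get_class_kdes(values, classes)

    fig, ax = plt.subplots()
    _univariate_conditional_kdeplot(values, classes,
                                    cl_palette={'a': 'C0', 'b': 'C1'},
                                    ax=ax)
    plt.show()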
# repo: s-mostafa-a/a
import numpy as np
import torch
import time

t1 = time.time()
for i in range(1000000):
    x_tensor = torch.empty(5, 3)
t2 = time.time()
for i in range(1000000):
    x_ndarr = np.empty((5, 3))
t3 = time.time()

print('making empty array comparison:')
delta1 = t2 - t1
delta2 = t3 - t2  # bug fix: the original used `t3 - t1`, which also counted the torch loop
print(f'torch: {delta1} sec')
print(f'numpy: {delta2} sec')
print(f'''winner: {'torch' if delta1 < delta2 else 'numpy'}''')
# my computer's outputs (macbook pro without cuda):
# making empty array comparison:
# torch: 2.2384519577026367 sec
# numpy: 2.758033275604248 sec
# winner: torch
# (note: the numpy figure above was recorded with the `t3 - t1` bug, so it
# includes the torch loop; subtracting it gives roughly 0.52 sec, which
# would flip the winner to numpy.)

t4 = time.time()
for i in range(1000000):
    x_tensor = torch.zeros(5, 3)
t5 = time.time()
for i in range(1000000):
    x_ndarr = np.zeros((5, 3))
t6 = time.time()

print('making zeros array comparison:')
delta3 = t5 - t4
delta4 = t6 - t5
print(f'torch: {delta3} sec')
print(f'numpy: {delta4} sec')
print(f'''winner: {'torch' if delta3 < delta4 else 'numpy'}''')
# my computer's outputs (macbook pro without cuda):
# making zeros array comparison:
# torch: 3.497465133666992 sec
# numpy: 0.5160698890686035 sec
# winner: numpy
"i in range(1000000): x_ndarr = np.zeros((5, 3)) t6 = time.time() print('making zeros array",
"print(f'torch: {delta1} sec') print(f'numpy: {delta2} sec') print(f'''winner: {'torch' if delta1 < delta2 else",
"= t5 - t4 delta4 = t6 - t5 print(f'torch: {delta3} sec') print(f'numpy:",
"t4 delta4 = t6 - t5 print(f'torch: {delta3} sec') print(f'numpy: {delta4} sec') print(f'''winner:",
"for i in range(1000000): x_tensor = torch.zeros(5, 3) t5 = time.time() for i",
"x_tensor = torch.empty(5, 3) t2 = time.time() for i in range(1000000): x_ndarr =",
"time.time() print('making zeros array comparison:') delta3 = t5 - t4 delta4 = t6",
"time.time() print('making empty array comparison:') delta1 = t2 - t1 delta2 = t3",
"t5 print(f'torch: {delta3} sec') print(f'numpy: {delta4} sec') print(f'''winner: {'torch' if delta3 < delta4"
] |
[
"Counter[str]) -> Counter[str]: \"\"\" Applies a single step to the given pair_counter \"\"\"",
"ruleset = dict(rule.split(\" -> \") for rule in rules) return (template, ruleset) def",
"int]: \"\"\" Calculates the required answers given the original template and the pair",
"step(ruleset, pair_counter) part1 = calculate_answer(template, pair_counter) for _ in range(30): pair_counter = step(ruleset,",
"the lowest count from the highest count and returns the answer \"\"\" letter_counter",
"= \"../../input/14.txt\" Ruleset = dict[str, str] def parse_input() -> tuple[str, Ruleset]: \"\"\" Parses",
"the highest count and returns the answer \"\"\" letter_counter = Counter(template[-1]) for pair,",
"for pair in zip(template, template[1:])) pair_counter = Counter(pairs) for _ in range(10): pair_counter",
"letter occurs by adding the counts of pairs where the given letter comes",
"the required answers given the original template and the pair insertion rules \"\"\"",
"required answers given the original template and the pair insertion rules \"\"\" pairs",
"return (part1, part2) if __name__ == \"__main__\": template, ruleset = parse_input() part1, part2",
"ruleset: Ruleset) -> tuple[int, int]: \"\"\" Calculates the required answers given the original",
"Parses the input and returns the polymer template and the pair insertion rules",
"the counts of pairs where the given letter comes first and 1 for",
"calculate_answer(template: str, pair_counter: Counter[str]) -> int: \"\"\" Calculates how many times each letter",
"+= count return new_pair_counter def calculate_answer(template: str, pair_counter: Counter[str]) -> int: \"\"\" Calculates",
"first and 1 for the last letter of the original template (which does",
"\") for rule in rules) return (template, ruleset) def step(ruleset: Ruleset, pair_counter: Counter[str])",
"pair_counter.items(): inserted = ruleset[pair] first, second = pair new_pair_counter[first + inserted] += count",
"for the last letter of the original template (which does not change), then",
"the polymer template and the pair insertion rules \"\"\" with open(INPUT_FILE) as f:",
"for _ in range(30): pair_counter = step(ruleset, pair_counter) part2 = calculate_answer(template, pair_counter) return",
"original template (which does not change), then subtracts the lowest count from the",
"in range(10): pair_counter = step(ruleset, pair_counter) part1 = calculate_answer(template, pair_counter) for _ in",
"not change), then subtracts the lowest count from the highest count and returns",
"INPUT_FILE = \"../../input/14.txt\" Ruleset = dict[str, str] def parse_input() -> tuple[str, Ruleset]: \"\"\"",
"Counter[str]: \"\"\" Applies a single step to the given pair_counter \"\"\" new_pair_counter: Counter[str]",
"rules \"\"\" with open(INPUT_FILE) as f: template, _, *rules = f.read().splitlines() ruleset =",
"for pair, count in pair_counter.items(): first_letter, _ = pair letter_counter[first_letter] += count return",
"= calculate_answer(template, pair_counter) return (part1, part2) if __name__ == \"__main__\": template, ruleset =",
"_ in range(10): pair_counter = step(ruleset, pair_counter) part1 = calculate_answer(template, pair_counter) for _",
"lowest count from the highest count and returns the answer \"\"\" letter_counter =",
"count return new_pair_counter def calculate_answer(template: str, pair_counter: Counter[str]) -> int: \"\"\" Calculates how",
"\"\"\" Applies a single step to the given pair_counter \"\"\" new_pair_counter: Counter[str] =",
"if __name__ == \"__main__\": template, ruleset = parse_input() part1, part2 = solve(template, ruleset)",
"Counter[str]) -> int: \"\"\" Calculates how many times each letter occurs by adding",
"count new_pair_counter[inserted + second] += count return new_pair_counter def calculate_answer(template: str, pair_counter: Counter[str])",
"from the highest count and returns the answer \"\"\" letter_counter = Counter(template[-1]) for",
"pair_counter: Counter[str]) -> int: \"\"\" Calculates how many times each letter occurs by",
"(which does not change), then subtracts the lowest count from the highest count",
"single step to the given pair_counter \"\"\" new_pair_counter: Counter[str] = Counter() for pair,",
"the original template and the pair insertion rules \"\"\" pairs = (\"\".join(pair) for",
"-> \") for rule in rules) return (template, ruleset) def step(ruleset: Ruleset, pair_counter:",
"given pair_counter \"\"\" new_pair_counter: Counter[str] = Counter() for pair, count in pair_counter.items(): inserted",
"= step(ruleset, pair_counter) part2 = calculate_answer(template, pair_counter) return (part1, part2) if __name__ ==",
"the pair insertion rules \"\"\" with open(INPUT_FILE) as f: template, _, *rules =",
"new_pair_counter[first + inserted] += count new_pair_counter[inserted + second] += count return new_pair_counter def",
"count return max(letter_counter.values()) - min(letter_counter.values()) def solve(template: str, ruleset: Ruleset) -> tuple[int, int]:",
"pairs = (\"\".join(pair) for pair in zip(template, template[1:])) pair_counter = Counter(pairs) for _",
"= step(ruleset, pair_counter) part1 = calculate_answer(template, pair_counter) for _ in range(30): pair_counter =",
"pair_counter = step(ruleset, pair_counter) part1 = calculate_answer(template, pair_counter) for _ in range(30): pair_counter",
"part2) if __name__ == \"__main__\": template, ruleset = parse_input() part1, part2 = solve(template,",
"how many times each letter occurs by adding the counts of pairs where",
"by adding the counts of pairs where the given letter comes first and",
"min(letter_counter.values()) def solve(template: str, ruleset: Ruleset) -> tuple[int, int]: \"\"\" Calculates the required",
"with open(INPUT_FILE) as f: template, _, *rules = f.read().splitlines() ruleset = dict(rule.split(\" ->",
"in pair_counter.items(): first_letter, _ = pair letter_counter[first_letter] += count return max(letter_counter.values()) - min(letter_counter.values())",
"range(10): pair_counter = step(ruleset, pair_counter) part1 = calculate_answer(template, pair_counter) for _ in range(30):",
"in pair_counter.items(): inserted = ruleset[pair] first, second = pair new_pair_counter[first + inserted] +=",
"answers given the original template and the pair insertion rules \"\"\" pairs =",
"and the pair insertion rules \"\"\" pairs = (\"\".join(pair) for pair in zip(template,",
"*rules = f.read().splitlines() ruleset = dict(rule.split(\" -> \") for rule in rules) return",
"step(ruleset, pair_counter) part2 = calculate_answer(template, pair_counter) return (part1, part2) if __name__ == \"__main__\":",
"times each letter occurs by adding the counts of pairs where the given",
"= calculate_answer(template, pair_counter) for _ in range(30): pair_counter = step(ruleset, pair_counter) part2 =",
"Counter(template[-1]) for pair, count in pair_counter.items(): first_letter, _ = pair letter_counter[first_letter] += count",
"-> int: \"\"\" Calculates how many times each letter occurs by adding the",
"-> tuple[int, int]: \"\"\" Calculates the required answers given the original template and",
"counts of pairs where the given letter comes first and 1 for the",
"import Counter INPUT_FILE = \"../../input/14.txt\" Ruleset = dict[str, str] def parse_input() -> tuple[str,",
"returns the answer \"\"\" letter_counter = Counter(template[-1]) for pair, count in pair_counter.items(): first_letter,",
"highest count and returns the answer \"\"\" letter_counter = Counter(template[-1]) for pair, count",
"template, _, *rules = f.read().splitlines() ruleset = dict(rule.split(\" -> \") for rule in",
"calculate_answer(template, pair_counter) return (part1, part2) if __name__ == \"__main__\": template, ruleset = parse_input()",
"= dict(rule.split(\" -> \") for rule in rules) return (template, ruleset) def step(ruleset:",
"def step(ruleset: Ruleset, pair_counter: Counter[str]) -> Counter[str]: \"\"\" Applies a single step to",
"f: template, _, *rules = f.read().splitlines() ruleset = dict(rule.split(\" -> \") for rule",
"Counter() for pair, count in pair_counter.items(): inserted = ruleset[pair] first, second = pair",
"-> tuple[str, Ruleset]: \"\"\" Parses the input and returns the polymer template and",
"given the original template and the pair insertion rules \"\"\" pairs = (\"\".join(pair)",
"the input and returns the polymer template and the pair insertion rules \"\"\"",
"pair insertion rules \"\"\" with open(INPUT_FILE) as f: template, _, *rules = f.read().splitlines()",
"pair_counter) return (part1, part2) if __name__ == \"__main__\": template, ruleset = parse_input() part1,",
"the given pair_counter \"\"\" new_pair_counter: Counter[str] = Counter() for pair, count in pair_counter.items():",
"ruleset[pair] first, second = pair new_pair_counter[first + inserted] += count new_pair_counter[inserted + second]",
"template and the pair insertion rules \"\"\" pairs = (\"\".join(pair) for pair in",
"count in pair_counter.items(): inserted = ruleset[pair] first, second = pair new_pair_counter[first + inserted]",
"many times each letter occurs by adding the counts of pairs where the",
"given letter comes first and 1 for the last letter of the original",
"and 1 for the last letter of the original template (which does not",
"answer \"\"\" letter_counter = Counter(template[-1]) for pair, count in pair_counter.items(): first_letter, _ =",
"\"\"\" Parses the input and returns the polymer template and the pair insertion",
"tuple[str, Ruleset]: \"\"\" Parses the input and returns the polymer template and the",
"parse_input() -> tuple[str, Ruleset]: \"\"\" Parses the input and returns the polymer template",
"original template and the pair insertion rules \"\"\" pairs = (\"\".join(pair) for pair",
"pair_counter = step(ruleset, pair_counter) part2 = calculate_answer(template, pair_counter) return (part1, part2) if __name__",
"each letter occurs by adding the counts of pairs where the given letter",
"the given letter comes first and 1 for the last letter of the",
"pair insertion rules \"\"\" pairs = (\"\".join(pair) for pair in zip(template, template[1:])) pair_counter",
"+= count return max(letter_counter.values()) - min(letter_counter.values()) def solve(template: str, ruleset: Ruleset) -> tuple[int,",
"step(ruleset: Ruleset, pair_counter: Counter[str]) -> Counter[str]: \"\"\" Applies a single step to the",
"new_pair_counter: Counter[str] = Counter() for pair, count in pair_counter.items(): inserted = ruleset[pair] first,",
"return max(letter_counter.values()) - min(letter_counter.values()) def solve(template: str, ruleset: Ruleset) -> tuple[int, int]: \"\"\"",
"the pair insertion rules \"\"\" pairs = (\"\".join(pair) for pair in zip(template, template[1:]))",
"calculate_answer(template, pair_counter) for _ in range(30): pair_counter = step(ruleset, pair_counter) part2 = calculate_answer(template,",
"collections import Counter INPUT_FILE = \"../../input/14.txt\" Ruleset = dict[str, str] def parse_input() ->",
"return new_pair_counter def calculate_answer(template: str, pair_counter: Counter[str]) -> int: \"\"\" Calculates how many",
"to the given pair_counter \"\"\" new_pair_counter: Counter[str] = Counter() for pair, count in",
"range(30): pair_counter = step(ruleset, pair_counter) part2 = calculate_answer(template, pair_counter) return (part1, part2) if",
"Applies a single step to the given pair_counter \"\"\" new_pair_counter: Counter[str] = Counter()",
"(template, ruleset) def step(ruleset: Ruleset, pair_counter: Counter[str]) -> Counter[str]: \"\"\" Applies a single",
"def parse_input() -> tuple[str, Ruleset]: \"\"\" Parses the input and returns the polymer",
"_ = pair letter_counter[first_letter] += count return max(letter_counter.values()) - min(letter_counter.values()) def solve(template: str,",
"inserted = ruleset[pair] first, second = pair new_pair_counter[first + inserted] += count new_pair_counter[inserted",
"- min(letter_counter.values()) def solve(template: str, ruleset: Ruleset) -> tuple[int, int]: \"\"\" Calculates the",
"template[1:])) pair_counter = Counter(pairs) for _ in range(10): pair_counter = step(ruleset, pair_counter) part1",
"new_pair_counter[inserted + second] += count return new_pair_counter def calculate_answer(template: str, pair_counter: Counter[str]) ->",
"in range(30): pair_counter = step(ruleset, pair_counter) part2 = calculate_answer(template, pair_counter) return (part1, part2)",
"\"\"\" with open(INPUT_FILE) as f: template, _, *rules = f.read().splitlines() ruleset = dict(rule.split(\"",
"\"\"\" letter_counter = Counter(template[-1]) for pair, count in pair_counter.items(): first_letter, _ = pair",
"zip(template, template[1:])) pair_counter = Counter(pairs) for _ in range(10): pair_counter = step(ruleset, pair_counter)",
"in zip(template, template[1:])) pair_counter = Counter(pairs) for _ in range(10): pair_counter = step(ruleset,",
"letter_counter[first_letter] += count return max(letter_counter.values()) - min(letter_counter.values()) def solve(template: str, ruleset: Ruleset) ->",
"insertion rules \"\"\" pairs = (\"\".join(pair) for pair in zip(template, template[1:])) pair_counter =",
"Calculates the required answers given the original template and the pair insertion rules",
"pairs where the given letter comes first and 1 for the last letter",
"def calculate_answer(template: str, pair_counter: Counter[str]) -> int: \"\"\" Calculates how many times each",
"\"../../input/14.txt\" Ruleset = dict[str, str] def parse_input() -> tuple[str, Ruleset]: \"\"\" Parses the",
"+ inserted] += count new_pair_counter[inserted + second] += count return new_pair_counter def calculate_answer(template:",
"second] += count return new_pair_counter def calculate_answer(template: str, pair_counter: Counter[str]) -> int: \"\"\"",
"Counter(pairs) for _ in range(10): pair_counter = step(ruleset, pair_counter) part1 = calculate_answer(template, pair_counter)",
"rule in rules) return (template, ruleset) def step(ruleset: Ruleset, pair_counter: Counter[str]) -> Counter[str]:",
"inserted] += count new_pair_counter[inserted + second] += count return new_pair_counter def calculate_answer(template: str,",
"dict[str, str] def parse_input() -> tuple[str, Ruleset]: \"\"\" Parses the input and returns",
"of the original template (which does not change), then subtracts the lowest count",
"\"\"\" new_pair_counter: Counter[str] = Counter() for pair, count in pair_counter.items(): inserted = ruleset[pair]",
"polymer template and the pair insertion rules \"\"\" with open(INPUT_FILE) as f: template,",
"+= count new_pair_counter[inserted + second] += count return new_pair_counter def calculate_answer(template: str, pair_counter:",
"= f.read().splitlines() ruleset = dict(rule.split(\" -> \") for rule in rules) return (template,",
"ruleset) def step(ruleset: Ruleset, pair_counter: Counter[str]) -> Counter[str]: \"\"\" Applies a single step",
"Ruleset = dict[str, str] def parse_input() -> tuple[str, Ruleset]: \"\"\" Parses the input",
"change), then subtracts the lowest count from the highest count and returns the",
"count from the highest count and returns the answer \"\"\" letter_counter = Counter(template[-1])",
"returns the polymer template and the pair insertion rules \"\"\" with open(INPUT_FILE) as",
"subtracts the lowest count from the highest count and returns the answer \"\"\"",
"-> Counter[str]: \"\"\" Applies a single step to the given pair_counter \"\"\" new_pair_counter:",
"Ruleset]: \"\"\" Parses the input and returns the polymer template and the pair",
"(part1, part2) if __name__ == \"__main__\": template, ruleset = parse_input() part1, part2 =",
"+ second] += count return new_pair_counter def calculate_answer(template: str, pair_counter: Counter[str]) -> int:",
"Counter INPUT_FILE = \"../../input/14.txt\" Ruleset = dict[str, str] def parse_input() -> tuple[str, Ruleset]:",
"= (\"\".join(pair) for pair in zip(template, template[1:])) pair_counter = Counter(pairs) for _ in",
"comes first and 1 for the last letter of the original template (which",
"rules \"\"\" pairs = (\"\".join(pair) for pair in zip(template, template[1:])) pair_counter = Counter(pairs)",
"dict(rule.split(\" -> \") for rule in rules) return (template, ruleset) def step(ruleset: Ruleset,",
"count in pair_counter.items(): first_letter, _ = pair letter_counter[first_letter] += count return max(letter_counter.values()) -",
"max(letter_counter.values()) - min(letter_counter.values()) def solve(template: str, ruleset: Ruleset) -> tuple[int, int]: \"\"\" Calculates",
"in rules) return (template, ruleset) def step(ruleset: Ruleset, pair_counter: Counter[str]) -> Counter[str]: \"\"\"",
"= pair new_pair_counter[first + inserted] += count new_pair_counter[inserted + second] += count return",
"does not change), then subtracts the lowest count from the highest count and",
"the answer \"\"\" letter_counter = Counter(template[-1]) for pair, count in pair_counter.items(): first_letter, _",
"pair, count in pair_counter.items(): inserted = ruleset[pair] first, second = pair new_pair_counter[first +",
"pair, count in pair_counter.items(): first_letter, _ = pair letter_counter[first_letter] += count return max(letter_counter.values())",
"for pair, count in pair_counter.items(): inserted = ruleset[pair] first, second = pair new_pair_counter[first",
"pair letter_counter[first_letter] += count return max(letter_counter.values()) - min(letter_counter.values()) def solve(template: str, ruleset: Ruleset)",
"part1 = calculate_answer(template, pair_counter) for _ in range(30): pair_counter = step(ruleset, pair_counter) part2",
"template (which does not change), then subtracts the lowest count from the highest",
"str] def parse_input() -> tuple[str, Ruleset]: \"\"\" Parses the input and returns the",
"rules) return (template, ruleset) def step(ruleset: Ruleset, pair_counter: Counter[str]) -> Counter[str]: \"\"\" Applies",
"for _ in range(10): pair_counter = step(ruleset, pair_counter) part1 = calculate_answer(template, pair_counter) for",
"occurs by adding the counts of pairs where the given letter comes first",
"= pair letter_counter[first_letter] += count return max(letter_counter.values()) - min(letter_counter.values()) def solve(template: str, ruleset:",
"= ruleset[pair] first, second = pair new_pair_counter[first + inserted] += count new_pair_counter[inserted +",
"solve(template: str, ruleset: Ruleset) -> tuple[int, int]: \"\"\" Calculates the required answers given",
"int: \"\"\" Calculates how many times each letter occurs by adding the counts",
"of pairs where the given letter comes first and 1 for the last",
"as f: template, _, *rules = f.read().splitlines() ruleset = dict(rule.split(\" -> \") for",
"= dict[str, str] def parse_input() -> tuple[str, Ruleset]: \"\"\" Parses the input and",
"pair_counter.items(): first_letter, _ = pair letter_counter[first_letter] += count return max(letter_counter.values()) - min(letter_counter.values()) def",
"pair in zip(template, template[1:])) pair_counter = Counter(pairs) for _ in range(10): pair_counter =",
"_, *rules = f.read().splitlines() ruleset = dict(rule.split(\" -> \") for rule in rules)",
"f.read().splitlines() ruleset = dict(rule.split(\" -> \") for rule in rules) return (template, ruleset)",
"= Counter(pairs) for _ in range(10): pair_counter = step(ruleset, pair_counter) part1 = calculate_answer(template,",
"return (template, ruleset) def step(ruleset: Ruleset, pair_counter: Counter[str]) -> Counter[str]: \"\"\" Applies a",
"\"\"\" Calculates how many times each letter occurs by adding the counts of",
"open(INPUT_FILE) as f: template, _, *rules = f.read().splitlines() ruleset = dict(rule.split(\" -> \")",
"str, ruleset: Ruleset) -> tuple[int, int]: \"\"\" Calculates the required answers given the",
"Ruleset) -> tuple[int, int]: \"\"\" Calculates the required answers given the original template",
"count and returns the answer \"\"\" letter_counter = Counter(template[-1]) for pair, count in",
"letter_counter = Counter(template[-1]) for pair, count in pair_counter.items(): first_letter, _ = pair letter_counter[first_letter]",
"insertion rules \"\"\" with open(INPUT_FILE) as f: template, _, *rules = f.read().splitlines() ruleset",
"and returns the answer \"\"\" letter_counter = Counter(template[-1]) for pair, count in pair_counter.items():",
"where the given letter comes first and 1 for the last letter of",
"Ruleset, pair_counter: Counter[str]) -> Counter[str]: \"\"\" Applies a single step to the given",
"input and returns the polymer template and the pair insertion rules \"\"\" with",
"new_pair_counter def calculate_answer(template: str, pair_counter: Counter[str]) -> int: \"\"\" Calculates how many times",
"__name__ == \"__main__\": template, ruleset = parse_input() part1, part2 = solve(template, ruleset) print(part1)",
"tuple[int, int]: \"\"\" Calculates the required answers given the original template and the",
"then subtracts the lowest count from the highest count and returns the answer",
"\"\"\" pairs = (\"\".join(pair) for pair in zip(template, template[1:])) pair_counter = Counter(pairs) for",
"== \"__main__\": template, ruleset = parse_input() part1, part2 = solve(template, ruleset) print(part1) print(part2)",
"for rule in rules) return (template, ruleset) def step(ruleset: Ruleset, pair_counter: Counter[str]) ->",
"Calculates how many times each letter occurs by adding the counts of pairs",
"and the pair insertion rules \"\"\" with open(INPUT_FILE) as f: template, _, *rules",
"_ in range(30): pair_counter = step(ruleset, pair_counter) part2 = calculate_answer(template, pair_counter) return (part1,",
"template and the pair insertion rules \"\"\" with open(INPUT_FILE) as f: template, _,",
"second = pair new_pair_counter[first + inserted] += count new_pair_counter[inserted + second] += count",
"str, pair_counter: Counter[str]) -> int: \"\"\" Calculates how many times each letter occurs",
"the last letter of the original template (which does not change), then subtracts",
"(\"\".join(pair) for pair in zip(template, template[1:])) pair_counter = Counter(pairs) for _ in range(10):",
"first, second = pair new_pair_counter[first + inserted] += count new_pair_counter[inserted + second] +=",
"pair_counter) part1 = calculate_answer(template, pair_counter) for _ in range(30): pair_counter = step(ruleset, pair_counter)",
"letter of the original template (which does not change), then subtracts the lowest",
"the original template (which does not change), then subtracts the lowest count from",
"first_letter, _ = pair letter_counter[first_letter] += count return max(letter_counter.values()) - min(letter_counter.values()) def solve(template:",
"= Counter() for pair, count in pair_counter.items(): inserted = ruleset[pair] first, second =",
"pair new_pair_counter[first + inserted] += count new_pair_counter[inserted + second] += count return new_pair_counter",
"pair_counter) for _ in range(30): pair_counter = step(ruleset, pair_counter) part2 = calculate_answer(template, pair_counter)",
"adding the counts of pairs where the given letter comes first and 1",
"pair_counter: Counter[str]) -> Counter[str]: \"\"\" Applies a single step to the given pair_counter",
"from collections import Counter INPUT_FILE = \"../../input/14.txt\" Ruleset = dict[str, str] def parse_input()",
"step to the given pair_counter \"\"\" new_pair_counter: Counter[str] = Counter() for pair, count",
"pair_counter \"\"\" new_pair_counter: Counter[str] = Counter() for pair, count in pair_counter.items(): inserted =",
"letter comes first and 1 for the last letter of the original template",
"Counter[str] = Counter() for pair, count in pair_counter.items(): inserted = ruleset[pair] first, second",
"1 for the last letter of the original template (which does not change),",
"pair_counter) part2 = calculate_answer(template, pair_counter) return (part1, part2) if __name__ == \"__main__\": template,",
"last letter of the original template (which does not change), then subtracts the",
"= Counter(template[-1]) for pair, count in pair_counter.items(): first_letter, _ = pair letter_counter[first_letter] +=",
"\"\"\" Calculates the required answers given the original template and the pair insertion",
"part2 = calculate_answer(template, pair_counter) return (part1, part2) if __name__ == \"__main__\": template, ruleset",
"and returns the polymer template and the pair insertion rules \"\"\" with open(INPUT_FILE)",
"def solve(template: str, ruleset: Ruleset) -> tuple[int, int]: \"\"\" Calculates the required answers",
"a single step to the given pair_counter \"\"\" new_pair_counter: Counter[str] = Counter() for",
"pair_counter = Counter(pairs) for _ in range(10): pair_counter = step(ruleset, pair_counter) part1 ="
] |
[
"encoded_refresh_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp}) def _check_and_extract_user(request: Request) -> User: authorization_header = request.headers.get(\"Authorization\")",
"options={\"verify_exp\": verify_exp})[\"user\"]) def decode_jwt_refresh_token( encoded_refresh_token: str, verify_exp: bool = True) -> Dict: \"\"\"",
"= _check_and_extract_user(request) if user.role != Role.ADMIN: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return user def get_optional_user(request: Request)",
"return _check_and_extract_user(request) def get_admin(request: Request) -> User: \"\"\" Allow access only to an",
"= authorization_header.replace(\"Bearer \", \"\") user = extract_user_from_token(access_token, ) if cfg.sentry_dsn: sentry_sdk.set_user({ \"id\": user.id,",
"returning current authenticated user. :param request: web request :return: current user, otherwise raise",
"\"username\": user.username, \"email\": user.email, \"ip_address\": request.client.host }) return user except jwt.exceptions.ExpiredSignatureError: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)",
"HTTPException (status=401) \"\"\" user = _check_and_extract_user(request) if user.role != Role.ADMIN: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return",
"Dict: \"\"\" Decode an encoded refresh token, with optional expiration check. :param encoded_refresh_token:",
"access only to an 'admin' account, returning current authenticated admin account data. :param",
"not :return: User object stored inside the jwt \"\"\" return User(**jwt.decode( access_token, key=cfg.jwt_secret,",
"key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})[\"user\"]) def decode_jwt_refresh_token( encoded_refresh_token: str, verify_exp: bool = True) ->",
":param verify_exp: whether to perform verification or not :return: User object stored inside",
"= extract_user_from_token(access_token, ) if cfg.sentry_dsn: sentry_sdk.set_user({ \"id\": user.id, \"username\": user.username, \"email\": user.email, \"ip_address\":",
"expiration check. :param access_token: encoded access token string :param verify_exp: whether to perform",
"access_token = authorization_header.replace(\"Bearer \", \"\") user = extract_user_from_token(access_token, ) if cfg.sentry_dsn: sentry_sdk.set_user({ \"id\":",
"token string :param verify_exp: whether to perform verification or not :return: User object",
"authenticated admin account data. :param request: web request :return: current admin user, otherwise",
"= True) -> Dict: \"\"\" Decode an encoded refresh token, with optional expiration",
"encoded refresh token, with optional expiration check. :param encoded_refresh_token: encoded refresh token string",
"auth.models import User from config import cfg def get_user(request: Request) -> User: \"\"\"",
"encoded_refresh_token: encoded refresh token string :param verify_exp: whether to perform verification or not",
"string :param verify_exp: whether to perform verification or not :return: decoded jwt refresh",
"refresh token string :param verify_exp: whether to perform verification or not :return: decoded",
":param request: web request :return: current user, otherwise raise an HTTPException (status=401) \"\"\"",
"request: web request :return: current admin user, otherwise raise an HTTPException (status=401) \"\"\"",
"anonymous. :param request: web request :return: current user or None for anonymous sessions",
"True) -> Dict: \"\"\" Decode an encoded refresh token, with optional expiration check.",
"return User(**jwt.decode( access_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})[\"user\"]) def decode_jwt_refresh_token( encoded_refresh_token: str, verify_exp: bool",
"\"id\": user.id, \"username\": user.username, \"email\": user.email, \"ip_address\": request.client.host }) return user except jwt.exceptions.ExpiredSignatureError:",
":return: current admin user, otherwise raise an HTTPException (status=401) \"\"\" user = _check_and_extract_user(request)",
":param request: web request :return: current admin user, otherwise raise an HTTPException (status=401)",
"raise an HTTPException (status=401) \"\"\" return _check_and_extract_user(request) def get_admin(request: Request) -> User: \"\"\"",
"try: access_token = authorization_header.replace(\"Bearer \", \"\") user = extract_user_from_token(access_token, ) if cfg.sentry_dsn: sentry_sdk.set_user({",
"user def get_optional_user(request: Request) -> Optional[User]: \"\"\" Return authenticated user or None if",
"jwt.decode( encoded_refresh_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp}) def _check_and_extract_user(request: Request) -> User: authorization_header =",
"User: \"\"\" Extract User object from jwt token, with optional expiration check. :param",
"user.role != Role.ADMIN: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return user def get_optional_user(request: Request) -> Optional[User]: \"\"\"",
"User(**jwt.decode( access_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})[\"user\"]) def decode_jwt_refresh_token( encoded_refresh_token: str, verify_exp: bool =",
"access, requiring and returning current authenticated user. :param request: web request :return: current",
"HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: access_token = authorization_header.replace(\"Bearer \", \"\") user = extract_user_from_token(access_token, ) if cfg.sentry_dsn:",
"-> User: authorization_header = request.headers.get(\"Authorization\") if not authorization_header: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: access_token =",
":return: decoded jwt refresh token as dictionary \"\"\" return jwt.decode( encoded_refresh_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm],",
"import status from starlette.requests import Request from auth.models import Role from auth.models import",
"-> User: \"\"\" Protect route from anonymous access, requiring and returning current authenticated",
"verify_exp: whether to perform verification or not :return: User object stored inside the",
"def get_admin(request: Request) -> User: \"\"\" Allow access only to an 'admin' account,",
"\"\"\" Decode an encoded refresh token, with optional expiration check. :param encoded_refresh_token: encoded",
"check. :param encoded_refresh_token: encoded refresh token string :param verify_exp: whether to perform verification",
"string :param verify_exp: whether to perform verification or not :return: User object stored",
"user.id, \"username\": user.username, \"email\": user.email, \"ip_address\": request.client.host }) return user except jwt.exceptions.ExpiredSignatureError: raise",
"\"\"\" Protect route from anonymous access, requiring and returning current authenticated user. :param",
"-> Optional[User]: \"\"\" Return authenticated user or None if session is anonymous. :param",
"token string :param verify_exp: whether to perform verification or not :return: decoded jwt",
"str, verify_exp: bool = True) -> Dict: \"\"\" Decode an encoded refresh token,",
"Optional[User]: \"\"\" Return authenticated user or None if session is anonymous. :param request:",
"from auth.models import User from config import cfg def get_user(request: Request) -> User:",
"jwt token, with optional expiration check. :param access_token: encoded access token string :param",
"check. :param access_token: encoded access token string :param verify_exp: whether to perform verification",
"verify_exp: whether to perform verification or not :return: decoded jwt refresh token as",
"authorization_header.replace(\"Bearer \", \"\") user = extract_user_from_token(access_token, ) if cfg.sentry_dsn: sentry_sdk.set_user({ \"id\": user.id, \"username\":",
"or not :return: User object stored inside the jwt \"\"\" return User(**jwt.decode( access_token,",
"cfg.sentry_dsn: sentry_sdk.set_user({ \"id\": user.id, \"username\": user.username, \"email\": user.email, \"ip_address\": request.client.host }) return user",
"user, otherwise raise an HTTPException (status=401) \"\"\" user = _check_and_extract_user(request) if user.role !=",
"verify_exp}) def _check_and_extract_user(request: Request) -> User: authorization_header = request.headers.get(\"Authorization\") if not authorization_header: raise",
"Optional, Dict import jwt import sentry_sdk from fastapi import HTTPException from starlette import",
":return: current user or None for anonymous sessions \"\"\" try: return _check_and_extract_user(request) except",
"authenticated user or None if session is anonymous. :param request: web request :return:",
"typing import Optional, Dict import jwt import sentry_sdk from fastapi import HTTPException from",
":param request: web request :return: current user or None for anonymous sessions \"\"\"",
"or None for anonymous sessions \"\"\" try: return _check_and_extract_user(request) except HTTPException: if request.headers.get(\"Authorization\"):",
"jwt \"\"\" return User(**jwt.decode( access_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})[\"user\"]) def decode_jwt_refresh_token( encoded_refresh_token: str,",
"admin user, otherwise raise an HTTPException (status=401) \"\"\" user = _check_and_extract_user(request) if user.role",
"\"\"\" user = _check_and_extract_user(request) if user.role != Role.ADMIN: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return user def",
"user = extract_user_from_token(access_token, ) if cfg.sentry_dsn: sentry_sdk.set_user({ \"id\": user.id, \"username\": user.username, \"email\": user.email,",
"admin account data. :param request: web request :return: current admin user, otherwise raise",
"user = _check_and_extract_user(request) if user.role != Role.ADMIN: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return user def get_optional_user(request:",
":return: current user, otherwise raise an HTTPException (status=401) \"\"\" return _check_and_extract_user(request) def get_admin(request:",
"def get_user(request: Request) -> User: \"\"\" Protect route from anonymous access, requiring and",
"User: \"\"\" Protect route from anonymous access, requiring and returning current authenticated user.",
"import HTTPException from starlette import status from starlette.requests import Request from auth.models import",
"import User from config import cfg def get_user(request: Request) -> User: \"\"\" Protect",
"sentry_sdk from fastapi import HTTPException from starlette import status from starlette.requests import Request",
"inside the jwt \"\"\" return User(**jwt.decode( access_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})[\"user\"]) def decode_jwt_refresh_token(",
"raise def extract_user_from_token(access_token: str, verify_exp: bool = True) -> User: \"\"\" Extract User",
"\"\"\" Allow access only to an 'admin' account, returning current authenticated admin account",
"User object stored inside the jwt \"\"\" return User(**jwt.decode( access_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\":",
"import Optional, Dict import jwt import sentry_sdk from fastapi import HTTPException from starlette",
"current authenticated admin account data. :param request: web request :return: current admin user,",
"an 'admin' account, returning current authenticated admin account data. :param request: web request",
"raise an HTTPException (status=401) \"\"\" user = _check_and_extract_user(request) if user.role != Role.ADMIN: raise",
"try: return _check_and_extract_user(request) except HTTPException: if request.headers.get(\"Authorization\"): raise def extract_user_from_token(access_token: str, verify_exp: bool",
"to perform verification or not :return: decoded jwt refresh token as dictionary \"\"\"",
"data. :param request: web request :return: current admin user, otherwise raise an HTTPException",
"refresh token as dictionary \"\"\" return jwt.decode( encoded_refresh_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp}) def",
"from fastapi import HTTPException from starlette import status from starlette.requests import Request from",
":param encoded_refresh_token: encoded refresh token string :param verify_exp: whether to perform verification or",
"perform verification or not :return: decoded jwt refresh token as dictionary \"\"\" return",
") if cfg.sentry_dsn: sentry_sdk.set_user({ \"id\": user.id, \"username\": user.username, \"email\": user.email, \"ip_address\": request.client.host })",
"user or None for anonymous sessions \"\"\" try: return _check_and_extract_user(request) except HTTPException: if",
"if not authorization_header: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: access_token = authorization_header.replace(\"Bearer \", \"\") user =",
"_check_and_extract_user(request) if user.role != Role.ADMIN: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return user def get_optional_user(request: Request) ->",
"refresh token, with optional expiration check. :param encoded_refresh_token: encoded refresh token string :param",
"Request) -> User: \"\"\" Protect route from anonymous access, requiring and returning current",
"def decode_jwt_refresh_token( encoded_refresh_token: str, verify_exp: bool = True) -> Dict: \"\"\" Decode an",
"anonymous access, requiring and returning current authenticated user. :param request: web request :return:",
":param verify_exp: whether to perform verification or not :return: decoded jwt refresh token",
"bool = True) -> User: \"\"\" Extract User object from jwt token, with",
"request.headers.get(\"Authorization\") if not authorization_header: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: access_token = authorization_header.replace(\"Bearer \", \"\") user",
"web request :return: current user or None for anonymous sessions \"\"\" try: return",
"encoded_refresh_token: str, verify_exp: bool = True) -> Dict: \"\"\" Decode an encoded refresh",
"User object from jwt token, with optional expiration check. :param access_token: encoded access",
"(status=401) \"\"\" user = _check_and_extract_user(request) if user.role != Role.ADMIN: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return user",
"as dictionary \"\"\" return jwt.decode( encoded_refresh_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp}) def _check_and_extract_user(request: Request)",
"raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: access_token = authorization_header.replace(\"Bearer \", \"\") user = extract_user_from_token(access_token, ) if",
"_check_and_extract_user(request: Request) -> User: authorization_header = request.headers.get(\"Authorization\") if not authorization_header: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try:",
"get_admin(request: Request) -> User: \"\"\" Allow access only to an 'admin' account, returning",
"!= Role.ADMIN: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return user def get_optional_user(request: Request) -> Optional[User]: \"\"\" Return",
"access_token: encoded access token string :param verify_exp: whether to perform verification or not",
"from auth.models import Role from auth.models import User from config import cfg def",
"\"\"\" return jwt.decode( encoded_refresh_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp}) def _check_and_extract_user(request: Request) -> User:",
"None if session is anonymous. :param request: web request :return: current user or",
"\"\"\" Extract User object from jwt token, with optional expiration check. :param access_token:",
"_check_and_extract_user(request) def get_admin(request: Request) -> User: \"\"\" Allow access only to an 'admin'",
"get_optional_user(request: Request) -> Optional[User]: \"\"\" Return authenticated user or None if session is",
"object from jwt token, with optional expiration check. :param access_token: encoded access token",
"\"\"\" return _check_and_extract_user(request) def get_admin(request: Request) -> User: \"\"\" Allow access only to",
"from typing import Optional, Dict import jwt import sentry_sdk from fastapi import HTTPException",
"for anonymous sessions \"\"\" try: return _check_and_extract_user(request) except HTTPException: if request.headers.get(\"Authorization\"): raise def",
"otherwise raise an HTTPException (status=401) \"\"\" user = _check_and_extract_user(request) if user.role != Role.ADMIN:",
"from starlette.requests import Request from auth.models import Role from auth.models import User from",
"HTTPException from starlette import status from starlette.requests import Request from auth.models import Role",
"Role from auth.models import User from config import cfg def get_user(request: Request) ->",
"verification or not :return: decoded jwt refresh token as dictionary \"\"\" return jwt.decode(",
"return jwt.decode( encoded_refresh_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp}) def _check_and_extract_user(request: Request) -> User: authorization_header",
"encoded access token string :param verify_exp: whether to perform verification or not :return:",
"anonymous sessions \"\"\" try: return _check_and_extract_user(request) except HTTPException: if request.headers.get(\"Authorization\"): raise def extract_user_from_token(access_token:",
":param access_token: encoded access token string :param verify_exp: whether to perform verification or",
"optional expiration check. :param encoded_refresh_token: encoded refresh token string :param verify_exp: whether to",
"-> User: \"\"\" Extract User object from jwt token, with optional expiration check.",
"True) -> User: \"\"\" Extract User object from jwt token, with optional expiration",
"= request.headers.get(\"Authorization\") if not authorization_header: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: access_token = authorization_header.replace(\"Bearer \", \"\")",
"HTTPException (status=401) \"\"\" return _check_and_extract_user(request) def get_admin(request: Request) -> User: \"\"\" Allow access",
"authorization_header = request.headers.get(\"Authorization\") if not authorization_header: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: access_token = authorization_header.replace(\"Bearer \",",
"extract_user_from_token(access_token, ) if cfg.sentry_dsn: sentry_sdk.set_user({ \"id\": user.id, \"username\": user.username, \"email\": user.email, \"ip_address\": request.client.host",
"User from config import cfg def get_user(request: Request) -> User: \"\"\" Protect route",
"otherwise raise an HTTPException (status=401) \"\"\" return _check_and_extract_user(request) def get_admin(request: Request) -> User:",
"def extract_user_from_token(access_token: str, verify_exp: bool = True) -> User: \"\"\" Extract User object",
"str, verify_exp: bool = True) -> User: \"\"\" Extract User object from jwt",
"key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp}) def _check_and_extract_user(request: Request) -> User: authorization_header = request.headers.get(\"Authorization\") if",
"get_user(request: Request) -> User: \"\"\" Protect route from anonymous access, requiring and returning",
"whether to perform verification or not :return: decoded jwt refresh token as dictionary",
"jwt import sentry_sdk from fastapi import HTTPException from starlette import status from starlette.requests",
"Request from auth.models import Role from auth.models import User from config import cfg",
"except HTTPException: if request.headers.get(\"Authorization\"): raise def extract_user_from_token(access_token: str, verify_exp: bool = True) ->",
"access token string :param verify_exp: whether to perform verification or not :return: User",
"fastapi import HTTPException from starlette import status from starlette.requests import Request from auth.models",
"raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return user def get_optional_user(request: Request) -> Optional[User]: \"\"\" Return authenticated user",
"user, otherwise raise an HTTPException (status=401) \"\"\" return _check_and_extract_user(request) def get_admin(request: Request) ->",
"is anonymous. :param request: web request :return: current user or None for anonymous",
"if cfg.sentry_dsn: sentry_sdk.set_user({ \"id\": user.id, \"username\": user.username, \"email\": user.email, \"ip_address\": request.client.host }) return",
"-> Dict: \"\"\" Decode an encoded refresh token, with optional expiration check. :param",
"algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})[\"user\"]) def decode_jwt_refresh_token( encoded_refresh_token: str, verify_exp: bool = True) -> Dict:",
"to perform verification or not :return: User object stored inside the jwt \"\"\"",
"or None if session is anonymous. :param request: web request :return: current user",
"Protect route from anonymous access, requiring and returning current authenticated user. :param request:",
"web request :return: current admin user, otherwise raise an HTTPException (status=401) \"\"\" user",
"status from starlette.requests import Request from auth.models import Role from auth.models import User",
"'admin' account, returning current authenticated admin account data. :param request: web request :return:",
"verification or not :return: User object stored inside the jwt \"\"\" return User(**jwt.decode(",
"an encoded refresh token, with optional expiration check. :param encoded_refresh_token: encoded refresh token",
"extract_user_from_token(access_token: str, verify_exp: bool = True) -> User: \"\"\" Extract User object from",
"account data. :param request: web request :return: current admin user, otherwise raise an",
"sessions \"\"\" try: return _check_and_extract_user(request) except HTTPException: if request.headers.get(\"Authorization\"): raise def extract_user_from_token(access_token: str,",
"-> User: \"\"\" Allow access only to an 'admin' account, returning current authenticated",
"User: authorization_header = request.headers.get(\"Authorization\") if not authorization_header: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: access_token = authorization_header.replace(\"Bearer",
"web request :return: current user, otherwise raise an HTTPException (status=401) \"\"\" return _check_and_extract_user(request)",
"request: web request :return: current user, otherwise raise an HTTPException (status=401) \"\"\" return",
"bool = True) -> Dict: \"\"\" Decode an encoded refresh token, with optional",
"decoded jwt refresh token as dictionary \"\"\" return jwt.decode( encoded_refresh_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\":",
"from jwt token, with optional expiration check. :param access_token: encoded access token string",
"authorization_header: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: access_token = authorization_header.replace(\"Bearer \", \"\") user = extract_user_from_token(access_token, )",
"request :return: current admin user, otherwise raise an HTTPException (status=401) \"\"\" user =",
"route from anonymous access, requiring and returning current authenticated user. :param request: web",
"user or None if session is anonymous. :param request: web request :return: current",
"user. :param request: web request :return: current user, otherwise raise an HTTPException (status=401)",
"stored inside the jwt \"\"\" return User(**jwt.decode( access_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})[\"user\"]) def",
"with optional expiration check. :param access_token: encoded access token string :param verify_exp: whether",
"\"\") user = extract_user_from_token(access_token, ) if cfg.sentry_dsn: sentry_sdk.set_user({ \"id\": user.id, \"username\": user.username, \"email\":",
"and returning current authenticated user. :param request: web request :return: current user, otherwise",
"Request) -> Optional[User]: \"\"\" Return authenticated user or None if session is anonymous.",
"current user, otherwise raise an HTTPException (status=401) \"\"\" return _check_and_extract_user(request) def get_admin(request: Request)",
"an HTTPException (status=401) \"\"\" return _check_and_extract_user(request) def get_admin(request: Request) -> User: \"\"\" Allow",
"\", \"\") user = extract_user_from_token(access_token, ) if cfg.sentry_dsn: sentry_sdk.set_user({ \"id\": user.id, \"username\": user.username,",
"verify_exp})[\"user\"]) def decode_jwt_refresh_token( encoded_refresh_token: str, verify_exp: bool = True) -> Dict: \"\"\" Decode",
"HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return user def get_optional_user(request: Request) -> Optional[User]: \"\"\" Return authenticated user or",
"account, returning current authenticated admin account data. :param request: web request :return: current",
"to an 'admin' account, returning current authenticated admin account data. :param request: web",
"only to an 'admin' account, returning current authenticated admin account data. :param request:",
"Request) -> User: authorization_header = request.headers.get(\"Authorization\") if not authorization_header: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: access_token",
"requiring and returning current authenticated user. :param request: web request :return: current user,",
"returning current authenticated admin account data. :param request: web request :return: current admin",
"Decode an encoded refresh token, with optional expiration check. :param encoded_refresh_token: encoded refresh",
"current admin user, otherwise raise an HTTPException (status=401) \"\"\" user = _check_and_extract_user(request) if",
"dictionary \"\"\" return jwt.decode( encoded_refresh_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp}) def _check_and_extract_user(request: Request) ->",
"verify_exp: bool = True) -> User: \"\"\" Extract User object from jwt token,",
"access_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})[\"user\"]) def decode_jwt_refresh_token( encoded_refresh_token: str, verify_exp: bool = True)",
"from anonymous access, requiring and returning current authenticated user. :param request: web request",
"HTTPException: if request.headers.get(\"Authorization\"): raise def extract_user_from_token(access_token: str, verify_exp: bool = True) -> User:",
"(status=401) \"\"\" return _check_and_extract_user(request) def get_admin(request: Request) -> User: \"\"\" Allow access only",
":return: User object stored inside the jwt \"\"\" return User(**jwt.decode( access_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm],",
"starlette import status from starlette.requests import Request from auth.models import Role from auth.models",
"request :return: current user, otherwise raise an HTTPException (status=401) \"\"\" return _check_and_extract_user(request) def",
"Role.ADMIN: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return user def get_optional_user(request: Request) -> Optional[User]: \"\"\" Return authenticated",
"from starlette import status from starlette.requests import Request from auth.models import Role from",
"token, with optional expiration check. :param access_token: encoded access token string :param verify_exp:",
"jwt refresh token as dictionary \"\"\" return jwt.decode( encoded_refresh_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})",
"if user.role != Role.ADMIN: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) return user def get_optional_user(request: Request) -> Optional[User]:",
"def _check_and_extract_user(request: Request) -> User: authorization_header = request.headers.get(\"Authorization\") if not authorization_header: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)",
"optional expiration check. :param access_token: encoded access token string :param verify_exp: whether to",
"current user or None for anonymous sessions \"\"\" try: return _check_and_extract_user(request) except HTTPException:",
"verify_exp: bool = True) -> Dict: \"\"\" Decode an encoded refresh token, with",
"expiration check. :param encoded_refresh_token: encoded refresh token string :param verify_exp: whether to perform",
"session is anonymous. :param request: web request :return: current user or None for",
"decode_jwt_refresh_token( encoded_refresh_token: str, verify_exp: bool = True) -> Dict: \"\"\" Decode an encoded",
"if session is anonymous. :param request: web request :return: current user or None",
"if request.headers.get(\"Authorization\"): raise def extract_user_from_token(access_token: str, verify_exp: bool = True) -> User: \"\"\"",
"current authenticated user. :param request: web request :return: current user, otherwise raise an",
"User: \"\"\" Allow access only to an 'admin' account, returning current authenticated admin",
"None for anonymous sessions \"\"\" try: return _check_and_extract_user(request) except HTTPException: if request.headers.get(\"Authorization\"): raise",
"Allow access only to an 'admin' account, returning current authenticated admin account data.",
"= True) -> User: \"\"\" Extract User object from jwt token, with optional",
"the jwt \"\"\" return User(**jwt.decode( access_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})[\"user\"]) def decode_jwt_refresh_token( encoded_refresh_token:",
"request.headers.get(\"Authorization\"): raise def extract_user_from_token(access_token: str, verify_exp: bool = True) -> User: \"\"\" Extract",
"\"\"\" try: return _check_and_extract_user(request) except HTTPException: if request.headers.get(\"Authorization\"): raise def extract_user_from_token(access_token: str, verify_exp:",
"perform verification or not :return: User object stored inside the jwt \"\"\" return",
"Dict import jwt import sentry_sdk from fastapi import HTTPException from starlette import status",
"encoded refresh token string :param verify_exp: whether to perform verification or not :return:",
"token, with optional expiration check. :param encoded_refresh_token: encoded refresh token string :param verify_exp:",
"\"\"\" return User(**jwt.decode( access_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})[\"user\"]) def decode_jwt_refresh_token( encoded_refresh_token: str, verify_exp:",
"import sentry_sdk from fastapi import HTTPException from starlette import status from starlette.requests import",
"options={\"verify_exp\": verify_exp}) def _check_and_extract_user(request: Request) -> User: authorization_header = request.headers.get(\"Authorization\") if not authorization_header:",
"not authorization_header: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED) try: access_token = authorization_header.replace(\"Bearer \", \"\") user = extract_user_from_token(access_token,",
"import jwt import sentry_sdk from fastapi import HTTPException from starlette import status from",
"whether to perform verification or not :return: User object stored inside the jwt",
"not :return: decoded jwt refresh token as dictionary \"\"\" return jwt.decode( encoded_refresh_token, key=cfg.jwt_secret,",
"config import cfg def get_user(request: Request) -> User: \"\"\" Protect route from anonymous",
"request :return: current user or None for anonymous sessions \"\"\" try: return _check_and_extract_user(request)",
"token as dictionary \"\"\" return jwt.decode( encoded_refresh_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp}) def _check_and_extract_user(request:",
"import Request from auth.models import Role from auth.models import User from config import",
"Extract User object from jwt token, with optional expiration check. :param access_token: encoded",
"import cfg def get_user(request: Request) -> User: \"\"\" Protect route from anonymous access,",
"request: web request :return: current user or None for anonymous sessions \"\"\" try:",
"object stored inside the jwt \"\"\" return User(**jwt.decode( access_token, key=cfg.jwt_secret, algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp})[\"user\"])",
"import Role from auth.models import User from config import cfg def get_user(request: Request)",
"an HTTPException (status=401) \"\"\" user = _check_and_extract_user(request) if user.role != Role.ADMIN: raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED)",
"starlette.requests import Request from auth.models import Role from auth.models import User from config",
"or not :return: decoded jwt refresh token as dictionary \"\"\" return jwt.decode( encoded_refresh_token,",
"algorithms=[cfg.jwt_algorithm], options={\"verify_exp\": verify_exp}) def _check_and_extract_user(request: Request) -> User: authorization_header = request.headers.get(\"Authorization\") if not",
"return _check_and_extract_user(request) except HTTPException: if request.headers.get(\"Authorization\"): raise def extract_user_from_token(access_token: str, verify_exp: bool =",
"Request) -> User: \"\"\" Allow access only to an 'admin' account, returning current",
"Return authenticated user or None if session is anonymous. :param request: web request",
"\"\"\" Return authenticated user or None if session is anonymous. :param request: web",
"sentry_sdk.set_user({ \"id\": user.id, \"username\": user.username, \"email\": user.email, \"ip_address\": request.client.host }) return user except",
"auth.models import Role from auth.models import User from config import cfg def get_user(request:",
"return user def get_optional_user(request: Request) -> Optional[User]: \"\"\" Return authenticated user or None",
"cfg def get_user(request: Request) -> User: \"\"\" Protect route from anonymous access, requiring",
"with optional expiration check. :param encoded_refresh_token: encoded refresh token string :param verify_exp: whether",
"_check_and_extract_user(request) except HTTPException: if request.headers.get(\"Authorization\"): raise def extract_user_from_token(access_token: str, verify_exp: bool = True)",
"from config import cfg def get_user(request: Request) -> User: \"\"\" Protect route from",
"def get_optional_user(request: Request) -> Optional[User]: \"\"\" Return authenticated user or None if session",
"authenticated user. :param request: web request :return: current user, otherwise raise an HTTPException"
] |
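These dependency functions are designed to be wired into route handlers through FastAPI's injection system. Below is a minimal sketch, assuming the module above is importable and that `User` exposes a `username` attribute; the app object and route paths are invented for illustration:

from fastapi import Depends, FastAPI

app = FastAPI()


@app.get("/me")
def read_profile(user: User = Depends(get_user)):
    # get_user raises HTTP 401 before this handler runs if the
    # Authorization header is missing or the bearer token is invalid
    return {"username": user.username}


@app.get("/admin/stats")
def read_stats(admin: User = Depends(get_admin)):
    # only tokens whose embedded user has Role.ADMIN get this far
    return {"requested_by": admin.username}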
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import argparse
import random
import os
from PIL import Image
import PIL

parser = argparse.ArgumentParser(description="SSD_mobilenet test single image test procedure.")
parser.add_argument("--save_conf_path", type=str, default="./img_info",
                    help="The path of the result.json.")
parser.add_argument("--intput_file_path", type=str, default="./acl/data",
                    help="The path of inference input bin file.")
args = parser.parse_args()


def get_reasoning_data(image_path):
    """Collect every image file found under image_path."""
    img_files = []
    exts = ['jpg', 'png', 'jpeg', 'JPG', 'JPEG']
    for parent, dirnames, filenames in os.walk(os.path.join(image_path)):
        for filename in filenames:
            for ext in exts:
                if filename.endswith(ext):
                    img_files.append(os.path.join(parent, filename))
                    break
    print('Find {} images'.format(len(img_files)))
    return img_files


def main():
    img_info_path = args.save_conf_path
    img_path = args.intput_file_path
    img_list = get_reasoning_data(img_path)
    f = open(img_info_path, "w+")
    i = 0
    for img_fn in img_list:
        try:
            img_name = img_fn.split("/")[-1].split(".")[0]
            img_src = Image.open(img_fn)
            im_width, im_height = img_src.size
            # one record per image: "<index> <name> <width> <height>"
            f.write(str(i) + " " + img_name + " " + str(im_width) + " " + str(im_height))
            f.write('\n')
        except Exception:
            print("Error reading image {}!".format(img_fn))
            continue
        i = i + 1
    f.close()


if __name__ == '__main__':
    main()
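Each record the script writes has the fixed form `<index> <name> <width> <height>`, one image per line. A hedged companion snippet for reading the file back (the `load_img_info` helper is not part of the original script):

def load_img_info(path="./img_info"):
    # parse the whitespace-separated records produced by main()
    info = {}
    with open(path) as f:
        for line in f:
            idx, name, width, height = line.split()
            info[name] = (int(width), int(height))
    return info


print(load_img_info())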
import sublime, sublime_plugin
import os
from ..libs import util
from ..libs import NodeJS
from ..libs import javaScriptEnhancements
from ..libs.global_vars import *


class JavascriptEnhancementsGetAstCommand(sublime_plugin.TextCommand):

    def run(self, edit, **args):
        view = self.view
        flow_cli = "flow"
        is_from_bin = True
        chdir = ""
        use_node = True
        bin_path = ""

        node = NodeJS(check_local=True)
        result = node.execute_check_output(
            flow_cli,
            ['ast', '--from', 'sublime_text', '--pretty'],
            is_from_bin=is_from_bin,
            use_fp_temp=True,
            fp_temp_contents=view.substr(sublime.Region(0, view.size())),
            is_output_json=False,
            chdir=chdir,
            bin_path=bin_path,
            use_node=use_node
        )
        print(result[1])

    def is_enabled(self, **args):
        view = self.view
        if not util.selection_in_js_scope(view) or not DEVELOPER_MODE:
            return False
        return True

    def is_visible(self, **args):
        view = self.view
        if not util.selection_in_js_scope(view) or not DEVELOPER_MODE:
            return False
        return True
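Sublime Text derives the runnable command name from the class name by converting CamelCase to snake_case and dropping the `Command` suffix, so the command above can be exercised directly. A sketch, runnable from the Sublime Text console where `view` is bound to the active view:

# prints the Flow AST of the current buffer to the console
view.run_command("javascript_enhancements_get_ast")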
import setuptools

with open("README.md", "r") as fh:
    long_description = fh.read()

setuptools.setup(
    name="diffusionfit",
    version="0.7.0",
    python_requires=">=3.9",
    install_requires=[
        "numpy",
        "scipy",
        "scikit-image",
        "matplotlib",
        "seaborn",
        "pandas",
        "numba",
        "streamlit",
        "plotly",
    ],
    author="<NAME>",
    author_email="<EMAIL>",
    description="Python package for extracting estimates of dye/peptide "
                "diffusion coefficients and loss rates from a time-sequence "
                "of fluorescence images.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/NTBEL/diffusion-fit",
    packages=setuptools.find_packages(),
    classifiers=[
        "Programming Language :: Python :: 3",
        "Operating System :: OS Independent",
    ],
)
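Once the package is installed (for example with `pip install .` from the project root), the metadata declared in this setup.py can be queried back at runtime through the standard library; a small sketch, assuming an installed `diffusionfit` distribution:

from importlib.metadata import metadata, requires, version

# these reflect the setup() arguments above
print(version("diffusionfit"))              # "0.7.0"
print(requires("diffusionfit"))             # the install_requires entries
print(metadata("diffusionfit")["Summary"])  # the description string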
"""
Dictionary with lazy evaluation on access, via a supplied update function
"""
import itertools


class LazyDict(dict):
    """
    Dictionary that updates values when they are accessed.

    All the usual dictionary methods work as expected, with automatic lazy
    updates occurring behind the scenes whenever values are read from the
    dictionary. The optional ``items`` argument, if specified, is a mapping
    instance used to initialise the items in the :class:`LazyDict`.

    The ``update_value`` argument required by the :class:`LazyDict`
    constructor must be a function of the form:

        update_value(k, existing_value, member) -> updated_value

    This function is called whenever an item with the key ``k`` is read from
    the :class:`LazyDict`. The second argument ``existing_value`` is the
    value corresponding to the key ``k`` stored in the :class:`LazyDict`, or
    ``None`` if the key ``k`` is not contained in the :class:`LazyDict`. The
    third argument ``member`` is a boolean value indicating if there is an
    existing value stored under the key ``k``.

    This function is used as follows by the :class:`LazyDict`. Suppose that
    the value ``v`` has been stored in a :class:`LazyDict` object
    ``lazy_dict`` under the key ``k``, that is, ``lazy_dict[k] = v``. Then
    subsequently accessing this value in the usual manner::

        v_updated = lazy_dict[k]

    is equivalent to the following two statements::

        lazy_dict[k] = update_value(k, v, (k in lazy_dict))
        v_updated = update_value(k, v, (k in lazy_dict))

    Observe how the value stored in the :class:`LazyDict` under the key ``k``
    is first updated, using the provided function, with the updated value
    then being the one returned.
    """

    def __init__(self, update_value, items=None):
        """
        Returns a LazyDict using the specified ``update_value`` function and
        optional initial dictionary arguments.
        """
        self.update_value = update_value
        if items is None:
            dict.__init__(self)
        else:
            dict.__init__(self, items)

    def __getitem__(self, key):
        member = dict.__contains__(self, key)
        if member:
            existing_value = dict.__getitem__(self, key)
        else:
            existing_value = None
        # ensure measurement is up to date
        updated_value = self.update_value(key, existing_value, member)
        self[key] = updated_value
        return updated_value

    def copy(self):
        return LazyDict(self.update_value, dict.copy(self))

    def itervalues(self):
        return itertools.imap((lambda k: self[k]), dict.iterkeys(self))

    def iteritems(self):
        return itertools.imap((lambda k: (k, self[k])), dict.iterkeys(self))

    def pop(self, *args):
        n_args = len(args)
        if n_args < 1:
            raise TypeError('pop expected at least 1 argument, got %d' % n_args)
        if n_args > 2:
            raise TypeError('pop expected at most 2 arguments, got %d' % n_args)
        k = args[0]
        if k in self:
            value = self[k]
            del self[k]
            return value
        else:
            if n_args == 2:
                return args[1]
            else:
                raise KeyError(str(k))

    def popitem(self):
        # the middle of this method is only partially recoverable; assumed
        # here: pop an arbitrary pair, re-insert it so the update hook can
        # run, then delete it for good and return the updated value
        key, value = dict.popitem(self)
        dict.__setitem__(self, key, value)
        updated_value = self.update_value(key, value, True)
        del self[key]
        return key, updated_value

    def setdefault(self, k, x=None):
        if k in self:
            return self[k]
        else:
            self[k] = x
            return x  # assumed return value; this tail is not recoverable

    def get(self, k, x=None):
        # signature assumed; the body matches the surviving fragments
        if k in self:
            return self[k]
        else:
            return x

    def values(self):
        return list(self.itervalues())

    def items(self):
        return list(self.iteritems())
"dictionary. The optional ``items`` argument, if specified, is a mapping instance used to",
"self[k] del self[k] return value else: if n_args == 2: return args[1] else:",
"self[k]), dict.iterkeys(self)) def iteritems(self): return itertools.imap((lambda k : (k, self[k])), dict.iterkeys(self)) def pop(self,",
"2 arguments, got %d' % n_args) k = args[0] if k in self:",
"key ``k`` is read from the :class:`LazyDict`. The second argument ``existing_value``, is the",
"import itertools class LazyDict(dict): \"\"\" A dictionary type that lazily updates values when",
"lazily updates values when they are accessed. All the usual dictionary methods work",
"``existing_value``, is the value corresponding to the key ``k`` stored in the :class:`LazyDict`,",
"args[0] if k in self: value = self[k] del self[k] return value else:",
"in the usual manner:: v_updated = lazy_dict[k] is equivalent to the following two",
"``k`` is read from the :class:`LazyDict`. The second argument ``existing_value``, is the value",
"evaluation on access, via a supplied update function \"\"\" import itertools class LazyDict(dict):",
"or ``None``, if the key ``k`` is not contained in the :class:`LazyDict`. The",
"optional initial dictionary arguments. \"\"\" self.update_value = update_value if items is None: dict.__init__(self)",
"key) if member: existing_value = dict.__getitem__(self, key) else: existing_value = None # ensure",
"arguments. \"\"\" self.update_value = update_value if items is None: dict.__init__(self) else: dict.__init__(items) def",
"contained in the :class:`LazyDict`. The third argument ``member`` is a boolean value indicating",
"= self[k] del self[k] return value else: if n_args == 2: return args[1]",
"stored under the key ``k``. This function is used as follows by the",
"\"\"\" import itertools class LazyDict(dict): \"\"\" A dictionary type that lazily updates values",
"self.update_value(key, existing_value, member) self[key] = updated_value return updated_value def copy(self): return LazyDict(self.update_value, dict.copy(self))",
"one returned. \"\"\" def __init__(self, update_value, items = None): \"\"\" Returns a LazyDict",
"1: raise TypeError('pop expected at least 1 argument, got %d' % n_args) if",
"is a boolean value indicating if there is an existing value stored under",
"member: existing_value = dict.__getitem__(self, key) else: existing_value = None # ensure measurement is",
"items is None: dict.__init__(self) else: dict.__init__(items) def __getitem__(self, key): member = dict.__contains__(self, key)",
"> 2: raise TypeError('pop expected at most 2 arguments, got %d' % n_args)",
"n_args > 2: raise TypeError('pop expected at most 2 arguments, got %d' %",
"updates occuring behind the scenes whenever values are read from the dictionary. The",
"lazy evaluation on access, via a supplied update function \"\"\" import itertools class",
"return x def get(self, k, x=None): if k in self: return self[k] else:",
"as follows by the :class:`LazyDict`. Suppose that the value ``v`` has been stored",
"= None): \"\"\" Returns a LazyDict using the specified ``update_value`` function and optional",
"in self: return self[k] else: self[k] = x return x def get(self, k,",
"under the key ``k`` is first updated, using the provided function, with the",
"must be a function of the form: update_value(k, existing_value, member) -> updated_value This",
"k : (k, self[k])), dict.iterkeys(self)) def pop(self, *args): n_args = len(args) if n_args",
"= None # ensure measurement is up to date updated_value = self.update_value(key, existing_value,",
"``k`` is not contained in the :class:`LazyDict`. The third argument ``member`` is a",
"else: if n_args == 2: return args[1] else: raise KeyError(str(k)) def popitem(self): key,",
"The optional ``items`` argument, if specified, is a mapping instance used to initialise",
"def __getitem__(self, key): member = dict.__contains__(self, key) if member: existing_value = dict.__getitem__(self, key)",
"else: existing_value = None # ensure measurement is up to date updated_value =",
"\"\"\" A dictionary type that lazily updates values when they are accessed. All",
"the key ``k`` is not contained in the :class:`LazyDict`. The third argument ``member``",
"key) else: existing_value = None # ensure measurement is up to date updated_value",
"LazyDict(dict): \"\"\" A dictionary type that lazily updates values when they are accessed.",
"updated_value return updated_value def copy(self): return LazyDict(self.update_value, dict.copy(self)) def itervalues(self): return itertools.imap((lambda k",
"function \"\"\" import itertools class LazyDict(dict): \"\"\" A dictionary type that lazily updates",
"*args): n_args = len(args) if n_args < 1: raise TypeError('pop expected at least",
"dictionary arguments. \"\"\" self.update_value = update_value if items is None: dict.__init__(self) else: dict.__init__(items)",
"the value ``v`` has been stored in a :class:`LazyDict` object ``lazy_dict`` under the",
"updated_value = self.update_value(key, existing_value, member) self[key] = updated_value return updated_value def copy(self): return",
"def itervalues(self): return itertools.imap((lambda k : self[k]), dict.iterkeys(self)) def iteritems(self): return itertools.imap((lambda k",
"= self[key] del self[key] return key, updated_value def setdefault(self, k, x=None): if k",
"the :class:`LazyDict` under the key ``k`` is first updated, using the provided function,",
"function, with the updated value then being the one returned. \"\"\" def __init__(self,",
"whenever values are read from the dictionary. The optional ``items`` argument, if specified,",
"k : self[k]), dict.iterkeys(self)) def iteritems(self): return itertools.imap((lambda k : (k, self[k])), dict.iterkeys(self))",
"ensure measurement is up to date updated_value = self.update_value(key, existing_value, member) self[key] =",
"value ``v`` has been stored in a :class:`LazyDict` object ``lazy_dict`` under the key",
"access, via a supplied update function \"\"\" import itertools class LazyDict(dict): \"\"\" A",
"k, x=None): if k in self: return self[k] else: return x def values(self):",
"values when they are accessed. All the usual dictionary methods work as expected,",
"raise TypeError('pop expected at most 2 arguments, got %d' % n_args) k =",
"= value updated_value = self[key] del self[key] return key, updated_value def setdefault(self, k,",
"how the value stored in the :class:`LazyDict` under the key ``k`` is first",
"dict.iterkeys(self)) def iteritems(self): return itertools.imap((lambda k : (k, self[k])), dict.iterkeys(self)) def pop(self, *args):",
"in the :class:`LazyDict` under the key ``k`` is first updated, using the provided",
"key, value = dict.popitem(self) self[key] = value updated_value = self[key] del self[key] return",
"to initialise the items in the :class:`LazyDict`. The ``update_value`` argument required by the",
"Returns a LazyDict using the specified ``update_value`` function and optional initial dictionary arguments.",
"n_args = len(args) if n_args < 1: raise TypeError('pop expected at least 1",
"the one returned. \"\"\" def __init__(self, update_value, items = None): \"\"\" Returns a",
"__getitem__(self, key): member = dict.__contains__(self, key) if member: existing_value = dict.__getitem__(self, key) else:",
"a mapping instance used to initialise the items in the :class:`LazyDict`. The ``update_value``",
"most 2 arguments, got %d' % n_args) k = args[0] if k in",
"``v`` has been stored in a :class:`LazyDict` object ``lazy_dict`` under the key ``k``,",
"return itertools.imap((lambda k : (k, self[k])), dict.iterkeys(self)) def pop(self, *args): n_args = len(args)",
":class:`LazyDict` under the key ``k`` is first updated, using the provided function, with",
"expected at least 1 argument, got %d' % n_args) if n_args > 2:",
"value corresponding to the key ``k`` stored in the :class:`LazyDict`, or ``None``, if",
"= lazy_dict[k] is equivalent to the following two statements:: lazy_dict[k] = update_value(k, v,",
"to the key ``k`` stored in the :class:`LazyDict`, or ``None``, if the key",
"the following two statements:: lazy_dict[k] = update_value(k, v, (k in lazy_dict)) v_updated =",
":class:`LazyDict`. The ``update_value`` argument required by the :class:`LazyDict` constructor must be a function",
"class LazyDict(dict): \"\"\" A dictionary type that lazily updates values when they are",
"return key, updated_value def setdefault(self, k, x=None): if k in self: return self[k]",
"supplied update function \"\"\" import itertools class LazyDict(dict): \"\"\" A dictionary type that",
"``k``, that is, ``lazy_dict[k] = v``. Then subsequently accessing this value in the",
"All the usual dictionary methods work as expected, with automatic lazy updates occuring",
"``k`` is first updated, using the provided function, with the updated value then",
"work as expected, with automatic lazy updates occuring behind the scenes whenever values",
"value else: if n_args == 2: return args[1] else: raise KeyError(str(k)) def popitem(self):",
"= dict.__contains__(self, key) if member: existing_value = dict.__getitem__(self, key) else: existing_value = None",
"= update_value(k, v, (k in lazy_dict)) Observe how the value stored in the",
"args[1] else: raise KeyError(str(k)) def popitem(self): key, value = dict.popitem(self) self[key] = value",
"LazyDict using the specified ``update_value`` function and optional initial dictionary arguments. \"\"\" self.update_value",
"dict.__contains__(self, key) if member: existing_value = dict.__getitem__(self, key) else: existing_value = None #",
"the :class:`LazyDict`. The ``update_value`` argument required by the :class:`LazyDict` constructor must be a",
"items in the :class:`LazyDict`. The ``update_value`` argument required by the :class:`LazyDict` constructor must",
"This function is used as follows by the :class:`LazyDict`. Suppose that the value",
"self[k] = x return x def get(self, k, x=None): if k in self:",
"that the value ``v`` has been stored in a :class:`LazyDict` object ``lazy_dict`` under",
"else: raise KeyError(str(k)) def popitem(self): key, value = dict.popitem(self) self[key] = value updated_value",
"-> updated_value This function is called whenever an item with the key ``k``",
"initial dictionary arguments. \"\"\" self.update_value = update_value if items is None: dict.__init__(self) else:",
"lazy_dict[k] = update_value(k, v, (k in lazy_dict)) v_updated = update_value(k, v, (k in",
"existing_value, member) self[key] = updated_value return updated_value def copy(self): return LazyDict(self.update_value, dict.copy(self)) def",
"to date updated_value = self.update_value(key, existing_value, member) self[key] = updated_value return updated_value def",
"be a function of the form: update_value(k, existing_value, member) -> updated_value This function",
"in self: value = self[k] del self[k] return value else: if n_args ==",
"``update_value`` function and optional initial dictionary arguments. \"\"\" self.update_value = update_value if items",
"n_args) k = args[0] if k in self: value = self[k] del self[k]",
"2: return args[1] else: raise KeyError(str(k)) def popitem(self): key, value = dict.popitem(self) self[key]",
"\"\"\" Returns a LazyDict using the specified ``update_value`` function and optional initial dictionary",
"argument, got %d' % n_args) if n_args > 2: raise TypeError('pop expected at",
"member) -> updated_value This function is called whenever an item with the key",
"the dictionary. The optional ``items`` argument, if specified, is a mapping instance used",
"is an existing value stored under the key ``k``. This function is used",
"item with the key ``k`` is read from the :class:`LazyDict`. The second argument",
"object ``lazy_dict`` under the key ``k``, that is, ``lazy_dict[k] = v``. Then subsequently",
"TypeError('pop expected at least 1 argument, got %d' % n_args) if n_args >",
"if k in self: return self[k] else: self[k] = x return x def",
"in a :class:`LazyDict` object ``lazy_dict`` under the key ``k``, that is, ``lazy_dict[k] =",
"None): \"\"\" Returns a LazyDict using the specified ``update_value`` function and optional initial",
"itertools.imap((lambda k : (k, self[k])), dict.iterkeys(self)) def pop(self, *args): n_args = len(args) if",
"expected at most 2 arguments, got %d' % n_args) k = args[0] if",
"if there is an existing value stored under the key ``k``. This function",
"is first updated, using the provided function, with the updated value then being",
"in self: return self[k] else: return x def values(self): return list(self.itervalues()) def items(self):",
"\"\"\" self.update_value = update_value if items is None: dict.__init__(self) else: dict.__init__(items) def __getitem__(self,",
"is equivalent to the following two statements:: lazy_dict[k] = update_value(k, v, (k in",
"the key ``k`` is first updated, using the provided function, with the updated",
"instance used to initialise the items in the :class:`LazyDict`. The ``update_value`` argument required",
"scenes whenever values are read from the dictionary. The optional ``items`` argument, if",
"of the form: update_value(k, existing_value, member) -> updated_value This function is called whenever",
"from the dictionary. The optional ``items`` argument, if specified, is a mapping instance",
"__init__(self, update_value, items = None): \"\"\" Returns a LazyDict using the specified ``update_value``",
"k in self: return self[k] else: self[k] = x return x def get(self,",
"form: update_value(k, existing_value, member) -> updated_value This function is called whenever an item",
"== 2: return args[1] else: raise KeyError(str(k)) def popitem(self): key, value = dict.popitem(self)",
"manner:: v_updated = lazy_dict[k] is equivalent to the following two statements:: lazy_dict[k] =",
"k, x=None): if k in self: return self[k] else: self[k] = x return",
"itertools class LazyDict(dict): \"\"\" A dictionary type that lazily updates values when they",
"(k in lazy_dict)) Observe how the value stored in the :class:`LazyDict` under the",
"The third argument ``member`` is a boolean value indicating if there is an",
"with lazy evaluation on access, via a supplied update function \"\"\" import itertools",
"methods work as expected, with automatic lazy updates occuring behind the scenes whenever",
"not contained in the :class:`LazyDict`. The third argument ``member`` is a boolean value",
"x=None): if k in self: return self[k] else: return x def values(self): return",
"update_value(k, existing_value, member) -> updated_value This function is called whenever an item with",
"a function of the form: update_value(k, existing_value, member) -> updated_value This function is",
"are read from the dictionary. The optional ``items`` argument, if specified, is a",
"existing_value = dict.__getitem__(self, key) else: existing_value = None # ensure measurement is up",
"if specified, is a mapping instance used to initialise the items in the",
"usual dictionary methods work as expected, with automatic lazy updates occuring behind the",
"mapping instance used to initialise the items in the :class:`LazyDict`. The ``update_value`` argument",
"a LazyDict using the specified ``update_value`` function and optional initial dictionary arguments. \"\"\"",
"being the one returned. \"\"\" def __init__(self, update_value, items = None): \"\"\" Returns",
"used to initialise the items in the :class:`LazyDict`. The ``update_value`` argument required by",
"the :class:`LazyDict`. The second argument ``existing_value``, is the value corresponding to the key",
"(k, self[k])), dict.iterkeys(self)) def pop(self, *args): n_args = len(args) if n_args < 1:",
"key ``k`` is first updated, using the provided function, with the updated value",
"with the updated value then being the one returned. \"\"\" def __init__(self, update_value,",
"the key ``k`` is read from the :class:`LazyDict`. The second argument ``existing_value``, is",
"updated_value = self[key] del self[key] return key, updated_value def setdefault(self, k, x=None): if",
"dictionary methods work as expected, with automatic lazy updates occuring behind the scenes",
"specified ``update_value`` function and optional initial dictionary arguments. \"\"\" self.update_value = update_value if",
"update_value(k, v, (k in lazy_dict)) v_updated = update_value(k, v, (k in lazy_dict)) Observe",
"def popitem(self): key, value = dict.popitem(self) self[key] = value updated_value = self[key] del",
"return self[k] else: self[k] = x return x def get(self, k, x=None): if",
"there is an existing value stored under the key ``k``. This function is",
"updated_value def copy(self): return LazyDict(self.update_value, dict.copy(self)) def itervalues(self): return itertools.imap((lambda k : self[k]),",
"def setdefault(self, k, x=None): if k in self: return self[k] else: self[k] =",
"LazyDict(self.update_value, dict.copy(self)) def itervalues(self): return itertools.imap((lambda k : self[k]), dict.iterkeys(self)) def iteritems(self): return",
"v_updated = update_value(k, v, (k in lazy_dict)) Observe how the value stored in",
"in the :class:`LazyDict`, or ``None``, if the key ``k`` is not contained in",
"the updated value then being the one returned. \"\"\" def __init__(self, update_value, items",
"items = None): \"\"\" Returns a LazyDict using the specified ``update_value`` function and",
"k in self: return self[k] else: return x def values(self): return list(self.itervalues()) def",
"whenever an item with the key ``k`` is read from the :class:`LazyDict`. The",
"self[k])), dict.iterkeys(self)) def pop(self, *args): n_args = len(args) if n_args < 1: raise",
"self: value = self[k] del self[k] return value else: if n_args == 2:",
"least 1 argument, got %d' % n_args) if n_args > 2: raise TypeError('pop",
"the usual dictionary methods work as expected, with automatic lazy updates occuring behind",
"x=None): if k in self: return self[k] else: self[k] = x return x",
"if items is None: dict.__init__(self) else: dict.__init__(items) def __getitem__(self, key): member = dict.__contains__(self,",
"self[k] else: self[k] = x return x def get(self, k, x=None): if k",
"self[key] = updated_value return updated_value def copy(self): return LazyDict(self.update_value, dict.copy(self)) def itervalues(self): return",
"copy(self): return LazyDict(self.update_value, dict.copy(self)) def itervalues(self): return itertools.imap((lambda k : self[k]), dict.iterkeys(self)) def",
"value = dict.popitem(self) self[key] = value updated_value = self[key] del self[key] return key,",
"= x return x def get(self, k, x=None): if k in self: return",
"boolean value indicating if there is an existing value stored under the key",
"function of the form: update_value(k, existing_value, member) -> updated_value This function is called",
"updated_value This function is called whenever an item with the key ``k`` is",
"lazy_dict[k] is equivalent to the following two statements:: lazy_dict[k] = update_value(k, v, (k",
"def iteritems(self): return itertools.imap((lambda k : (k, self[k])), dict.iterkeys(self)) def pop(self, *args): n_args",
"got %d' % n_args) if n_args > 2: raise TypeError('pop expected at most",
"dict.copy(self)) def itervalues(self): return itertools.imap((lambda k : self[k]), dict.iterkeys(self)) def iteritems(self): return itertools.imap((lambda",
":class:`LazyDict`. Suppose that the value ``v`` has been stored in a :class:`LazyDict` object",
"key ``k``. This function is used as follows by the :class:`LazyDict`. Suppose that",
"the key ``k``, that is, ``lazy_dict[k] = v``. Then subsequently accessing this value",
": (k, self[k])), dict.iterkeys(self)) def pop(self, *args): n_args = len(args) if n_args <",
"been stored in a :class:`LazyDict` object ``lazy_dict`` under the key ``k``, that is,",
"got %d' % n_args) k = args[0] if k in self: value =",
"is up to date updated_value = self.update_value(key, existing_value, member) self[key] = updated_value return",
"update_value if items is None: dict.__init__(self) else: dict.__init__(items) def __getitem__(self, key): member =",
"read from the dictionary. The optional ``items`` argument, if specified, is a mapping",
"return updated_value def copy(self): return LazyDict(self.update_value, dict.copy(self)) def itervalues(self): return itertools.imap((lambda k :",
"dict.popitem(self) self[key] = value updated_value = self[key] del self[key] return key, updated_value def",
"The second argument ``existing_value``, is the value corresponding to the key ``k`` stored",
"member) self[key] = updated_value return updated_value def copy(self): return LazyDict(self.update_value, dict.copy(self)) def itervalues(self):",
"key ``k``, that is, ``lazy_dict[k] = v``. Then subsequently accessing this value in",
"is, ``lazy_dict[k] = v``. Then subsequently accessing this value in the usual manner::",
"they are accessed. All the usual dictionary methods work as expected, with automatic",
"def get(self, k, x=None): if k in self: return self[k] else: return x",
"get(self, k, x=None): if k in self: return self[k] else: return x def",
"third argument ``member`` is a boolean value indicating if there is an existing",
"raise KeyError(str(k)) def popitem(self): key, value = dict.popitem(self) self[key] = value updated_value =",
"the :class:`LazyDict` constructor must be a function of the form: update_value(k, existing_value, member)",
"the key ``k`` stored in the :class:`LazyDict`, or ``None``, if the key ``k``",
"updated, using the provided function, with the updated value then being the one",
"k = args[0] if k in self: value = self[k] del self[k] return",
"is called whenever an item with the key ``k`` is read from the",
"a supplied update function \"\"\" import itertools class LazyDict(dict): \"\"\" A dictionary type",
"dictionary type that lazily updates values when they are accessed. All the usual",
"values are read from the dictionary. The optional ``items`` argument, if specified, is",
"equivalent to the following two statements:: lazy_dict[k] = update_value(k, v, (k in lazy_dict))",
"``member`` is a boolean value indicating if there is an existing value stored",
"the items in the :class:`LazyDict`. The ``update_value`` argument required by the :class:`LazyDict` constructor",
"value in the usual manner:: v_updated = lazy_dict[k] is equivalent to the following",
":class:`LazyDict` object ``lazy_dict`` under the key ``k``, that is, ``lazy_dict[k] = v``. Then",
"the key ``k``. This function is used as follows by the :class:`LazyDict`. Suppose",
"the form: update_value(k, existing_value, member) -> updated_value This function is called whenever an",
"return itertools.imap((lambda k : self[k]), dict.iterkeys(self)) def iteritems(self): return itertools.imap((lambda k : (k,",
"is the value corresponding to the key ``k`` stored in the :class:`LazyDict`, or",
"else: self[k] = x return x def get(self, k, x=None): if k in",
"at least 1 argument, got %d' % n_args) if n_args > 2: raise",
"date updated_value = self.update_value(key, existing_value, member) self[key] = updated_value return updated_value def copy(self):",
"if n_args < 1: raise TypeError('pop expected at least 1 argument, got %d'",
"self[k] return value else: if n_args == 2: return args[1] else: raise KeyError(str(k))",
"This function is called whenever an item with the key ``k`` is read",
"the :class:`LazyDict`, or ``None``, if the key ``k`` is not contained in the",
"= dict.popitem(self) self[key] = value updated_value = self[key] del self[key] return key, updated_value",
"is not contained in the :class:`LazyDict`. The third argument ``member`` is a boolean",
"an existing value stored under the key ``k``. This function is used as",
"value stored under the key ``k``. This function is used as follows by",
"= update_value(k, v, (k in lazy_dict)) v_updated = update_value(k, v, (k in lazy_dict))",
"1 argument, got %d' % n_args) if n_args > 2: raise TypeError('pop expected",
"the :class:`LazyDict`. Suppose that the value ``v`` has been stored in a :class:`LazyDict`",
"The ``update_value`` argument required by the :class:`LazyDict` constructor must be a function of",
"an item with the key ``k`` is read from the :class:`LazyDict`. The second",
"self[key] del self[key] return key, updated_value def setdefault(self, k, x=None): if k in",
"``k``. This function is used as follows by the :class:`LazyDict`. Suppose that the",
"dict.__init__(self) else: dict.__init__(items) def __getitem__(self, key): member = dict.__contains__(self, key) if member: existing_value",
"that lazily updates values when they are accessed. All the usual dictionary methods",
"with automatic lazy updates occuring behind the scenes whenever values are read from",
"None # ensure measurement is up to date updated_value = self.update_value(key, existing_value, member)",
"is None: dict.__init__(self) else: dict.__init__(items) def __getitem__(self, key): member = dict.__contains__(self, key) if",
"len(args) if n_args < 1: raise TypeError('pop expected at least 1 argument, got",
"A dictionary type that lazily updates values when they are accessed. All the",
":class:`LazyDict`. The third argument ``member`` is a boolean value indicating if there is",
"dict.iterkeys(self)) def pop(self, *args): n_args = len(args) if n_args < 1: raise TypeError('pop",
"at most 2 arguments, got %d' % n_args) k = args[0] if k",
"= dict.__getitem__(self, key) else: existing_value = None # ensure measurement is up to",
"the scenes whenever values are read from the dictionary. The optional ``items`` argument,",
"a :class:`LazyDict` object ``lazy_dict`` under the key ``k``, that is, ``lazy_dict[k] = v``.",
"the :class:`LazyDict`. The third argument ``member`` is a boolean value indicating if there",
": self[k]), dict.iterkeys(self)) def iteritems(self): return itertools.imap((lambda k : (k, self[k])), dict.iterkeys(self)) def",
"%d' % n_args) if n_args > 2: raise TypeError('pop expected at most 2",
"self.update_value = update_value if items is None: dict.__init__(self) else: dict.__init__(items) def __getitem__(self, key):",
"second argument ``existing_value``, is the value corresponding to the key ``k`` stored in",
"argument ``member`` is a boolean value indicating if there is an existing value",
"= v``. Then subsequently accessing this value in the usual manner:: v_updated =",
"has been stored in a :class:`LazyDict` object ``lazy_dict`` under the key ``k``, that",
"in lazy_dict)) v_updated = update_value(k, v, (k in lazy_dict)) Observe how the value",
"return args[1] else: raise KeyError(str(k)) def popitem(self): key, value = dict.popitem(self) self[key] =",
"the provided function, with the updated value then being the one returned. \"\"\"",
"is a mapping instance used to initialise the items in the :class:`LazyDict`. The",
"via a supplied update function \"\"\" import itertools class LazyDict(dict): \"\"\" A dictionary",
"key ``k`` stored in the :class:`LazyDict`, or ``None``, if the key ``k`` is",
"itervalues(self): return itertools.imap((lambda k : self[k]), dict.iterkeys(self)) def iteritems(self): return itertools.imap((lambda k :",
"stored in a :class:`LazyDict` object ``lazy_dict`` under the key ``k``, that is, ``lazy_dict[k]",
"the usual manner:: v_updated = lazy_dict[k] is equivalent to the following two statements::",
"v``. Then subsequently accessing this value in the usual manner:: v_updated = lazy_dict[k]",
"indicating if there is an existing value stored under the key ``k``. This",
"\"\"\" def __init__(self, update_value, items = None): \"\"\" Returns a LazyDict using the",
"the value corresponding to the key ``k`` stored in the :class:`LazyDict`, or ``None``,",
"k in self: value = self[k] del self[k] return value else: if n_args",
"``items`` argument, if specified, is a mapping instance used to initialise the items",
"the value stored in the :class:`LazyDict` under the key ``k`` is first updated,",
"and optional initial dictionary arguments. \"\"\" self.update_value = update_value if items is None:",
"self: return self[k] else: self[k] = x return x def get(self, k, x=None):",
"lazy updates occuring behind the scenes whenever values are read from the dictionary.",
"corresponding to the key ``k`` stored in the :class:`LazyDict`, or ``None``, if the",
"self: return self[k] else: return x def values(self): return list(self.itervalues()) def items(self): return",
"type that lazily updates values when they are accessed. All the usual dictionary",
"if k in self: return self[k] else: return x def values(self): return list(self.itervalues())",
"itertools.imap((lambda k : self[k]), dict.iterkeys(self)) def iteritems(self): return itertools.imap((lambda k : (k, self[k])),",
"x return x def get(self, k, x=None): if k in self: return self[k]",
"required by the :class:`LazyDict` constructor must be a function of the form: update_value(k,",
"then being the one returned. \"\"\" def __init__(self, update_value, items = None): \"\"\"",
"def __init__(self, update_value, items = None): \"\"\" Returns a LazyDict using the specified",
"= self.update_value(key, existing_value, member) self[key] = updated_value return updated_value def copy(self): return LazyDict(self.update_value,",
"called whenever an item with the key ``k`` is read from the :class:`LazyDict`.",
"if k in self: value = self[k] del self[k] return value else: if",
"2: raise TypeError('pop expected at most 2 arguments, got %d' % n_args) k",
"returned. \"\"\" def __init__(self, update_value, items = None): \"\"\" Returns a LazyDict using",
"``None``, if the key ``k`` is not contained in the :class:`LazyDict`. The third",
"else: dict.__init__(items) def __getitem__(self, key): member = dict.__contains__(self, key) if member: existing_value =",
"value then being the one returned. \"\"\" def __init__(self, update_value, items = None):",
"return LazyDict(self.update_value, dict.copy(self)) def itervalues(self): return itertools.imap((lambda k : self[k]), dict.iterkeys(self)) def iteritems(self):",
"the specified ``update_value`` function and optional initial dictionary arguments. \"\"\" self.update_value = update_value",
"dict.__getitem__(self, key) else: existing_value = None # ensure measurement is up to date",
"= args[0] if k in self: value = self[k] del self[k] return value",
"first updated, using the provided function, with the updated value then being the",
"are accessed. All the usual dictionary methods work as expected, with automatic lazy",
"self[key] = value updated_value = self[key] del self[key] return key, updated_value def setdefault(self,",
"del self[k] return value else: if n_args == 2: return args[1] else: raise",
"%d' % n_args) k = args[0] if k in self: value = self[k]",
"n_args < 1: raise TypeError('pop expected at least 1 argument, got %d' %",
"v_updated = lazy_dict[k] is equivalent to the following two statements:: lazy_dict[k] = update_value(k,",
"accessed. All the usual dictionary methods work as expected, with automatic lazy updates",
"update function \"\"\" import itertools class LazyDict(dict): \"\"\" A dictionary type that lazily",
"value indicating if there is an existing value stored under the key ``k``.",
"measurement is up to date updated_value = self.update_value(key, existing_value, member) self[key] = updated_value",
"popitem(self): key, value = dict.popitem(self) self[key] = value updated_value = self[key] del self[key]",
"value updated_value = self[key] del self[key] return key, updated_value def setdefault(self, k, x=None):",
"``update_value`` argument required by the :class:`LazyDict` constructor must be a function of the",
"if the key ``k`` is not contained in the :class:`LazyDict`. The third argument",
"\"\"\" Dictionary with lazy evaluation on access, via a supplied update function \"\"\"",
"key): member = dict.__contains__(self, key) if member: existing_value = dict.__getitem__(self, key) else: existing_value",
"x def get(self, k, x=None): if k in self: return self[k] else: return",
"on access, via a supplied update function \"\"\" import itertools class LazyDict(dict): \"\"\"",
"when they are accessed. All the usual dictionary methods work as expected, with",
"by the :class:`LazyDict` constructor must be a function of the form: update_value(k, existing_value,",
"updated value then being the one returned. \"\"\" def __init__(self, update_value, items =",
"KeyError(str(k)) def popitem(self): key, value = dict.popitem(self) self[key] = value updated_value = self[key]",
"under the key ``k``. This function is used as follows by the :class:`LazyDict`.",
"value = self[k] del self[k] return value else: if n_args == 2: return",
"function is used as follows by the :class:`LazyDict`. Suppose that the value ``v``",
"% n_args) if n_args > 2: raise TypeError('pop expected at most 2 arguments,",
"self[key] return key, updated_value def setdefault(self, k, x=None): if k in self: return",
"updated_value def setdefault(self, k, x=None): if k in self: return self[k] else: self[k]",
"n_args) if n_args > 2: raise TypeError('pop expected at most 2 arguments, got"
] |
[
"generated, do not edit. ######################################## from pyvisdk.thirdparty import Enum EntityReferenceEntityType = Enum( 'cluster',",
"from pyvisdk.thirdparty import Enum EntityReferenceEntityType = Enum( 'cluster', 'datacenter', 'datastore', 'host', 'nasMount', 'resourcePool',",
"'datacenter', 'datastore', 'host', 'nasMount', 'resourcePool', 'scsiAdapter', 'scsiPath', 'scsiTarget', 'scsiVolume', 'storagePod', 'vm', 'vmFile', )",
"pyvisdk.thirdparty import Enum EntityReferenceEntityType = Enum( 'cluster', 'datacenter', 'datastore', 'host', 'nasMount', 'resourcePool', 'scsiAdapter',",
"not edit. ######################################## from pyvisdk.thirdparty import Enum EntityReferenceEntityType = Enum( 'cluster', 'datacenter', 'datastore',",
"# Automatically generated, do not edit. ######################################## from pyvisdk.thirdparty import Enum EntityReferenceEntityType =",
"import Enum EntityReferenceEntityType = Enum( 'cluster', 'datacenter', 'datastore', 'host', 'nasMount', 'resourcePool', 'scsiAdapter', 'scsiPath',",
"######################################## from pyvisdk.thirdparty import Enum EntityReferenceEntityType = Enum( 'cluster', 'datacenter', 'datastore', 'host', 'nasMount',",
"edit. ######################################## from pyvisdk.thirdparty import Enum EntityReferenceEntityType = Enum( 'cluster', 'datacenter', 'datastore', 'host',",
"Enum EntityReferenceEntityType = Enum( 'cluster', 'datacenter', 'datastore', 'host', 'nasMount', 'resourcePool', 'scsiAdapter', 'scsiPath', 'scsiTarget',",
"= Enum( 'cluster', 'datacenter', 'datastore', 'host', 'nasMount', 'resourcePool', 'scsiAdapter', 'scsiPath', 'scsiTarget', 'scsiVolume', 'storagePod',",
"Automatically generated, do not edit. ######################################## from pyvisdk.thirdparty import Enum EntityReferenceEntityType = Enum(",
"EntityReferenceEntityType = Enum( 'cluster', 'datacenter', 'datastore', 'host', 'nasMount', 'resourcePool', 'scsiAdapter', 'scsiPath', 'scsiTarget', 'scsiVolume',",
"Enum( 'cluster', 'datacenter', 'datastore', 'host', 'nasMount', 'resourcePool', 'scsiAdapter', 'scsiPath', 'scsiTarget', 'scsiVolume', 'storagePod', 'vm',",
"'cluster', 'datacenter', 'datastore', 'host', 'nasMount', 'resourcePool', 'scsiAdapter', 'scsiPath', 'scsiTarget', 'scsiVolume', 'storagePod', 'vm', 'vmFile',",
"######################################## # Automatically generated, do not edit. ######################################## from pyvisdk.thirdparty import Enum EntityReferenceEntityType",
"do not edit. ######################################## from pyvisdk.thirdparty import Enum EntityReferenceEntityType = Enum( 'cluster', 'datacenter',"
] |
[
"checkSceneExists(scriptScene, projectScene): # Check and copy HDR if not os.path.exists(projectScene): copyfile(scriptScene, projectScene) else:",
"now defined cmds.floatSliderGrp(scaleSlider, edit = True, dragCommand = partial(updateSize, scaleSlider), changeCommand = partial(updateSize,",
"& ColorChecker exists in the RmanAutoLookdev folder def checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex,",
"cmds.menuItem(label = \"Constant Color\") cmds.menuItem(label = \"Checker\") cmds.optionMenu(cycloStyle, edit = True, changeCommand =",
"already in the project RmanAutoLookdev folder') # Check and copy ColorChecker if not",
"cmds.optionMenu(bgStyle, edit = True, changeCommand = partial(updateBgType, bgStyle)) # Set Cyclo Type cycloStyle",
"in the beauty alpha and deactivate the learn light from results def setRmanShadow():",
"2019 DUBOIX <NAME> and <NAME> - Licensed under the Apache License, Version 2.0",
"if it is already displayed on the screen if cmds.window(\"winID\", exists = True):",
"of the License at: #### http://www.apache.org/licenses/LICENSE-2.0 def main(): # Kill existing window if",
"field = True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(tweakHeight, edit = True,",
"= cmds.floatSliderGrp(label = \"Lookdev Global Scale\", min=0.001, max=100, value=1, precision = 3, field",
"# Set Cyclo Type cycloStyle = cmds.optionMenu(label = \"Cyclo Type\", changeCommand = 'placeholder')",
"the toggle for the colorchecker colorCheck = cmds.optionMenu(label =\"Color Checker\", changeCommand = 'placeholder')",
"# Set the shadow output in the Beauty Alpha setRmanShadow() # Get the",
"output in the Beauty Alpha setRmanShadow() # Get the string for the maya",
"script directory def getScriptPath(): scriptPath = os.path.expanduser('~') + \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath) # Check if",
"RmanAutoLookdev folder') # Check and copy ColorChecker # Importing the lookdev as reference",
"results def setRmanShadow(): print(\"Set Renderman Shadow output in Beauty's Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1) cmds.setAttr(\"rmanGlobals.learnLightSelection\",",
"if ballsType == \"Full\": print(\"Setting the Shader Balls to full type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0)",
"# Check and copy HDR if not os.path.exists(projectScene): copyfile(scriptScene, projectScene) else: print('Lookdev scene",
"Lookdev as reference if it does not exist in the scene importLookdev(projectScene) #",
"path project = getProjectPath() srcIMG = project + \"sourceimages/\" file = cmds.fileDialog2(fileFilter =",
"# Set background Type bgStyle = cmds.optionMenu( label=\"Background Type\", changeCommand=\"placeholder\") cmds.menuItem( label=\"Plane\" )",
"cmds.button(label = \"AutoLookdev\", command = 'lookdevAuto()') # Layout cmds.separator( height = 40 )",
"cmds.menuItem(label = \"telelens - 85mm\") cmds.menuItem(label = \"widelens - 28mm\") cmds.optionMenu(cameraFocal, edit =",
"cmds.floatSliderGrp(tweakDepth, edit = True, dragCommand = partial(camDolly, tweakDepth), changeCommand = partial(camDolly, tweakDepth)) #",
"scriptHdrHdr = scriptFolder + \"DefaultHDR.hdr\" scriptColorCheckerTex = scriptFolder + \"DefaultColorChecker.png.tex\" scriptColorCheckerPng = scriptFolder",
"print(\"Setting the Background to infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0) else: print(\"Setting the Background to Cyclo",
"cmds.menuItem( label=\"Plane\" ) cmds.menuItem( label=\"Cyclo\" ) cmds.optionMenu(bgStyle, edit = True, changeCommand = partial(updateBgType,",
"checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng): # Check and copy HDR",
"label=\"Plane\" ) cmds.menuItem( label=\"Cyclo\" ) cmds.optionMenu(bgStyle, edit = True, changeCommand = partial(updateBgType, bgStyle))",
"cmds.optionMenu(label = \"Shader Balls\", changeCommand = 'placeholder') cmds.menuItem(label = \"Full\") cmds.menuItem(label = \"Minimal\")",
"the HDRI and ColorChecker if needed srcIMGhdrTex = srcIMGlookDev + \"/DefaultHDR.hdr.tex\" srcIMGhdrHdr =",
"+ \"DefaultColorChecker.png.tex\" scriptColorCheckerPng = scriptFolder + \"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng,",
"font = \"boldLabelFont\", backgroundColor = [0.290, 0.705, 0.909], enableBackground = True, height =",
"cmds.file(projectScene, r=True, uns = False ) # Set the output of the shadows",
"Background to Cyclo \") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1) #### Update Cyclo Type #### def updateCycloType(cycloStyle,",
"True, changeCommand = partial(setCamFocal, cameraFocal)) tweakHeight = cmds.floatSliderGrp(label=\"Tweak Camera Heigth\", min = -50,",
"only to define varible scaleSlider = cmds.floatSliderGrp(label = \"Lookdev Global Scale\", min=0.001, max=100,",
"= 'placeholder') # Slider calling the function as the variable is now defined",
"the script Path folder before copying the HDRI scriptFolder = getScriptPath() # Create",
"Value to \"+ str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue) def shadingBallType(shadingBalls, *args): ballsType = (cmds.optionMenu(shadingBalls, q=True,",
"q=True, v=True)) if bgType == \"Plane\": print(\"Setting the Background to infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0)",
"def checkSceneExists(scriptScene, projectScene): # Check and copy HDR if not os.path.exists(projectScene): copyfile(scriptScene, projectScene)",
"changeCommand = partial(updateBgType, bgStyle)) # Set Cyclo Type cycloStyle = cmds.optionMenu(label = \"Cyclo",
"(cmds.optionMenu(shadingBalls, q=True, v=True)) if ballsType == \"Full\": print(\"Setting the Shader Balls to full",
"85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465) else: print(\"setting camera focal length to 28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\",",
"beauty alpha and deactivate the learn light from results def setRmanShadow(): print(\"Set Renderman",
"Global Scale\", min=0.001, max=100, value=1, precision = 3, field = True, dragCommand ='placeholder',",
"= 40 ) # Set number of Shading balls shadingBalls = cmds.optionMenu(label =",
"= \"Shader Balls\", changeCommand = 'placeholder') cmds.menuItem(label = \"Full\") cmds.menuItem(label = \"Minimal\") cmds.optionMenu(shadingBalls,",
"= -100, max = 100, value = 0, step = 0.5, field =",
"cmds.separator( height = 40 ) # Change HDRI changeHDR = cmds.button(label = \"Change",
"http://www.apache.org/licenses/LICENSE-2.0 def main(): # Kill existing window if it is already displayed on",
"srcIMGlookDev = srcIMG + \"RmanAutoLookdev\" # Get the script Path folder before copying",
"scene exists and copies it if not scriptScene = scriptFolder + \"Lookdev_Scene_v01.ma\" projectScene",
"length and compensates for it cameraFocal = cmds.optionMenu(label = \"Camera focal length\", changeCommand",
"to infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0) else: print(\"Setting the Background to Cyclo \") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1)",
"changeHDRdef(): # Get the new HDR path project = getProjectPath() srcIMG = project",
"# Force update if the value is entered as string # Layout cmds.separator(",
"existing window if it is already displayed on the screen if cmds.window(\"winID\", exists",
"is already displayed on the screen if cmds.window(\"winID\", exists = True): cmds.deleteUI(\"winID\") #",
"the value is entered as string # Layout cmds.separator( height = 40 )",
"is entered as string # Layout cmds.separator( height = 40 ) # Set",
"import copyfile from functools import partial #### Copyright 2019 DUBOIX <NAME> and <NAME>",
"+ \"DefaultHDR.hdr\" scriptColorCheckerTex = scriptFolder + \"DefaultColorChecker.png.tex\" scriptColorCheckerPng = scriptFolder + \"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex,",
"Global Scale # Hidden slider, serves only to define varible scaleSlider = cmds.floatSliderGrp(label",
"Dolly\", min = -100, max = 100, value = 0, step = 0.5,",
"= 'placeholder') cmds.menuItem(label = \"classic - 50 mm\") cmds.menuItem(label = \"telelens - 85mm\")",
"project + \"sourceimages/\" srcIMGlookDev = srcIMG + \"RmanAutoLookdev\" # Get the script Path",
"= partial(setCamFocal, cameraFocal)) tweakHeight = cmds.floatSliderGrp(label=\"Tweak Camera Heigth\", min = -50, max =",
"= (cmds.floatSliderGrp(colorStyle, q=True, v=True)) print(\"Setting the Background Value to \"+ str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue)",
"def setCamFocal(cameraFocal, *args): camFocLength = (cmds.optionMenu(cameraFocal, q=True, v=True)) if camFocLength == \"classic -",
"srcIMG ) # Change the path to the new HDR if __name__ ==",
"print(\"Import Lookdev as reference\") cmds.file(projectScene, r=True, uns = False ) # Set the",
"the Apache License, Version 2.0 (the \"License\"); #### you may not use this",
"update if the value is entered as string # Layout cmds.separator( height =",
"= 40 ) # Set Global Scale # Hidden slider, serves only to",
"0, max = 1, value = 0.02, precision = 3, field = True,",
"= (cmds.optionMenu(colorCheck, q=True, v=True)) if colorCheckerState == \"On\": print(\"Showing the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0)",
"= partial(udateBgValue, colorStyle)) # Layout cmds.separator( height = 40 ) # Set number",
"cmds.separator( height = 40 ) # Set number of Shading balls shadingBalls =",
"40 ) # Change HDRI changeHDR = cmds.button(label = \"Change HDR\", command =",
"it is already displayed on the screen if cmds.window(\"winID\", exists = True): cmds.deleteUI(\"winID\")",
"bgStyle)) # Set Cyclo Type cycloStyle = cmds.optionMenu(label = \"Cyclo Type\", changeCommand =",
"folder') def checkSceneExists(scriptScene, projectScene): # Check and copy HDR if not os.path.exists(projectScene): copyfile(scriptScene,",
"= 0, max = 1, value = 0.02, precision = 3, field =",
"= True, dragCommand ='placeholder', changeCommand = 'placeholder') # Slider calling the function as",
"= partial(updateBgType, bgStyle)) # Set Cyclo Type cycloStyle = cmds.optionMenu(label = \"Cyclo Type\",",
"else: if camFocLength == \"telelens - 85mm\": print(\"Setting camera focal length to 85mm\")",
"= cmds.optionMenu(label = \"Camera focal length\", changeCommand = 'placeholder') cmds.menuItem(label = \"classic -",
"cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly) def changeHDRdef(): # Get the new HDR path project = getProjectPath()",
"windowWidth)]) cmds.text( label = \"Renderman Auto Lookdev\", font = \"boldLabelFont\", backgroundColor = [0.290,",
"partial(colorCheckerToggle, colorCheck)) # Layout cmds.separator( height = 40 ) # Set the camera",
"Get the script Path folder before copying the HDRI scriptFolder = getScriptPath() #",
"balls shadingBalls = cmds.optionMenu(label = \"Shader Balls\", changeCommand = 'placeholder') cmds.menuItem(label = \"Full\")",
"Check if the Lookdev scene exists and copies it if not scriptScene =",
"= 'placeholder') cmds.menuItem(label = \"Full\") cmds.menuItem(label = \"Minimal\") cmds.optionMenu(shadingBalls, edit = True, changeCommand",
"True, dragCommand ='placeholder', changeCommand = 'placeholder') # Slider calling the function as the",
"#### Update Background Type #### def updateBgType(bgStyle, *args): bgType = (cmds.optionMenu(bgStyle, q=True, v=True))",
"type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1) def colorCheckerToggle(colorCheck, *args): colorCheckerState = (cmds.optionMenu(colorCheck, q=True, v=True)) if colorCheckerState",
"0) else: print(\"Hiding the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1) def setCamFocal(cameraFocal, *args): camFocLength = (cmds.optionMenu(cameraFocal,",
"in the project RmanAutoLookdev folder') # Check and copy ColorChecker if not os.path.exists(srcIMGcolorCheckerTex):",
"height = 40 ) # Change HDRI changeHDR = cmds.button(label = \"Change HDR\",",
"else: print('default HDR is already in the project RmanAutoLookdev folder') # Check and",
"varible scaleSlider = cmds.floatSliderGrp(label = \"Lookdev Global Scale\", min=0.001, max=100, value=1, precision =",
"scriptPath = os.path.expanduser('~') + \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath) # Check if the RmanAutoLookdev folder exists",
"to constant color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1) else : print(\"Setting the Cyclo to Grid texture\")",
"copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng) else: print('default Color Checker is already in the project RmanAutoLookdev folder')",
"Cyclo Type cycloStyle = cmds.optionMenu(label = \"Cyclo Type\", changeCommand = 'placeholder') cmds.menuItem(label =",
"= \"widelens - 28mm\") cmds.optionMenu(cameraFocal, edit = True, changeCommand = partial(setCamFocal, cameraFocal)) tweakHeight",
"not os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev) else: print(\"RmanAutoLookdev folder already exists\") # Check if the HDRI",
"tweakDepth = cmds.floatSliderGrp(label=\"Tweak Camera Dolly\", min = -100, max = 100, value =",
"updateCycloType(cycloStyle, *args): cycloType = (cmds.optionMenu(cycloStyle, q=True, v=True)) if cycloType == \"Constant Color\": print(\"Setting",
"= 'placeholder') cmds.floatSliderGrp(tweakHeight, edit = True, dragCommand = partial(camHeight, tweakHeight), changeCommand = partial(camHeight,",
"cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1) def colorCheckerToggle(colorCheck, *args): colorCheckerState = (cmds.optionMenu(colorCheck, q=True, v=True)) if colorCheckerState ==",
"= True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(colorStyle, edit = True, dragCommand",
"= cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev', widthHeight=(windowWidth, 400) ) #### Header #### cmds.rowColumnLayout( numberOfColumns=1, columnWidth=[(1,",
"= True, dragCommand = partial(camDolly, tweakDepth), changeCommand = partial(camDolly, tweakDepth)) # Layout cmds.separator(",
"iconName='RmanLookDev', widthHeight=(windowWidth, 400) ) #### Header #### cmds.rowColumnLayout( numberOfColumns=1, columnWidth=[(1, windowWidth)]) cmds.text( label",
"cmds import os from shutil import copyfile from functools import partial #### Copyright",
"serves only to define varible scaleSlider = cmds.floatSliderGrp(label = \"Lookdev Global Scale\", min=0.001,",
"changeCommand=\"placeholder\") cmds.menuItem( label=\"Plane\" ) cmds.menuItem( label=\"Cyclo\" ) cmds.optionMenu(bgStyle, edit = True, changeCommand =",
"= cmds.floatSliderGrp(label=\"Tweak Camera Heigth\", min = -50, max = 50, value = 0,",
"Size #### def updateSize(scaleSlider, *args): globalScale = (cmds.floatSliderGrp(scaleSlider, q=True, v=True)) print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale)",
"(cmds.optionMenu(bgStyle, q=True, v=True)) if bgType == \"Plane\": print(\"Setting the Background to infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\",",
"Cyclo to Grid texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0) #### Update Background Color #### def udateBgValue(colorStyle,",
"define varible scaleSlider = cmds.floatSliderGrp(label = \"Lookdev Global Scale\", min=0.001, max=100, value=1, precision",
"camera focal length to 50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103) else: if camFocLength ==",
"def updateSize(scaleSlider, *args): globalScale = (cmds.floatSliderGrp(scaleSlider, q=True, v=True)) print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale) #### Update",
"- 85mm\": print(\"Setting camera focal length to 85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465) else:",
"\"boldLabelFont\", backgroundColor = [0.290, 0.705, 0.909], enableBackground = True, height = 80) cmds.separator()",
"does not exist in the scene importLookdev(projectScene) # Set the shadow output in",
"changeCommand = partial(colorCheckerToggle, colorCheck)) # Layout cmds.separator( height = 40 ) # Set",
"cmds.text( label = \"Renderman Auto Lookdev\", font = \"boldLabelFont\", backgroundColor = [0.290, 0.705,",
"'placeholder') cmds.floatSliderGrp(colorStyle, edit = True, dragCommand = partial(udateBgValue, colorStyle), changeCommand = partial(udateBgValue, colorStyle))",
"height = 40 ) # Set number of Shading balls shadingBalls = cmds.optionMenu(label",
"full type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0) else : print(\"Setting the Shader Balls to minimal type\")",
"= cmds.fileDialog2(fileFilter = \"*.hdr\", dialogStyle = 1, fileMode = 1, dir = srcIMG",
"# Check and copy ColorChecker if not os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng) else:",
"project + \"sourceimages/\" file = cmds.fileDialog2(fileFilter = \"*.hdr\", dialogStyle = 1, fileMode =",
"cmds.floatSliderGrp(tweakHeight, edit = True, dragCommand = partial(camHeight, tweakHeight), changeCommand = partial(camHeight, tweakHeight)) tweakDepth",
"Importing the lookdev as reference def importLookdev(projectScene): print(\"Import Lookdev as reference\") cmds.file(projectScene, r=True,",
"project and check if AutoLookdev folder is present project = getProjectPath() srcIMG =",
"= True, changeCommand = partial(colorCheckerToggle, colorCheck)) # Layout cmds.separator( height = 40 )",
"folder') # Check and copy ColorChecker # Importing the lookdev as reference def",
"def udateBgValue(colorStyle, *args): bgValue = (cmds.floatSliderGrp(colorStyle, q=True, v=True)) print(\"Setting the Background Value to",
"rootDirectory=True ) return(realProject) # Get the string for the script directory def getScriptPath():",
"= \"On\") cmds.menuItem(label = \"Off\") cmds.optionMenu(colorCheck, edit = True, changeCommand = partial(colorCheckerToggle, colorCheck))",
"# Layout cmds.separator( height = 40 ) # Change HDRI changeHDR = cmds.button(label",
"if it does not exist in the scene importLookdev(projectScene) # Set the shadow",
"to full type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0) else : print(\"Setting the Shader Balls to minimal",
"Check and copy ColorChecker if not os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng) else: print('default",
"True, dragCommand = 'placeholder', changeCommand = 'placeholder' ) cmds.floatSliderGrp(tweakDepth, edit = True, dragCommand",
"field = True, dragCommand = 'placeholder', changeCommand = 'placeholder' ) cmds.floatSliderGrp(tweakDepth, edit =",
"is already in the project RmanAutoLookdev folder') # Check and copy ColorChecker #",
"srcIMGcolorCheckerTex, srcIMGcolorCheckerPng) # Check if the Lookdev scene exists and copies it if",
"ballsType == \"Full\": print(\"Setting the Shader Balls to full type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0) else",
"srcIMGhdrHdr = srcIMGlookDev + \"/DefaultHDR.hdr\" srcIMGcolorCheckerTex = srcIMGlookDev + \"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng = srcIMGlookDev",
"= srcIMGlookDev + \"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng = srcIMGlookDev + \"/DefaultColorChecker.png\" scriptHdrTex = scriptFolder +",
"= cmds.floatSliderGrp(label=\"Tweak Camera Dolly\", min = -100, max = 100, value = 0,",
"srcIMGlookDev + \"/DefaultColorChecker.png\" scriptHdrTex = scriptFolder + \"DefaultHDR.hdr.tex\" scriptHdrHdr = scriptFolder + \"DefaultHDR.hdr\"",
"= 3, field = True, dragCommand ='placeholder', changeCommand = 'placeholder') # Slider calling",
"edit = True, dragCommand = partial(udateBgValue, colorStyle), changeCommand = partial(udateBgValue, colorStyle)) # Layout",
"= 'placeholder') cmds.menuItem(label = \"On\") cmds.menuItem(label = \"Off\") cmds.optionMenu(colorCheck, edit = True, changeCommand",
"ColorChecker exists in the RmanAutoLookdev folder def checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng,",
"if not os.path.exists(projectScene): copyfile(scriptScene, projectScene) else: print('Lookdev scene is already in the project",
"widthHeight=(windowWidth, 400) ) #### Header #### cmds.rowColumnLayout( numberOfColumns=1, columnWidth=[(1, windowWidth)]) cmds.text( label =",
"max = 1, value = 0.02, precision = 3, field = True, dragCommand",
"r=True, uns = False ) # Set the output of the shadows in",
"screen if cmds.window(\"winID\", exists = True): cmds.deleteUI(\"winID\") # Window windowWidth = 400 window",
"label = \"Renderman Auto Lookdev\", font = \"boldLabelFont\", backgroundColor = [0.290, 0.705, 0.909],",
"colorCheckerState = (cmds.optionMenu(colorCheck, q=True, v=True)) if colorCheckerState == \"On\": print(\"Showing the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\",",
"ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1) def setCamFocal(cameraFocal, *args): camFocLength = (cmds.optionMenu(cameraFocal, q=True, v=True)) if camFocLength",
"project def checkFolderExists(srcIMGlookDev): if not os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev) else: print(\"RmanAutoLookdev folder already exists\") #",
"(cmds.optionMenu(colorCheck, q=True, v=True)) if colorCheckerState == \"On\": print(\"Showing the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0) else:",
"the maya project path def getProjectPath(): realProject = cmds.workspace( q=True, rootDirectory=True ) return(realProject)",
"Lookdev set up definitions #### def lookdevAuto(): # Get srcIMG folder in project",
"dragCommand ='placeholder', changeCommand = 'placeholder') # Slider calling the function as the variable",
"scriptColorCheckerTex = scriptFolder + \"DefaultColorChecker.png.tex\" scriptColorCheckerPng = scriptFolder + \"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex,",
"in the project RmanAutoLookdev folder') def checkSceneExists(scriptScene, projectScene): # Check and copy HDR",
"Licensed under the Apache License, Version 2.0 (the \"License\"); #### you may not",
"copy ColorChecker if not os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng) else: print('default Color Checker",
"a copy of the License at: #### http://www.apache.org/licenses/LICENSE-2.0 def main(): # Kill existing",
"Set background Type bgStyle = cmds.optionMenu( label=\"Background Type\", changeCommand=\"placeholder\") cmds.menuItem( label=\"Plane\" ) cmds.menuItem(",
"changeCommand = 'placeholder') cmds.menuItem(label = \"On\") cmds.menuItem(label = \"Off\") cmds.optionMenu(colorCheck, edit = True,",
"in project and check if AutoLookdev folder is present project = getProjectPath() srcIMG",
"os.path.expanduser('~') + \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath) # Check if the RmanAutoLookdev folder exists in the",
"0) #### Update Background Color #### def udateBgValue(colorStyle, *args): bgValue = (cmds.floatSliderGrp(colorStyle, q=True,",
"Shading balls shadingBalls = cmds.optionMenu(label = \"Shader Balls\", changeCommand = 'placeholder') cmds.menuItem(label =",
"exist in the scene importLookdev(projectScene) # Set the shadow output in the Beauty",
"+ \"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng = srcIMGlookDev + \"/DefaultColorChecker.png\" scriptHdrTex = scriptFolder + \"DefaultHDR.hdr.tex\" scriptHdrHdr",
"srcIMGlookDev + \"/DefaultHDR.hdr.tex\" srcIMGhdrHdr = srcIMGlookDev + \"/DefaultHDR.hdr\" srcIMGcolorCheckerTex = srcIMGlookDev + \"/DefaultColorChecker.png.tex\"",
"the shadows in the beauty alpha and deactivate the learn light from results",
"= True, dragCommand = partial(camHeight, tweakHeight), changeCommand = partial(camHeight, tweakHeight)) tweakDepth = cmds.floatSliderGrp(label=\"Tweak",
"Layout cmds.separator( height = 40 ) # Set Global Scale # Hidden slider,",
"colorStyle)) # Layout cmds.separator( height = 40 ) # Set number of Shading",
"length\", changeCommand = 'placeholder') cmds.menuItem(label = \"classic - 50 mm\") cmds.menuItem(label = \"telelens",
"colorStyle), changeCommand = partial(udateBgValue, colorStyle)) # Layout cmds.separator( height = 40 ) #",
"to define varible scaleSlider = cmds.floatSliderGrp(label = \"Lookdev Global Scale\", min=0.001, max=100, value=1,",
"0, step = 0.5, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder')",
"scriptFolder + \"Lookdev_Scene_v01.ma\" projectScene = srcIMGlookDev + \"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene, projectScene) # Import Lookdev",
"= 0.5, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder' ) cmds.floatSliderGrp(tweakDepth,",
"= \"Off\") cmds.optionMenu(colorCheck, edit = True, changeCommand = partial(colorCheckerToggle, colorCheck)) # Layout cmds.separator(",
"Slider calling the function as the variable is now defined cmds.floatSliderGrp(scaleSlider, edit =",
"shadows in the beauty alpha and deactivate the learn light from results def",
"print(\"RmanAutoLookdev folder already exists\") # Check if the HDRI & ColorChecker exists in",
"setRmanShadow(): print(\"Set Renderman Shadow output in Beauty's Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1) cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0) ####",
"of Shading balls shadingBalls = cmds.optionMenu(label = \"Shader Balls\", changeCommand = 'placeholder') cmds.menuItem(label",
"string for the maya project path def getProjectPath(): realProject = cmds.workspace( q=True, rootDirectory=True",
"toggle for the colorchecker colorCheck = cmds.optionMenu(label =\"Color Checker\", changeCommand = 'placeholder') cmds.menuItem(label",
"projectScene) else: print('Lookdev scene is already in the project RmanAutoLookdev folder') # Check",
"bgStyle = cmds.optionMenu( label=\"Background Type\", changeCommand=\"placeholder\") cmds.menuItem( label=\"Plane\" ) cmds.menuItem( label=\"Cyclo\" ) cmds.optionMenu(bgStyle,",
"= True, dragCommand = 'placeholder', changeCommand = 'placeholder' ) cmds.floatSliderGrp(tweakDepth, edit = True,",
"srcIMGcolorCheckerPng) else: print('default Color Checker is already in the project RmanAutoLookdev folder') def",
"import maya.cmds as cmds import os from shutil import copyfile from functools import",
"columnWidth=[(1, windowWidth)]) cmds.text( label = \"Renderman Auto Lookdev\", font = \"boldLabelFont\", backgroundColor =",
"edit = True, dragCommand = partial(updateSize, scaleSlider), changeCommand = partial(updateSize, scaleSlider)) # Force",
"Checker is already in the project RmanAutoLookdev folder') def checkSceneExists(scriptScene, projectScene): # Check",
"Color #### def udateBgValue(colorStyle, *args): bgValue = (cmds.floatSliderGrp(colorStyle, q=True, v=True)) print(\"Setting the Background",
"edit = True, changeCommand = partial(shadingBallType, shadingBalls)) # Set the toggle for the",
"to 50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103) else: if camFocLength == \"telelens - 85mm\":",
"# Change the path to the new HDR if __name__ == '__main__': main()",
"= project + \"sourceimages/\" srcIMGlookDev = srcIMG + \"RmanAutoLookdev\" # Get the script",
"set up definitions #### def lookdevAuto(): # Get srcIMG folder in project and",
"# Layout cmds.separator( height = 40 ) # Set Global Scale # Hidden",
"in compliance with the License. You may obtain a copy of the License",
"importLookdev(projectScene): print(\"Import Lookdev as reference\") cmds.file(projectScene, r=True, uns = False ) # Set",
"Background Color colorStyle = cmds.floatSliderGrp(label = \"Background Value\", min = 0, max =",
"Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1) cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0) #### Update Lookdev Size #### def updateSize(scaleSlider, *args):",
"= \"Cyclo Type\", changeCommand = 'placeholder') cmds.menuItem(label = \"Constant Color\") cmds.menuItem(label = \"Checker\")",
") cmds.floatSliderGrp(tweakDepth, edit = True, dragCommand = partial(camDolly, tweakDepth), changeCommand = partial(camDolly, tweakDepth))",
"folder before copying the HDRI scriptFolder = getScriptPath() # Create the folder if",
"changeCommand = 'placeholder') cmds.menuItem(label = \"classic - 50 mm\") cmds.menuItem(label = \"telelens -",
"if needed srcIMGhdrTex = srcIMGlookDev + \"/DefaultHDR.hdr.tex\" srcIMGhdrHdr = srcIMGlookDev + \"/DefaultHDR.hdr\" srcIMGcolorCheckerTex",
"reference if it does not exist in the scene importLookdev(projectScene) # Set the",
"= 80) cmds.separator() # Layout cmds.rowColumnLayout( numberOfColumns=1 ) cmds.separator( height = 40 )",
"#### def updateCycloType(cycloStyle, *args): cycloType = (cmds.optionMenu(cycloStyle, q=True, v=True)) if cycloType == \"Constant",
"0) else : print(\"Setting the Shader Balls to minimal type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1) def",
"HDRI changeHDR = cmds.button(label = \"Change HDR\", command = \"changeHDRdef()\") # Reset #",
"# Get the new HDR path project = getProjectPath() srcIMG = project +",
"Scale # Hidden slider, serves only to define varible scaleSlider = cmds.floatSliderGrp(label =",
"precision = 3, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(colorStyle,",
"Layout cmds.separator( height = 40 ) # Set number of Shading balls shadingBalls",
"(cmds.optionMenu(cameraFocal, q=True, v=True)) if camFocLength == \"classic - 50 mm\": print(\"Setting camera focal",
"partial #### Copyright 2019 DUBOIX <NAME> and <NAME> - Licensed under the Apache",
"q=True, v=True)) if camFocLength == \"classic - 50 mm\": print(\"Setting camera focal length",
"import os from shutil import copyfile from functools import partial #### Copyright 2019",
"License. You may obtain a copy of the License at: #### http://www.apache.org/licenses/LICENSE-2.0 def",
"cmds.window(\"winID\", exists = True): cmds.deleteUI(\"winID\") # Window windowWidth = 400 window = cmds.window(\"winID\",",
"'lookdevAuto()') # Layout cmds.separator( height = 40 ) # Set Global Scale #",
"Header #### cmds.rowColumnLayout( numberOfColumns=1, columnWidth=[(1, windowWidth)]) cmds.text( label = \"Renderman Auto Lookdev\", font",
"already in the project RmanAutoLookdev folder') def checkSceneExists(scriptScene, projectScene): # Check and copy",
"+ \"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng) # Check if",
"dragCommand = partial(camDolly, tweakDepth), changeCommand = partial(camDolly, tweakDepth)) # Layout cmds.separator( height =",
"realProject = cmds.workspace( q=True, rootDirectory=True ) return(realProject) # Get the string for the",
"= \"Camera focal length\", changeCommand = 'placeholder') cmds.menuItem(label = \"classic - 50 mm\")",
"obtain a copy of the License at: #### http://www.apache.org/licenses/LICENSE-2.0 def main(): # Kill",
"40 ) # AutoLookdev cmds.button(label = \"AutoLookdev\", command = 'lookdevAuto()') # Layout cmds.separator(",
"cmds.optionMenu(shadingBalls, edit = True, changeCommand = partial(shadingBallType, shadingBalls)) # Set the toggle for",
"shadingBalls)) # Set the toggle for the colorchecker colorCheck = cmds.optionMenu(label =\"Color Checker\",",
"checkFolderExists(srcIMGlookDev): if not os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev) else: print(\"RmanAutoLookdev folder already exists\") # Check if",
"Change HDRI changeHDR = cmds.button(label = \"Change HDR\", command = \"changeHDRdef()\") # Reset",
"if needed checkFolderExists(srcIMGlookDev) # Copy the HDRI and ColorChecker if needed srcIMGhdrTex =",
"maya project path def getProjectPath(): realProject = cmds.workspace( q=True, rootDirectory=True ) return(realProject) #",
"cmds.floatSliderGrp(label=\"Tweak Camera Heigth\", min = -50, max = 50, value = 0, step",
"Shadow output in Beauty's Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1) cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0) #### Update Lookdev Size",
"else : print(\"Setting the Shader Balls to minimal type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1) def colorCheckerToggle(colorCheck,",
"def camDolly(tweakDepth, *args): changeDolly = (cmds.floatSliderGrp(tweakDepth, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly) def changeHDRdef(): #",
"def checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng): # Check and copy",
") #### Header #### cmds.rowColumnLayout( numberOfColumns=1, columnWidth=[(1, windowWidth)]) cmds.text( label = \"Renderman Auto",
"cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1) else : print(\"Setting the Cyclo to Grid texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0) ####",
"value is entered as string # Layout cmds.separator( height = 40 ) #",
"85mm\") cmds.menuItem(label = \"widelens - 28mm\") cmds.optionMenu(cameraFocal, edit = True, changeCommand = partial(setCamFocal,",
"output in Beauty's Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1) cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0) #### Update Lookdev Size ####",
"scriptFolder + \"DefaultHDR.hdr\" scriptColorCheckerTex = scriptFolder + \"DefaultColorChecker.png.tex\" scriptColorCheckerPng = scriptFolder + \"DefaultColorChecker.png\"",
"the string for the maya project path def getProjectPath(): realProject = cmds.workspace( q=True,",
"cycloStyle)) # Set Background Color colorStyle = cmds.floatSliderGrp(label = \"Background Value\", min =",
"127.913) def camHeight(tweakHeight, *args): changeHeight = (cmds.floatSliderGrp(tweakHeight, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight) def camDolly(tweakDepth,",
"= scriptFolder + \"Lookdev_Scene_v01.ma\" projectScene = srcIMGlookDev + \"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene, projectScene) # Import",
"changeHeight) def camDolly(tweakDepth, *args): changeDolly = (cmds.floatSliderGrp(tweakDepth, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly) def changeHDRdef():",
"\"Lookdev Global Scale\", min=0.001, max=100, value=1, precision = 3, field = True, dragCommand",
"# Slider calling the function as the variable is now defined cmds.floatSliderGrp(scaleSlider, edit",
"+ \"sourceimages/\" file = cmds.fileDialog2(fileFilter = \"*.hdr\", dialogStyle = 1, fileMode = 1,",
"# Check if the Lookdev scene exists and copies it if not scriptScene",
"bgType == \"Plane\": print(\"Setting the Background to infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0) else: print(\"Setting the",
"= True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(tweakHeight, edit = True, dragCommand",
"HDRI scriptFolder = getScriptPath() # Create the folder if needed checkFolderExists(srcIMGlookDev) # Copy",
"cmds.optionMenu( label=\"Background Type\", changeCommand=\"placeholder\") cmds.menuItem( label=\"Plane\" ) cmds.menuItem( label=\"Cyclo\" ) cmds.optionMenu(bgStyle, edit =",
"the HDRI & ColorChecker exists in the RmanAutoLookdev folder def checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex,",
"changeCommand = partial(updateCycloType, cycloStyle)) # Set Background Color colorStyle = cmds.floatSliderGrp(label = \"Background",
"Set Background Color colorStyle = cmds.floatSliderGrp(label = \"Background Value\", min = 0, max",
"colorStyle = cmds.floatSliderGrp(label = \"Background Value\", min = 0, max = 1, value",
"\"License\"); #### you may not use this file except in compliance with the",
"Balls to full type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0) else : print(\"Setting the Shader Balls to",
"cmds.showWindow( window ) #### Auto Lookdev set up definitions #### def lookdevAuto(): #",
"True, dragCommand = partial(updateSize, scaleSlider), changeCommand = partial(updateSize, scaleSlider)) # Force update if",
"def getScriptPath(): scriptPath = os.path.expanduser('~') + \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath) # Check if the RmanAutoLookdev",
"Apache License, Version 2.0 (the \"License\"); #### you may not use this file",
"is already in the project RmanAutoLookdev folder') # Check and copy ColorChecker if",
"- 50 mm\": print(\"Setting camera focal length to 50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103)",
"\"Change HDR\", command = \"changeHDRdef()\") # Reset # Show window - Need update",
"reference\") cmds.file(projectScene, r=True, uns = False ) # Set the output of the",
"= \"AutoLookdev\", command = 'lookdevAuto()') # Layout cmds.separator( height = 40 ) #",
"may obtain a copy of the License at: #### http://www.apache.org/licenses/LICENSE-2.0 def main(): #",
"in the project RmanAutoLookdev folder') # Check and copy ColorChecker # Importing the",
"cmds.rowColumnLayout( numberOfColumns=1, columnWidth=[(1, windowWidth)]) cmds.text( label = \"Renderman Auto Lookdev\", font = \"boldLabelFont\",",
") cmds.separator( height = 40 ) # AutoLookdev cmds.button(label = \"AutoLookdev\", command =",
"partial(updateBgType, bgStyle)) # Set Cyclo Type cycloStyle = cmds.optionMenu(label = \"Cyclo Type\", changeCommand",
"\"Camera focal length\", changeCommand = 'placeholder') cmds.menuItem(label = \"classic - 50 mm\") cmds.menuItem(label",
"dragCommand = partial(camHeight, tweakHeight), changeCommand = partial(camHeight, tweakHeight)) tweakDepth = cmds.floatSliderGrp(label=\"Tweak Camera Dolly\",",
"if the RmanAutoLookdev folder exists in the project def checkFolderExists(srcIMGlookDev): if not os.path.exists(srcIMGlookDev):",
"1) cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0) #### Update Lookdev Size #### def updateSize(scaleSlider, *args): globalScale =",
"# Reset # Show window - Need update cmds.showWindow( window ) #### Auto",
"Set the shadow output in the Beauty Alpha setRmanShadow() # Get the string",
"Show window - Need update cmds.showWindow( window ) #### Auto Lookdev set up",
"of the shadows in the beauty alpha and deactivate the learn light from",
"\"On\": print(\"Showing the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0) else: print(\"Hiding the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1) def",
"cmds.menuItem(label = \"widelens - 28mm\") cmds.optionMenu(cameraFocal, edit = True, changeCommand = partial(setCamFocal, cameraFocal))",
"and check if AutoLookdev folder is present project = getProjectPath() srcIMG = project",
"#### Update Lookdev Size #### def updateSize(scaleSlider, *args): globalScale = (cmds.floatSliderGrp(scaleSlider, q=True, v=True))",
"=\"Color Checker\", changeCommand = 'placeholder') cmds.menuItem(label = \"On\") cmds.menuItem(label = \"Off\") cmds.optionMenu(colorCheck, edit",
"bgType = (cmds.optionMenu(bgStyle, q=True, v=True)) if bgType == \"Plane\": print(\"Setting the Background to",
"= True, changeCommand = partial(shadingBallType, shadingBalls)) # Set the toggle for the colorchecker",
"+ \"/DefaultColorChecker.png\" scriptHdrTex = scriptFolder + \"DefaultHDR.hdr.tex\" scriptHdrHdr = scriptFolder + \"DefaultHDR.hdr\" scriptColorCheckerTex",
"project path def getProjectPath(): realProject = cmds.workspace( q=True, rootDirectory=True ) return(realProject) # Get",
"-50, max = 50, value = 0, step = 0.5, field = True,",
"up definitions #### def lookdevAuto(): # Get srcIMG folder in project and check",
"copies it if not scriptScene = scriptFolder + \"Lookdev_Scene_v01.ma\" projectScene = srcIMGlookDev +",
"def main(): # Kill existing window if it is already displayed on the",
"the Cyclo to constant color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1) else : print(\"Setting the Cyclo to",
"True, changeCommand = partial(colorCheckerToggle, colorCheck)) # Layout cmds.separator( height = 40 ) #",
"if camFocLength == \"classic - 50 mm\": print(\"Setting camera focal length to 50mm\")",
"changeCommand = 'placeholder') # Slider calling the function as the variable is now",
"= (cmds.optionMenu(cycloStyle, q=True, v=True)) if cycloType == \"Constant Color\": print(\"Setting the Cyclo to",
"= (cmds.optionMenu(bgStyle, q=True, v=True)) if bgType == \"Plane\": print(\"Setting the Background to infinite\")",
"'placeholder', changeCommand = 'placeholder' ) cmds.floatSliderGrp(tweakDepth, edit = True, dragCommand = partial(camDolly, tweakDepth),",
"copy of the License at: #### http://www.apache.org/licenses/LICENSE-2.0 def main(): # Kill existing window",
"3, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(colorStyle, edit =",
"srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng) # Check if the Lookdev scene exists and",
"40 ) # Set the camera focal length and compensates for it cameraFocal",
"= 0, step = 0.5, field = True, dragCommand = 'placeholder', changeCommand =",
"def changeHDRdef(): # Get the new HDR path project = getProjectPath() srcIMG =",
"Set the toggle for the colorchecker colorCheck = cmds.optionMenu(label =\"Color Checker\", changeCommand =",
"definitions #### def lookdevAuto(): # Get srcIMG folder in project and check if",
"focal length to 50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103) else: if camFocLength == \"telelens",
"copyfile(scriptScene, projectScene) else: print('Lookdev scene is already in the project RmanAutoLookdev folder') #",
"v=True)) if colorCheckerState == \"On\": print(\"Showing the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0) else: print(\"Hiding the",
"= (cmds.optionMenu(cameraFocal, q=True, v=True)) if camFocLength == \"classic - 50 mm\": print(\"Setting camera",
"Value\", min = 0, max = 1, value = 0.02, precision = 3,",
"40 ) # Set number of Shading balls shadingBalls = cmds.optionMenu(label = \"Shader",
"Type #### def updateCycloType(cycloStyle, *args): cycloType = (cmds.optionMenu(cycloStyle, q=True, v=True)) if cycloType ==",
"133.103) else: if camFocLength == \"telelens - 85mm\": print(\"Setting camera focal length to",
"os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev) else: print(\"RmanAutoLookdev folder already exists\") # Check if the HDRI &",
"step = 0.5, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(tweakHeight,",
"udateBgValue(colorStyle, *args): bgValue = (cmds.floatSliderGrp(colorStyle, q=True, v=True)) print(\"Setting the Background Value to \"+",
"== \"telelens - 85mm\": print(\"Setting camera focal length to 85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\",",
"max = 50, value = 0, step = 0.5, field = True, dragCommand",
"cycloType == \"Constant Color\": print(\"Setting the Cyclo to constant color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1) else",
"length to 28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913) def camHeight(tweakHeight, *args): changeHeight = (cmds.floatSliderGrp(tweakHeight,",
"cmds.separator( height = 40 ) # Set the camera focal length and compensates",
"scene is already in the project RmanAutoLookdev folder') # Check and copy ColorChecker",
"as reference\") cmds.file(projectScene, r=True, uns = False ) # Set the output of",
"may not use this file except in compliance with the License. You may",
"it if not scriptScene = scriptFolder + \"Lookdev_Scene_v01.ma\" projectScene = srcIMGlookDev + \"/Lookdev_Scene_v01.ma\"",
"= 40 ) # Set the camera focal length and compensates for it",
"40 ) # Set background Type bgStyle = cmds.optionMenu( label=\"Background Type\", changeCommand=\"placeholder\") cmds.menuItem(",
"copyfile(scriptHdrTex, srcIMGhdrTex) copyfile(scriptHdrHdr, srcIMGhdrHdr) else: print('default HDR is already in the project RmanAutoLookdev",
"= -50, max = 50, value = 0, step = 0.5, field =",
"srcIMG folder in project and check if AutoLookdev folder is present project =",
"Window windowWidth = 400 window = cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev', widthHeight=(windowWidth, 400) ) ####",
"Beauty Alpha setRmanShadow() # Get the string for the maya project path def",
"50, value = 0, step = 0.5, field = True, dragCommand = 'placeholder',",
"the learn light from results def setRmanShadow(): print(\"Set Renderman Shadow output in Beauty's",
"edit = True, changeCommand = partial(updateBgType, bgStyle)) # Set Cyclo Type cycloStyle =",
"- 50 mm\") cmds.menuItem(label = \"telelens - 85mm\") cmds.menuItem(label = \"widelens - 28mm\")",
"it cameraFocal = cmds.optionMenu(label = \"Camera focal length\", changeCommand = 'placeholder') cmds.menuItem(label =",
"partial(updateSize, scaleSlider)) # Force update if the value is entered as string #",
"scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng) # Check if the Lookdev scene exists and copies",
"from functools import partial #### Copyright 2019 DUBOIX <NAME> and <NAME> - Licensed",
"with the License. You may obtain a copy of the License at: ####",
"= \"changeHDRdef()\") # Reset # Show window - Need update cmds.showWindow( window )",
"(cmds.floatSliderGrp(scaleSlider, q=True, v=True)) print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale) #### Update Background Type #### def updateBgType(bgStyle,",
"the project RmanAutoLookdev folder') # Check and copy ColorChecker # Importing the lookdev",
"#### Update Background Color #### def udateBgValue(colorStyle, *args): bgValue = (cmds.floatSliderGrp(colorStyle, q=True, v=True))",
"*args): changeDolly = (cmds.floatSliderGrp(tweakDepth, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly) def changeHDRdef(): # Get the",
"changeDolly = (cmds.floatSliderGrp(tweakDepth, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly) def changeHDRdef(): # Get the new",
"HDRI & ColorChecker exists in the RmanAutoLookdev folder def checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr,",
"85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465) else: print(\"setting camera focal length to 28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\",",
"= 'lookdevAuto()') # Layout cmds.separator( height = 40 ) # Set Global Scale",
"the beauty alpha and deactivate the learn light from results def setRmanShadow(): print(\"Set",
"needed checkFolderExists(srcIMGlookDev) # Copy the HDRI and ColorChecker if needed srcIMGhdrTex = srcIMGlookDev",
"and compensates for it cameraFocal = cmds.optionMenu(label = \"Camera focal length\", changeCommand =",
"and copy HDR if not os.path.exists(projectScene): copyfile(scriptScene, projectScene) else: print('Lookdev scene is already",
"dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(tweakHeight, edit = True, dragCommand = partial(camHeight,",
"\"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath) # Check if the RmanAutoLookdev folder exists in the project def",
"partial(setCamFocal, cameraFocal)) tweakHeight = cmds.floatSliderGrp(label=\"Tweak Camera Heigth\", min = -50, max = 50,",
"License, Version 2.0 (the \"License\"); #### you may not use this file except",
"v=True)) if cycloType == \"Constant Color\": print(\"Setting the Cyclo to constant color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\",",
"cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue) def shadingBallType(shadingBalls, *args): ballsType = (cmds.optionMenu(shadingBalls, q=True, v=True)) if ballsType ==",
"fileMode = 1, dir = srcIMG ) # Change the path to the",
"Set Cyclo Type cycloStyle = cmds.optionMenu(label = \"Cyclo Type\", changeCommand = 'placeholder') cmds.menuItem(label",
"# Set the output of the shadows in the beauty alpha and deactivate",
"shadow output in the Beauty Alpha setRmanShadow() # Get the string for the",
"cameraFocal)) tweakHeight = cmds.floatSliderGrp(label=\"Tweak Camera Heigth\", min = -50, max = 50, value",
"needed srcIMGhdrTex = srcIMGlookDev + \"/DefaultHDR.hdr.tex\" srcIMGhdrHdr = srcIMGlookDev + \"/DefaultHDR.hdr\" srcIMGcolorCheckerTex =",
"= srcIMGlookDev + \"/DefaultHDR.hdr.tex\" srcIMGhdrHdr = srcIMGlookDev + \"/DefaultHDR.hdr\" srcIMGcolorCheckerTex = srcIMGlookDev +",
"#### def udateBgValue(colorStyle, *args): bgValue = (cmds.floatSliderGrp(colorStyle, q=True, v=True)) print(\"Setting the Background Value",
"checkFolderExists(srcIMGlookDev) # Copy the HDRI and ColorChecker if needed srcIMGhdrTex = srcIMGlookDev +",
"Checker\", changeCommand = 'placeholder') cmds.menuItem(label = \"On\") cmds.menuItem(label = \"Off\") cmds.optionMenu(colorCheck, edit =",
"<NAME> - Licensed under the Apache License, Version 2.0 (the \"License\"); #### you",
"='placeholder', changeCommand = 'placeholder') # Slider calling the function as the variable is",
"#### def updateBgType(bgStyle, *args): bgType = (cmds.optionMenu(bgStyle, q=True, v=True)) if bgType == \"Plane\":",
") return(realProject) # Get the string for the script directory def getScriptPath(): scriptPath",
"cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103) else: if camFocLength == \"telelens - 85mm\": print(\"Setting camera focal length",
"copyfile(scriptHdrHdr, srcIMGhdrHdr) else: print('default HDR is already in the project RmanAutoLookdev folder') #",
"precision = 3, field = True, dragCommand ='placeholder', changeCommand = 'placeholder') # Slider",
"= \"classic - 50 mm\") cmds.menuItem(label = \"telelens - 85mm\") cmds.menuItem(label = \"widelens",
"projectScene): # Check and copy HDR if not os.path.exists(projectScene): copyfile(scriptScene, projectScene) else: print('Lookdev",
"learn light from results def setRmanShadow(): print(\"Set Renderman Shadow output in Beauty's Alpha\")",
"from results def setRmanShadow(): print(\"Set Renderman Shadow output in Beauty's Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1)",
"\"RmanAutoLookdev\" # Get the script Path folder before copying the HDRI scriptFolder =",
"Reset # Show window - Need update cmds.showWindow( window ) #### Auto Lookdev",
"# Layout cmds.separator( height = 40 ) # Set background Type bgStyle =",
"scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng): # Check and copy HDR if not os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex,",
"changeCommand = 'placeholder') cmds.menuItem(label = \"Constant Color\") cmds.menuItem(label = \"Checker\") cmds.optionMenu(cycloStyle, edit =",
"= cmds.button(label = \"Change HDR\", command = \"changeHDRdef()\") # Reset # Show window",
"srcIMGhdrTex = srcIMGlookDev + \"/DefaultHDR.hdr.tex\" srcIMGhdrHdr = srcIMGlookDev + \"/DefaultHDR.hdr\" srcIMGcolorCheckerTex = srcIMGlookDev",
"Check and copy HDR if not os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex, srcIMGhdrTex) copyfile(scriptHdrHdr, srcIMGhdrHdr) else: print('default",
"partial(camDolly, tweakDepth)) # Layout cmds.separator( height = 40 ) # Change HDRI changeHDR",
"getScriptPath() # Create the folder if needed checkFolderExists(srcIMGlookDev) # Copy the HDRI and",
"True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(colorStyle, edit = True, dragCommand =",
"Get the string for the maya project path def getProjectPath(): realProject = cmds.workspace(",
"= 0.02, precision = 3, field = True, dragCommand = 'placeholder', changeCommand =",
"constant color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1) else : print(\"Setting the Cyclo to Grid texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\",",
"Cyclo \") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1) #### Update Cyclo Type #### def updateCycloType(cycloStyle, *args): cycloType",
"True, dragCommand = partial(camHeight, tweakHeight), changeCommand = partial(camHeight, tweakHeight)) tweakDepth = cmds.floatSliderGrp(label=\"Tweak Camera",
": print(\"Setting the Cyclo to Grid texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0) #### Update Background Color",
"os.makedirs(srcIMGlookDev) else: print(\"RmanAutoLookdev folder already exists\") # Check if the HDRI & ColorChecker",
"script Path folder before copying the HDRI scriptFolder = getScriptPath() # Create the",
"\"AutoLookdev\", command = 'lookdevAuto()') # Layout cmds.separator( height = 40 ) # Set",
") # Set number of Shading balls shadingBalls = cmds.optionMenu(label = \"Shader Balls\",",
"partial(shadingBallType, shadingBalls)) # Set the toggle for the colorchecker colorCheck = cmds.optionMenu(label =\"Color",
"the camera focal length and compensates for it cameraFocal = cmds.optionMenu(label = \"Camera",
"= 'placeholder') cmds.menuItem(label = \"Constant Color\") cmds.menuItem(label = \"Checker\") cmds.optionMenu(cycloStyle, edit = True,",
"directory def getScriptPath(): scriptPath = os.path.expanduser('~') + \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath) # Check if the",
"scriptFolder + \"DefaultColorChecker.png.tex\" scriptColorCheckerPng = scriptFolder + \"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex,",
"partial(udateBgValue, colorStyle), changeCommand = partial(udateBgValue, colorStyle)) # Layout cmds.separator( height = 40 )",
"if not os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev) else: print(\"RmanAutoLookdev folder already exists\") # Check if the",
"if not os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex, srcIMGhdrTex) copyfile(scriptHdrHdr, srcIMGhdrHdr) else: print('default HDR is already in",
"print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale) #### Update Background Type #### def updateBgType(bgStyle, *args): bgType =",
"srcIMG = project + \"sourceimages/\" file = cmds.fileDialog2(fileFilter = \"*.hdr\", dialogStyle = 1,",
"camHeight(tweakHeight, *args): changeHeight = (cmds.floatSliderGrp(tweakHeight, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight) def camDolly(tweakDepth, *args): changeDolly",
"this file except in compliance with the License. You may obtain a copy",
"= cmds.floatSliderGrp(label = \"Background Value\", min = 0, max = 1, value =",
"changeCommand = partial(setCamFocal, cameraFocal)) tweakHeight = cmds.floatSliderGrp(label=\"Tweak Camera Heigth\", min = -50, max",
"not os.path.exists(projectScene): copyfile(scriptScene, projectScene) else: print('Lookdev scene is already in the project RmanAutoLookdev",
"else: print(\"Setting the Background to Cyclo \") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1) #### Update Cyclo Type",
"= \"Lookdev Global Scale\", min=0.001, max=100, value=1, precision = 3, field = True,",
"value = 0.02, precision = 3, field = True, dragCommand = 'placeholder', changeCommand",
"\"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene, projectScene) # Import Lookdev as reference if it does not exist",
"the Cyclo to Grid texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0) #### Update Background Color #### def",
"= scriptFolder + \"DefaultColorChecker.png.tex\" scriptColorCheckerPng = scriptFolder + \"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr,",
"You may obtain a copy of the License at: #### http://www.apache.org/licenses/LICENSE-2.0 def main():",
"'placeholder') cmds.floatSliderGrp(tweakHeight, edit = True, dragCommand = partial(camHeight, tweakHeight), changeCommand = partial(camHeight, tweakHeight))",
"cmds.floatSliderGrp(label=\"Tweak Camera Dolly\", min = -100, max = 100, value = 0, step",
"scriptFolder + \"DefaultHDR.hdr.tex\" scriptHdrHdr = scriptFolder + \"DefaultHDR.hdr\" scriptColorCheckerTex = scriptFolder + \"DefaultColorChecker.png.tex\"",
"print(\"Setting the Shader Balls to minimal type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1) def colorCheckerToggle(colorCheck, *args): colorCheckerState",
"you may not use this file except in compliance with the License. You",
"50 mm\": print(\"Setting camera focal length to 50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103) else:",
"Auto Lookdev\", font = \"boldLabelFont\", backgroundColor = [0.290, 0.705, 0.909], enableBackground = True,",
"and <NAME> - Licensed under the Apache License, Version 2.0 (the \"License\"); ####",
"exists = True): cmds.deleteUI(\"winID\") # Window windowWidth = 400 window = cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\",",
"# Set the toggle for the colorchecker colorCheck = cmds.optionMenu(label =\"Color Checker\", changeCommand",
"str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue) def shadingBallType(shadingBalls, *args): ballsType = (cmds.optionMenu(shadingBalls, q=True, v=True)) if ballsType",
"shadingBallType(shadingBalls, *args): ballsType = (cmds.optionMenu(shadingBalls, q=True, v=True)) if ballsType == \"Full\": print(\"Setting the",
"Scale\", min=0.001, max=100, value=1, precision = 3, field = True, dragCommand ='placeholder', changeCommand",
"= 1, dir = srcIMG ) # Change the path to the new",
"\"telelens - 85mm\") cmds.menuItem(label = \"widelens - 28mm\") cmds.optionMenu(cameraFocal, edit = True, changeCommand",
"and ColorChecker if needed srcIMGhdrTex = srcIMGlookDev + \"/DefaultHDR.hdr.tex\" srcIMGhdrHdr = srcIMGlookDev +",
"print('Lookdev scene is already in the project RmanAutoLookdev folder') # Check and copy",
"colorchecker colorCheck = cmds.optionMenu(label =\"Color Checker\", changeCommand = 'placeholder') cmds.menuItem(label = \"On\") cmds.menuItem(label",
"# Create the folder if needed checkFolderExists(srcIMGlookDev) # Copy the HDRI and ColorChecker",
"License at: #### http://www.apache.org/licenses/LICENSE-2.0 def main(): # Kill existing window if it is",
") # Set the camera focal length and compensates for it cameraFocal =",
"= srcIMGlookDev + \"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene, projectScene) # Import Lookdev as reference if it",
"RmanAutoLookdev folder') # Check and copy ColorChecker if not os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng,",
"as reference def importLookdev(projectScene): print(\"Import Lookdev as reference\") cmds.file(projectScene, r=True, uns = False",
"alpha and deactivate the learn light from results def setRmanShadow(): print(\"Set Renderman Shadow",
"50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103) else: if camFocLength == \"telelens - 85mm\": print(\"Setting",
"(cmds.floatSliderGrp(tweakDepth, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly) def changeHDRdef(): # Get the new HDR path",
"= srcIMG ) # Change the path to the new HDR if __name__",
"0.5, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(tweakHeight, edit =",
"folder already exists\") # Check if the HDRI & ColorChecker exists in the",
"Get the string for the script directory def getScriptPath(): scriptPath = os.path.expanduser('~') +",
"value = 0, step = 0.5, field = True, dragCommand = 'placeholder', changeCommand",
"project RmanAutoLookdev folder') # Check and copy ColorChecker # Importing the lookdev as",
"def updateBgType(bgStyle, *args): bgType = (cmds.optionMenu(bgStyle, q=True, v=True)) if bgType == \"Plane\": print(\"Setting",
"\"changeHDRdef()\") # Reset # Show window - Need update cmds.showWindow( window ) ####",
"maya.cmds as cmds import os from shutil import copyfile from functools import partial",
"cmds.menuItem(label = \"Minimal\") cmds.optionMenu(shadingBalls, edit = True, changeCommand = partial(shadingBallType, shadingBalls)) # Set",
"project RmanAutoLookdev folder') # Check and copy ColorChecker if not os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex)",
"\"Shader Balls\", changeCommand = 'placeholder') cmds.menuItem(label = \"Full\") cmds.menuItem(label = \"Minimal\") cmds.optionMenu(shadingBalls, edit",
"the new HDR path project = getProjectPath() srcIMG = project + \"sourceimages/\" file",
"from shutil import copyfile from functools import partial #### Copyright 2019 DUBOIX <NAME>",
"Beauty's Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1) cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0) #### Update Lookdev Size #### def updateSize(scaleSlider,",
"0.909], enableBackground = True, height = 80) cmds.separator() # Layout cmds.rowColumnLayout( numberOfColumns=1 )",
"= \"boldLabelFont\", backgroundColor = [0.290, 0.705, 0.909], enableBackground = True, height = 80)",
"*args): colorCheckerState = (cmds.optionMenu(colorCheck, q=True, v=True)) if colorCheckerState == \"On\": print(\"Showing the ColorChecker\")",
"light from results def setRmanShadow(): print(\"Set Renderman Shadow output in Beauty's Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\",",
"# Window windowWidth = 400 window = cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev', widthHeight=(windowWidth, 400) )",
"Shader Balls to minimal type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1) def colorCheckerToggle(colorCheck, *args): colorCheckerState = (cmds.optionMenu(colorCheck,",
"folder is present project = getProjectPath() srcIMG = project + \"sourceimages/\" srcIMGlookDev =",
"focal length to 85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465) else: print(\"setting camera focal length",
"background Type bgStyle = cmds.optionMenu( label=\"Background Type\", changeCommand=\"placeholder\") cmds.menuItem( label=\"Plane\" ) cmds.menuItem( label=\"Cyclo\"",
"scriptHdrTex = scriptFolder + \"DefaultHDR.hdr.tex\" scriptHdrHdr = scriptFolder + \"DefaultHDR.hdr\" scriptColorCheckerTex = scriptFolder",
"dir = srcIMG ) # Change the path to the new HDR if",
"step = 0.5, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder' )",
"3, field = True, dragCommand ='placeholder', changeCommand = 'placeholder') # Slider calling the",
"in the scene importLookdev(projectScene) # Set the shadow output in the Beauty Alpha",
"= getProjectPath() srcIMG = project + \"sourceimages/\" srcIMGlookDev = srcIMG + \"RmanAutoLookdev\" #",
"Color colorStyle = cmds.floatSliderGrp(label = \"Background Value\", min = 0, max = 1,",
"cycloStyle = cmds.optionMenu(label = \"Cyclo Type\", changeCommand = 'placeholder') cmds.menuItem(label = \"Constant Color\")",
"Lookdev\", font = \"boldLabelFont\", backgroundColor = [0.290, 0.705, 0.909], enableBackground = True, height",
"Update Background Color #### def udateBgValue(colorStyle, *args): bgValue = (cmds.floatSliderGrp(colorStyle, q=True, v=True)) print(\"Setting",
"if the value is entered as string # Layout cmds.separator( height = 40",
"cmds.deleteUI(\"winID\") # Window windowWidth = 400 window = cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev', widthHeight=(windowWidth, 400)",
"'placeholder') cmds.menuItem(label = \"Constant Color\") cmds.menuItem(label = \"Checker\") cmds.optionMenu(cycloStyle, edit = True, changeCommand",
"edit = True, changeCommand = partial(colorCheckerToggle, colorCheck)) # Layout cmds.separator( height = 40",
"Layout cmds.separator( height = 40 ) # Set the camera focal length and",
"ColorChecker if needed srcIMGhdrTex = srcIMGlookDev + \"/DefaultHDR.hdr.tex\" srcIMGhdrHdr = srcIMGlookDev + \"/DefaultHDR.hdr\"",
"\"/DefaultColorChecker.png\" scriptHdrTex = scriptFolder + \"DefaultHDR.hdr.tex\" scriptHdrHdr = scriptFolder + \"DefaultHDR.hdr\" scriptColorCheckerTex =",
"= 'placeholder' ) cmds.floatSliderGrp(tweakDepth, edit = True, dragCommand = partial(camDolly, tweakDepth), changeCommand =",
"folder def checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng): # Check and",
"colorCheck = cmds.optionMenu(label =\"Color Checker\", changeCommand = 'placeholder') cmds.menuItem(label = \"On\") cmds.menuItem(label =",
"the output of the shadows in the beauty alpha and deactivate the learn",
"Background Value to \"+ str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue) def shadingBallType(shadingBalls, *args): ballsType = (cmds.optionMenu(shadingBalls,",
"cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0) else: print(\"Hiding the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1) def setCamFocal(cameraFocal, *args): camFocLength =",
"= \"Background Value\", min = 0, max = 1, value = 0.02, precision",
"partial(camHeight, tweakHeight), changeCommand = partial(camHeight, tweakHeight)) tweakDepth = cmds.floatSliderGrp(label=\"Tweak Camera Dolly\", min =",
"partial(camDolly, tweakDepth), changeCommand = partial(camDolly, tweakDepth)) # Layout cmds.separator( height = 40 )",
"= (cmds.floatSliderGrp(tweakHeight, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight) def camDolly(tweakDepth, *args): changeDolly = (cmds.floatSliderGrp(tweakDepth, q=True,",
"value=1, precision = 3, field = True, dragCommand ='placeholder', changeCommand = 'placeholder') #",
"q=True, v=True)) if ballsType == \"Full\": print(\"Setting the Shader Balls to full type\")",
"globalScale = (cmds.floatSliderGrp(scaleSlider, q=True, v=True)) print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale) #### Update Background Type ####",
"if bgType == \"Plane\": print(\"Setting the Background to infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0) else: print(\"Setting",
"not os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex, srcIMGhdrTex) copyfile(scriptHdrHdr, srcIMGhdrHdr) else: print('default HDR is already in the",
"= partial(camHeight, tweakHeight)) tweakDepth = cmds.floatSliderGrp(label=\"Tweak Camera Dolly\", min = -100, max =",
"Set number of Shading balls shadingBalls = cmds.optionMenu(label = \"Shader Balls\", changeCommand =",
"use this file except in compliance with the License. You may obtain a",
"cmds.button(label = \"Change HDR\", command = \"changeHDRdef()\") # Reset # Show window -",
"getProjectPath(): realProject = cmds.workspace( q=True, rootDirectory=True ) return(realProject) # Get the string for",
"HDR is already in the project RmanAutoLookdev folder') # Check and copy ColorChecker",
"to Cyclo \") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1) #### Update Cyclo Type #### def updateCycloType(cycloStyle, *args):",
"print(\"Setting the Cyclo to constant color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1) else : print(\"Setting the Cyclo",
"texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0) #### Update Background Color #### def udateBgValue(colorStyle, *args): bgValue =",
"checkSceneExists(scriptScene, projectScene) # Import Lookdev as reference if it does not exist in",
"the string for the script directory def getScriptPath(): scriptPath = os.path.expanduser('~') + \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\"",
"changeHDR = cmds.button(label = \"Change HDR\", command = \"changeHDRdef()\") # Reset # Show",
"[0.290, 0.705, 0.909], enableBackground = True, height = 80) cmds.separator() # Layout cmds.rowColumnLayout(",
"title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev', widthHeight=(windowWidth, 400) ) #### Header #### cmds.rowColumnLayout( numberOfColumns=1, columnWidth=[(1, windowWidth)]) cmds.text(",
"*args): changeHeight = (cmds.floatSliderGrp(tweakHeight, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight) def camDolly(tweakDepth, *args): changeDolly =",
"cmds.floatSliderGrp(scaleSlider, edit = True, dragCommand = partial(updateSize, scaleSlider), changeCommand = partial(updateSize, scaleSlider)) #",
"as reference if it does not exist in the scene importLookdev(projectScene) # Set",
"the Lookdev scene exists and copies it if not scriptScene = scriptFolder +",
"colorCheck)) # Layout cmds.separator( height = 40 ) # Set the camera focal",
"- 85mm\") cmds.menuItem(label = \"widelens - 28mm\") cmds.optionMenu(cameraFocal, edit = True, changeCommand =",
"= \"Checker\") cmds.optionMenu(cycloStyle, edit = True, changeCommand = partial(updateCycloType, cycloStyle)) # Set Background",
"the function as the variable is now defined cmds.floatSliderGrp(scaleSlider, edit = True, dragCommand",
"# Get the string for the script directory def getScriptPath(): scriptPath = os.path.expanduser('~')",
"- Need update cmds.showWindow( window ) #### Auto Lookdev set up definitions ####",
"srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng): # Check and copy HDR if not os.path.exists(srcIMGhdrTex):",
"= (cmds.floatSliderGrp(scaleSlider, q=True, v=True)) print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale) #### Update Background Type #### def",
"the shadow output in the Beauty Alpha setRmanShadow() # Get the string for",
"max=100, value=1, precision = 3, field = True, dragCommand ='placeholder', changeCommand = 'placeholder')",
"srcIMGcolorCheckerPng = srcIMGlookDev + \"/DefaultColorChecker.png\" scriptHdrTex = scriptFolder + \"DefaultHDR.hdr.tex\" scriptHdrHdr = scriptFolder",
"# Get the string for the maya project path def getProjectPath(): realProject =",
"= \"*.hdr\", dialogStyle = 1, fileMode = 1, dir = srcIMG ) #",
"scriptScene = scriptFolder + \"Lookdev_Scene_v01.ma\" projectScene = srcIMGlookDev + \"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene, projectScene) #",
"camDolly(tweakDepth, *args): changeDolly = (cmds.floatSliderGrp(tweakDepth, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly) def changeHDRdef(): # Get",
"deactivate the learn light from results def setRmanShadow(): print(\"Set Renderman Shadow output in",
") # Change the path to the new HDR if __name__ == '__main__':",
"for it cameraFocal = cmds.optionMenu(label = \"Camera focal length\", changeCommand = 'placeholder') cmds.menuItem(label",
"# Check if the RmanAutoLookdev folder exists in the project def checkFolderExists(srcIMGlookDev): if",
"= True, dragCommand = partial(updateSize, scaleSlider), changeCommand = partial(updateSize, scaleSlider)) # Force update",
"#### you may not use this file except in compliance with the License.",
"v=True)) print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale) #### Update Background Type #### def updateBgType(bgStyle, *args): bgType",
"exists in the project def checkFolderExists(srcIMGlookDev): if not os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev) else: print(\"RmanAutoLookdev folder",
"folder') # Check and copy ColorChecker if not os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng)",
"True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(tweakHeight, edit = True, dragCommand =",
"Type cycloStyle = cmds.optionMenu(label = \"Cyclo Type\", changeCommand = 'placeholder') cmds.menuItem(label = \"Constant",
"cmds.menuItem(label = \"Checker\") cmds.optionMenu(cycloStyle, edit = True, changeCommand = partial(updateCycloType, cycloStyle)) # Set",
"v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight) def camDolly(tweakDepth, *args): changeDolly = (cmds.floatSliderGrp(tweakDepth, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly)",
"*args): bgType = (cmds.optionMenu(bgStyle, q=True, v=True)) if bgType == \"Plane\": print(\"Setting the Background",
"changeCommand = partial(camHeight, tweakHeight)) tweakDepth = cmds.floatSliderGrp(label=\"Tweak Camera Dolly\", min = -100, max",
"*args): bgValue = (cmds.floatSliderGrp(colorStyle, q=True, v=True)) print(\"Setting the Background Value to \"+ str(bgValue))",
"Lookdev as reference\") cmds.file(projectScene, r=True, uns = False ) # Set the output",
"+ \"/DefaultHDR.hdr.tex\" srcIMGhdrHdr = srcIMGlookDev + \"/DefaultHDR.hdr\" srcIMGcolorCheckerTex = srcIMGlookDev + \"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng",
"Layout cmds.rowColumnLayout( numberOfColumns=1 ) cmds.separator( height = 40 ) # AutoLookdev cmds.button(label =",
"string # Layout cmds.separator( height = 40 ) # Set background Type bgStyle",
"\"Minimal\") cmds.optionMenu(shadingBalls, edit = True, changeCommand = partial(shadingBallType, shadingBalls)) # Set the toggle",
"1) else : print(\"Setting the Cyclo to Grid texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0) #### Update",
"'placeholder' ) cmds.floatSliderGrp(tweakDepth, edit = True, dragCommand = partial(camDolly, tweakDepth), changeCommand = partial(camDolly,",
"srcIMG = project + \"sourceimages/\" srcIMGlookDev = srcIMG + \"RmanAutoLookdev\" # Get the",
"importLookdev(projectScene) # Set the shadow output in the Beauty Alpha setRmanShadow() # Get",
"for the script directory def getScriptPath(): scriptPath = os.path.expanduser('~') + \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath) #",
"uns = False ) # Set the output of the shadows in the",
"ColorChecker if not os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng) else: print('default Color Checker is",
"backgroundColor = [0.290, 0.705, 0.909], enableBackground = True, height = 80) cmds.separator() #",
"srcIMG + \"RmanAutoLookdev\" # Get the script Path folder before copying the HDRI",
"projectScene) # Import Lookdev as reference if it does not exist in the",
"= cmds.workspace( q=True, rootDirectory=True ) return(realProject) # Get the string for the script",
"cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0) #### Update Lookdev Size #### def updateSize(scaleSlider, *args): globalScale = (cmds.floatSliderGrp(scaleSlider,",
"q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight) def camDolly(tweakDepth, *args): changeDolly = (cmds.floatSliderGrp(tweakDepth, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\",",
"\"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng) # Check if the",
"lookdevAuto(): # Get srcIMG folder in project and check if AutoLookdev folder is",
"Background Type #### def updateBgType(bgStyle, *args): bgType = (cmds.optionMenu(bgStyle, q=True, v=True)) if bgType",
"cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465) else: print(\"setting camera focal length to 28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913)",
"= os.path.expanduser('~') + \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath) # Check if the RmanAutoLookdev folder exists in",
"it does not exist in the scene importLookdev(projectScene) # Set the shadow output",
"cmds.separator() # Layout cmds.rowColumnLayout( numberOfColumns=1 ) cmds.separator( height = 40 ) # AutoLookdev",
"Lookdev Size #### def updateSize(scaleSlider, *args): globalScale = (cmds.floatSliderGrp(scaleSlider, q=True, v=True)) print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\",",
"partial(updateCycloType, cycloStyle)) # Set Background Color colorStyle = cmds.floatSliderGrp(label = \"Background Value\", min",
"= True, changeCommand = partial(setCamFocal, cameraFocal)) tweakHeight = cmds.floatSliderGrp(label=\"Tweak Camera Heigth\", min =",
"if not os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng) else: print('default Color Checker is already",
"Kill existing window if it is already displayed on the screen if cmds.window(\"winID\",",
"camera focal length to 85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465) else: print(\"setting camera focal",
"minimal type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1) def colorCheckerToggle(colorCheck, *args): colorCheckerState = (cmds.optionMenu(colorCheck, q=True, v=True)) if",
"cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913) def camHeight(tweakHeight, *args): changeHeight = (cmds.floatSliderGrp(tweakHeight, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\",",
"height = 40 ) # AutoLookdev cmds.button(label = \"AutoLookdev\", command = 'lookdevAuto()') #",
"print(\"Setting the Background Value to \"+ str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue) def shadingBallType(shadingBalls, *args): ballsType",
"not scriptScene = scriptFolder + \"Lookdev_Scene_v01.ma\" projectScene = srcIMGlookDev + \"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene, projectScene)",
"already in the project RmanAutoLookdev folder') # Check and copy ColorChecker # Importing",
"= \"Change HDR\", command = \"changeHDRdef()\") # Reset # Show window - Need",
"window = cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev', widthHeight=(windowWidth, 400) ) #### Header #### cmds.rowColumnLayout( numberOfColumns=1,",
"Copy the HDRI and ColorChecker if needed srcIMGhdrTex = srcIMGlookDev + \"/DefaultHDR.hdr.tex\" srcIMGhdrHdr",
"ballsType = (cmds.optionMenu(shadingBalls, q=True, v=True)) if ballsType == \"Full\": print(\"Setting the Shader Balls",
"= partial(updateSize, scaleSlider), changeCommand = partial(updateSize, scaleSlider)) # Force update if the value",
"= True, changeCommand = partial(updateCycloType, cycloStyle)) # Set Background Color colorStyle = cmds.floatSliderGrp(label",
"the RmanAutoLookdev folder def checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng): #",
"print(\"Set Renderman Shadow output in Beauty's Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1) cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0) #### Update",
"Force update if the value is entered as string # Layout cmds.separator( height",
"not use this file except in compliance with the License. You may obtain",
"window ) #### Auto Lookdev set up definitions #### def lookdevAuto(): # Get",
"def lookdevAuto(): # Get srcIMG folder in project and check if AutoLookdev folder",
"cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1) cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0) #### Update Lookdev Size #### def updateSize(scaleSlider, *args): globalScale",
"v=True)) if camFocLength == \"classic - 50 mm\": print(\"Setting camera focal length to",
"0.02, precision = 3, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder')",
"return(scriptPath) # Check if the RmanAutoLookdev folder exists in the project def checkFolderExists(srcIMGlookDev):",
"= partial(camDolly, tweakDepth), changeCommand = partial(camDolly, tweakDepth)) # Layout cmds.separator( height = 40",
"def importLookdev(projectScene): print(\"Import Lookdev as reference\") cmds.file(projectScene, r=True, uns = False ) #",
"def colorCheckerToggle(colorCheck, *args): colorCheckerState = (cmds.optionMenu(colorCheck, q=True, v=True)) if colorCheckerState == \"On\": print(\"Showing",
"= 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(tweakHeight, edit = True, dragCommand = partial(camHeight, tweakHeight),",
"cmds.workspace( q=True, rootDirectory=True ) return(realProject) # Get the string for the script directory",
"print(\"Hiding the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1) def setCamFocal(cameraFocal, *args): camFocLength = (cmds.optionMenu(cameraFocal, q=True, v=True))",
"# Kill existing window if it is already displayed on the screen if",
"the project RmanAutoLookdev folder') def checkSceneExists(scriptScene, projectScene): # Check and copy HDR if",
"for the maya project path def getProjectPath(): realProject = cmds.workspace( q=True, rootDirectory=True )",
"and copy ColorChecker # Importing the lookdev as reference def importLookdev(projectScene): print(\"Import Lookdev",
"camFocLength = (cmds.optionMenu(cameraFocal, q=True, v=True)) if camFocLength == \"classic - 50 mm\": print(\"Setting",
"\"+ str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue) def shadingBallType(shadingBalls, *args): ballsType = (cmds.optionMenu(shadingBalls, q=True, v=True)) if",
"# Set number of Shading balls shadingBalls = cmds.optionMenu(label = \"Shader Balls\", changeCommand",
"= \"Constant Color\") cmds.menuItem(label = \"Checker\") cmds.optionMenu(cycloStyle, edit = True, changeCommand = partial(updateCycloType,",
"Balls\", changeCommand = 'placeholder') cmds.menuItem(label = \"Full\") cmds.menuItem(label = \"Minimal\") cmds.optionMenu(shadingBalls, edit =",
"0, step = 0.5, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder'",
"command = \"changeHDRdef()\") # Reset # Show window - Need update cmds.showWindow( window",
"changeDolly) def changeHDRdef(): # Get the new HDR path project = getProjectPath() srcIMG",
"v=True)) if bgType == \"Plane\": print(\"Setting the Background to infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0) else:",
"as string # Layout cmds.separator( height = 40 ) # Set background Type",
"windowWidth = 400 window = cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev', widthHeight=(windowWidth, 400) ) #### Header",
"v=True)) if ballsType == \"Full\": print(\"Setting the Shader Balls to full type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\",",
"scaleSlider = cmds.floatSliderGrp(label = \"Lookdev Global Scale\", min=0.001, max=100, value=1, precision = 3,",
"copy ColorChecker # Importing the lookdev as reference def importLookdev(projectScene): print(\"Import Lookdev as",
"= cmds.optionMenu(label = \"Cyclo Type\", changeCommand = 'placeholder') cmds.menuItem(label = \"Constant Color\") cmds.menuItem(label",
"cmds.menuItem(label = \"classic - 50 mm\") cmds.menuItem(label = \"telelens - 85mm\") cmds.menuItem(label =",
"\"Full\": print(\"Setting the Shader Balls to full type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0) else : print(\"Setting",
"0.5, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder' ) cmds.floatSliderGrp(tweakDepth, edit",
"print(\"Showing the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0) else: print(\"Hiding the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1) def setCamFocal(cameraFocal,",
"new HDR path project = getProjectPath() srcIMG = project + \"sourceimages/\" file =",
"focal length\", changeCommand = 'placeholder') cmds.menuItem(label = \"classic - 50 mm\") cmds.menuItem(label =",
"srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng) # Check if the Lookdev scene exists",
"and deactivate the learn light from results def setRmanShadow(): print(\"Set Renderman Shadow output",
"cmds.fileDialog2(fileFilter = \"*.hdr\", dialogStyle = 1, fileMode = 1, dir = srcIMG )",
"the lookdev as reference def importLookdev(projectScene): print(\"Import Lookdev as reference\") cmds.file(projectScene, r=True, uns",
"file except in compliance with the License. You may obtain a copy of",
"as cmds import os from shutil import copyfile from functools import partial ####",
"camera focal length and compensates for it cameraFocal = cmds.optionMenu(label = \"Camera focal",
"window - Need update cmds.showWindow( window ) #### Auto Lookdev set up definitions",
"getScriptPath(): scriptPath = os.path.expanduser('~') + \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath) # Check if the RmanAutoLookdev folder",
"40 ) # Set Global Scale # Hidden slider, serves only to define",
"height = 40 ) # Set the camera focal length and compensates for",
"q=True, v=True)) print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale) #### Update Background Type #### def updateBgType(bgStyle, *args):",
"cmds.optionMenu(colorCheck, edit = True, changeCommand = partial(colorCheckerToggle, colorCheck)) # Layout cmds.separator( height =",
"#### cmds.rowColumnLayout( numberOfColumns=1, columnWidth=[(1, windowWidth)]) cmds.text( label = \"Renderman Auto Lookdev\", font =",
"= scriptFolder + \"DefaultHDR.hdr.tex\" scriptHdrHdr = scriptFolder + \"DefaultHDR.hdr\" scriptColorCheckerTex = scriptFolder +",
"calling the function as the variable is now defined cmds.floatSliderGrp(scaleSlider, edit = True,",
"compensates for it cameraFocal = cmds.optionMenu(label = \"Camera focal length\", changeCommand = 'placeholder')",
"\"Lookdev_Scene_v01.ma\" projectScene = srcIMGlookDev + \"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene, projectScene) # Import Lookdev as reference",
"28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913) def camHeight(tweakHeight, *args): changeHeight = (cmds.floatSliderGrp(tweakHeight, q=True, v=True))",
"= 1, fileMode = 1, dir = srcIMG ) # Change the path",
"cmds.floatSliderGrp(colorStyle, edit = True, dragCommand = partial(udateBgValue, colorStyle), changeCommand = partial(udateBgValue, colorStyle)) #",
"= 100, value = 0, step = 0.5, field = True, dragCommand =",
"= srcIMGlookDev + \"/DefaultHDR.hdr\" srcIMGcolorCheckerTex = srcIMGlookDev + \"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng = srcIMGlookDev +",
"scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng): # Check and copy HDR if not os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex, srcIMGhdrTex)",
"else: print('default Color Checker is already in the project RmanAutoLookdev folder') def checkSceneExists(scriptScene,",
"updateBgType(bgStyle, *args): bgType = (cmds.optionMenu(bgStyle, q=True, v=True)) if bgType == \"Plane\": print(\"Setting the",
"(cmds.optionMenu(cycloStyle, q=True, v=True)) if cycloType == \"Constant Color\": print(\"Setting the Cyclo to constant",
"print(\"Setting camera focal length to 85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465) else: print(\"setting camera",
"edit = True, changeCommand = partial(setCamFocal, cameraFocal)) tweakHeight = cmds.floatSliderGrp(label=\"Tweak Camera Heigth\", min",
"folder exists in the project def checkFolderExists(srcIMGlookDev): if not os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev) else: print(\"RmanAutoLookdev",
"file = cmds.fileDialog2(fileFilter = \"*.hdr\", dialogStyle = 1, fileMode = 1, dir =",
"srcIMGcolorCheckerPng) # Check if the Lookdev scene exists and copies it if not",
"functools import partial #### Copyright 2019 DUBOIX <NAME> and <NAME> - Licensed under",
"\"telelens - 85mm\": print(\"Setting camera focal length to 85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465)",
"exists in the RmanAutoLookdev folder def checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex,",
"copyfile from functools import partial #### Copyright 2019 DUBOIX <NAME> and <NAME> -",
"= \"Minimal\") cmds.optionMenu(shadingBalls, edit = True, changeCommand = partial(shadingBallType, shadingBalls)) # Set the",
"copy HDR if not os.path.exists(projectScene): copyfile(scriptScene, projectScene) else: print('Lookdev scene is already in",
"tweakDepth)) # Layout cmds.separator( height = 40 ) # Change HDRI changeHDR =",
": print(\"Setting the Shader Balls to minimal type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1) def colorCheckerToggle(colorCheck, *args):",
"Balls to minimal type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1) def colorCheckerToggle(colorCheck, *args): colorCheckerState = (cmds.optionMenu(colorCheck, q=True,",
"to 85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465) else: print(\"setting camera focal length to 28mm\")",
"= partial(camDolly, tweakDepth)) # Layout cmds.separator( height = 40 ) # Change HDRI",
"= 50, value = 0, step = 0.5, field = True, dragCommand =",
"+ \"/DefaultHDR.hdr\" srcIMGcolorCheckerTex = srcIMGlookDev + \"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng = srcIMGlookDev + \"/DefaultColorChecker.png\" scriptHdrTex",
"= True, changeCommand = partial(updateBgType, bgStyle)) # Set Cyclo Type cycloStyle = cmds.optionMenu(label",
"cmds.menuItem(label = \"Off\") cmds.optionMenu(colorCheck, edit = True, changeCommand = partial(colorCheckerToggle, colorCheck)) # Layout",
"<NAME> and <NAME> - Licensed under the Apache License, Version 2.0 (the \"License\");",
"Check if the RmanAutoLookdev folder exists in the project def checkFolderExists(srcIMGlookDev): if not",
"max = 100, value = 0, step = 0.5, field = True, dragCommand",
"to \"+ str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue) def shadingBallType(shadingBalls, *args): ballsType = (cmds.optionMenu(shadingBalls, q=True, v=True))",
"changeCommand = partial(udateBgValue, colorStyle)) # Layout cmds.separator( height = 40 ) # Set",
"length to 50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103) else: if camFocLength == \"telelens -",
"height = 80) cmds.separator() # Layout cmds.rowColumnLayout( numberOfColumns=1 ) cmds.separator( height = 40",
"else: print(\"setting camera focal length to 28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913) def camHeight(tweakHeight,",
"Type #### def updateBgType(bgStyle, *args): bgType = (cmds.optionMenu(bgStyle, q=True, v=True)) if bgType ==",
"mm\") cmds.menuItem(label = \"telelens - 85mm\") cmds.menuItem(label = \"widelens - 28mm\") cmds.optionMenu(cameraFocal, edit",
"to 28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913) def camHeight(tweakHeight, *args): changeHeight = (cmds.floatSliderGrp(tweakHeight, q=True,",
"\"Off\") cmds.optionMenu(colorCheck, edit = True, changeCommand = partial(colorCheckerToggle, colorCheck)) # Layout cmds.separator( height",
"changeCommand = 'placeholder') cmds.menuItem(label = \"Full\") cmds.menuItem(label = \"Minimal\") cmds.optionMenu(shadingBalls, edit = True,",
"Check and copy ColorChecker # Importing the lookdev as reference def importLookdev(projectScene): print(\"Import",
"\"Cyclo Type\", changeCommand = 'placeholder') cmds.menuItem(label = \"Constant Color\") cmds.menuItem(label = \"Checker\") cmds.optionMenu(cycloStyle,",
"bgValue) def shadingBallType(shadingBalls, *args): ballsType = (cmds.optionMenu(shadingBalls, q=True, v=True)) if ballsType == \"Full\":",
"DUBOIX <NAME> and <NAME> - Licensed under the Apache License, Version 2.0 (the",
"- Licensed under the Apache License, Version 2.0 (the \"License\"); #### you may",
"= 'placeholder') cmds.floatSliderGrp(colorStyle, edit = True, dragCommand = partial(udateBgValue, colorStyle), changeCommand = partial(udateBgValue,",
"'placeholder') cmds.menuItem(label = \"classic - 50 mm\") cmds.menuItem(label = \"telelens - 85mm\") cmds.menuItem(label",
"if camFocLength == \"telelens - 85mm\": print(\"Setting camera focal length to 85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\",",
"= partial(udateBgValue, colorStyle), changeCommand = partial(udateBgValue, colorStyle)) # Layout cmds.separator( height = 40",
"tweakHeight)) tweakDepth = cmds.floatSliderGrp(label=\"Tweak Camera Dolly\", min = -100, max = 100, value",
"changeCommand = partial(shadingBallType, shadingBalls)) # Set the toggle for the colorchecker colorCheck =",
"= 400 window = cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev', widthHeight=(windowWidth, 400) ) #### Header ####",
"updateSize(scaleSlider, *args): globalScale = (cmds.floatSliderGrp(scaleSlider, q=True, v=True)) print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale) #### Update Background",
"camFocLength == \"classic - 50 mm\": print(\"Setting camera focal length to 50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\",",
"1, dir = srcIMG ) # Change the path to the new HDR",
"v=True)) print(\"Setting the Background Value to \"+ str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue) def shadingBallType(shadingBalls, *args):",
"dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(colorStyle, edit = True, dragCommand = partial(udateBgValue,",
"cmds.menuItem(label = \"On\") cmds.menuItem(label = \"Off\") cmds.optionMenu(colorCheck, edit = True, changeCommand = partial(colorCheckerToggle,",
"cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev', widthHeight=(windowWidth, 400) ) #### Header #### cmds.rowColumnLayout( numberOfColumns=1, columnWidth=[(1, windowWidth)])",
"\"Background Value\", min = 0, max = 1, value = 0.02, precision =",
"if colorCheckerState == \"On\": print(\"Showing the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0) else: print(\"Hiding the ColorChecker\")",
"copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng) else: print('default Color Checker is already in the project",
"is now defined cmds.floatSliderGrp(scaleSlider, edit = True, dragCommand = partial(updateSize, scaleSlider), changeCommand =",
") # Set background Type bgStyle = cmds.optionMenu( label=\"Background Type\", changeCommand=\"placeholder\") cmds.menuItem( label=\"Plane\"",
"#### Copyright 2019 DUBOIX <NAME> and <NAME> - Licensed under the Apache License,",
"Background to infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0) else: print(\"Setting the Background to Cyclo \") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\",",
"to Grid texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0) #### Update Background Color #### def udateBgValue(colorStyle, *args):",
"0) #### Update Lookdev Size #### def updateSize(scaleSlider, *args): globalScale = (cmds.floatSliderGrp(scaleSlider, q=True,",
"cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1) #### Update Cyclo Type #### def updateCycloType(cycloStyle, *args): cycloType = (cmds.optionMenu(cycloStyle,",
"min = -50, max = 50, value = 0, step = 0.5, field",
"#### http://www.apache.org/licenses/LICENSE-2.0 def main(): # Kill existing window if it is already displayed",
"Get srcIMG folder in project and check if AutoLookdev folder is present project",
"type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0) else : print(\"Setting the Shader Balls to minimal type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\",",
"AutoLookdev folder is present project = getProjectPath() srcIMG = project + \"sourceimages/\" srcIMGlookDev",
"# Set Global Scale # Hidden slider, serves only to define varible scaleSlider",
"= partial(updateCycloType, cycloStyle)) # Set Background Color colorStyle = cmds.floatSliderGrp(label = \"Background Value\",",
"tweakHeight), changeCommand = partial(camHeight, tweakHeight)) tweakDepth = cmds.floatSliderGrp(label=\"Tweak Camera Dolly\", min = -100,",
"srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng) else: print('default Color Checker is already in the project RmanAutoLookdev",
"height = 40 ) # Set background Type bgStyle = cmds.optionMenu( label=\"Background Type\",",
"the scene importLookdev(projectScene) # Set the shadow output in the Beauty Alpha setRmanShadow()",
"+ \"RmanAutoLookdev\" # Get the script Path folder before copying the HDRI scriptFolder",
"cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103) else: if camFocLength == \"telelens - 85mm\": print(\"Setting camera",
"- 28mm\") cmds.optionMenu(cameraFocal, edit = True, changeCommand = partial(setCamFocal, cameraFocal)) tweakHeight = cmds.floatSliderGrp(label=\"Tweak",
"edit = True, dragCommand = partial(camDolly, tweakDepth), changeCommand = partial(camDolly, tweakDepth)) # Layout",
"slider, serves only to define varible scaleSlider = cmds.floatSliderGrp(label = \"Lookdev Global Scale\",",
"'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(colorStyle, edit = True, dragCommand = partial(udateBgValue, colorStyle), changeCommand",
"Path folder before copying the HDRI scriptFolder = getScriptPath() # Create the folder",
"def shadingBallType(shadingBalls, *args): ballsType = (cmds.optionMenu(shadingBalls, q=True, v=True)) if ballsType == \"Full\": print(\"Setting",
"to minimal type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1) def colorCheckerToggle(colorCheck, *args): colorCheckerState = (cmds.optionMenu(colorCheck, q=True, v=True))",
"changeCommand = partial(camDolly, tweakDepth)) # Layout cmds.separator( height = 40 ) # Change",
"\"Constant Color\": print(\"Setting the Cyclo to constant color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1) else : print(\"Setting",
"cmds.floatSliderGrp(label = \"Background Value\", min = 0, max = 1, value = 0.02,",
"the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0) else: print(\"Hiding the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1) def setCamFocal(cameraFocal, *args):",
"project = getProjectPath() srcIMG = project + \"sourceimages/\" srcIMGlookDev = srcIMG + \"RmanAutoLookdev\"",
"*args): globalScale = (cmds.floatSliderGrp(scaleSlider, q=True, v=True)) print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale) #### Update Background Type",
"= getScriptPath() # Create the folder if needed checkFolderExists(srcIMGlookDev) # Copy the HDRI",
"dialogStyle = 1, fileMode = 1, dir = srcIMG ) # Change the",
"the License at: #### http://www.apache.org/licenses/LICENSE-2.0 def main(): # Kill existing window if it",
"400) ) #### Header #### cmds.rowColumnLayout( numberOfColumns=1, columnWidth=[(1, windowWidth)]) cmds.text( label = \"Renderman",
"scaleSlider)) # Force update if the value is entered as string # Layout",
"the RmanAutoLookdev folder exists in the project def checkFolderExists(srcIMGlookDev): if not os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev)",
"50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103) else: if camFocLength == \"telelens - 85mm\": print(\"Setting camera focal",
"Shader Balls to full type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0) else : print(\"Setting the Shader Balls",
"present project = getProjectPath() srcIMG = project + \"sourceimages/\" srcIMGlookDev = srcIMG +",
"+ \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath) # Check if the RmanAutoLookdev folder exists in the project",
"\"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng = srcIMGlookDev + \"/DefaultColorChecker.png\" scriptHdrTex = scriptFolder + \"DefaultHDR.hdr.tex\" scriptHdrHdr =",
"cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0) #### Update Background Color #### def udateBgValue(colorStyle, *args): bgValue = (cmds.floatSliderGrp(colorStyle,",
"cmds.floatSliderGrp(label = \"Lookdev Global Scale\", min=0.001, max=100, value=1, precision = 3, field =",
"dragCommand = partial(updateSize, scaleSlider), changeCommand = partial(updateSize, scaleSlider)) # Force update if the",
"os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex, srcIMGhdrTex) copyfile(scriptHdrHdr, srcIMGhdrHdr) else: print('default HDR is already in the project",
"cmds.menuItem( label=\"Cyclo\" ) cmds.optionMenu(bgStyle, edit = True, changeCommand = partial(updateBgType, bgStyle)) # Set",
"True, height = 80) cmds.separator() # Layout cmds.rowColumnLayout( numberOfColumns=1 ) cmds.separator( height =",
"the project RmanAutoLookdev folder') # Check and copy ColorChecker if not os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex,",
"Color\") cmds.menuItem(label = \"Checker\") cmds.optionMenu(cycloStyle, edit = True, changeCommand = partial(updateCycloType, cycloStyle)) #",
"= [0.290, 0.705, 0.909], enableBackground = True, height = 80) cmds.separator() # Layout",
"#### def lookdevAuto(): # Get srcIMG folder in project and check if AutoLookdev",
"1) def setCamFocal(cameraFocal, *args): camFocLength = (cmds.optionMenu(cameraFocal, q=True, v=True)) if camFocLength == \"classic",
"before copying the HDRI scriptFolder = getScriptPath() # Create the folder if needed",
"def updateCycloType(cycloStyle, *args): cycloType = (cmds.optionMenu(cycloStyle, q=True, v=True)) if cycloType == \"Constant Color\":",
"else : print(\"Setting the Cyclo to Grid texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0) #### Update Background",
"cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0) else : print(\"Setting the Shader Balls to minimal type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1)",
"# Layout cmds.separator( height = 40 ) # Set number of Shading balls",
"tweakHeight = cmds.floatSliderGrp(label=\"Tweak Camera Heigth\", min = -50, max = 50, value =",
"bgValue = (cmds.floatSliderGrp(colorStyle, q=True, v=True)) print(\"Setting the Background Value to \"+ str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\",",
"Layout cmds.separator( height = 40 ) # Set background Type bgStyle = cmds.optionMenu(",
"= True, height = 80) cmds.separator() # Layout cmds.rowColumnLayout( numberOfColumns=1 ) cmds.separator( height",
"\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1) #### Update Cyclo Type #### def updateCycloType(cycloStyle, *args): cycloType =",
"changeCommand = partial(updateSize, scaleSlider)) # Force update if the value is entered as",
"project RmanAutoLookdev folder') def checkSceneExists(scriptScene, projectScene): # Check and copy HDR if not",
"if cmds.window(\"winID\", exists = True): cmds.deleteUI(\"winID\") # Window windowWidth = 400 window =",
"os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng) else: print('default Color Checker is already in the",
"= False ) # Set the output of the shadows in the beauty",
") # AutoLookdev cmds.button(label = \"AutoLookdev\", command = 'lookdevAuto()') # Layout cmds.separator( height",
"the colorchecker colorCheck = cmds.optionMenu(label =\"Color Checker\", changeCommand = 'placeholder') cmds.menuItem(label = \"On\")",
"\"DefaultHDR.hdr\" scriptColorCheckerTex = scriptFolder + \"DefaultColorChecker.png.tex\" scriptColorCheckerPng = scriptFolder + \"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex, scriptHdrHdr,",
"Renderman Shadow output in Beauty's Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1) cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0) #### Update Lookdev",
"project = getProjectPath() srcIMG = project + \"sourceimages/\" file = cmds.fileDialog2(fileFilter = \"*.hdr\",",
"print('default Color Checker is already in the project RmanAutoLookdev folder') def checkSceneExists(scriptScene, projectScene):",
"q=True, v=True)) if cycloType == \"Constant Color\": print(\"Setting the Cyclo to constant color\")",
"#### Update Cyclo Type #### def updateCycloType(cycloStyle, *args): cycloType = (cmds.optionMenu(cycloStyle, q=True, v=True))",
"= True): cmds.deleteUI(\"winID\") # Window windowWidth = 400 window = cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev',",
"# Set the camera focal length and compensates for it cameraFocal = cmds.optionMenu(label",
"cmds.optionMenu(label = \"Camera focal length\", changeCommand = 'placeholder') cmds.menuItem(label = \"classic - 50",
"= scriptFolder + \"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng) #",
"= partial(updateSize, scaleSlider)) # Force update if the value is entered as string",
"scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng): # Check and copy HDR if",
"srcIMGcolorCheckerTex, srcIMGcolorCheckerPng): # Check and copy HDR if not os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex, srcIMGhdrTex) copyfile(scriptHdrHdr,",
"folder if needed checkFolderExists(srcIMGlookDev) # Copy the HDRI and ColorChecker if needed srcIMGhdrTex",
"print(\"Setting the Cyclo to Grid texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0) #### Update Background Color ####",
"print(\"Setting camera focal length to 50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103) else: if camFocLength",
"\"sourceimages/\" file = cmds.fileDialog2(fileFilter = \"*.hdr\", dialogStyle = 1, fileMode = 1, dir",
"Layout cmds.separator( height = 40 ) # Change HDRI changeHDR = cmds.button(label =",
"*args): ballsType = (cmds.optionMenu(shadingBalls, q=True, v=True)) if ballsType == \"Full\": print(\"Setting the Shader",
") #### Auto Lookdev set up definitions #### def lookdevAuto(): # Get srcIMG",
"= 40 ) # Change HDRI changeHDR = cmds.button(label = \"Change HDR\", command",
"enableBackground = True, height = 80) cmds.separator() # Layout cmds.rowColumnLayout( numberOfColumns=1 ) cmds.separator(",
"2.0 (the \"License\"); #### you may not use this file except in compliance",
"the project def checkFolderExists(srcIMGlookDev): if not os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev) else: print(\"RmanAutoLookdev folder already exists\")",
"Check and copy HDR if not os.path.exists(projectScene): copyfile(scriptScene, projectScene) else: print('Lookdev scene is",
"changeCommand = 'placeholder') cmds.floatSliderGrp(colorStyle, edit = True, dragCommand = partial(udateBgValue, colorStyle), changeCommand =",
") # Set the output of the shadows in the beauty alpha and",
"Set the output of the shadows in the beauty alpha and deactivate the",
"cycloType = (cmds.optionMenu(cycloStyle, q=True, v=True)) if cycloType == \"Constant Color\": print(\"Setting the Cyclo",
"else: print(\"Hiding the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1) def setCamFocal(cameraFocal, *args): camFocLength = (cmds.optionMenu(cameraFocal, q=True,",
"= 0.5, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(tweakHeight, edit",
"the License. You may obtain a copy of the License at: #### http://www.apache.org/licenses/LICENSE-2.0",
"1, fileMode = 1, dir = srcIMG ) # Change the path to",
"in Beauty's Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1) cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0) #### Update Lookdev Size #### def",
"label=\"Background Type\", changeCommand=\"placeholder\") cmds.menuItem( label=\"Plane\" ) cmds.menuItem( label=\"Cyclo\" ) cmds.optionMenu(bgStyle, edit = True,",
"\"sourceimages/\" srcIMGlookDev = srcIMG + \"RmanAutoLookdev\" # Get the script Path folder before",
"1) def colorCheckerToggle(colorCheck, *args): colorCheckerState = (cmds.optionMenu(colorCheck, q=True, v=True)) if colorCheckerState == \"On\":",
"def checkFolderExists(srcIMGlookDev): if not os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev) else: print(\"RmanAutoLookdev folder already exists\") # Check",
"True, dragCommand = partial(camDolly, tweakDepth), changeCommand = partial(camDolly, tweakDepth)) # Layout cmds.separator( height",
"cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913) def camHeight(tweakHeight, *args): changeHeight = (cmds.floatSliderGrp(tweakHeight, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight) def",
"update cmds.showWindow( window ) #### Auto Lookdev set up definitions #### def lookdevAuto():",
"the folder if needed checkFolderExists(srcIMGlookDev) # Copy the HDRI and ColorChecker if needed",
"cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0) else: print(\"Setting the Background to Cyclo \") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1) #### Update",
"is present project = getProjectPath() srcIMG = project + \"sourceimages/\" srcIMGlookDev = srcIMG",
"# Importing the lookdev as reference def importLookdev(projectScene): print(\"Import Lookdev as reference\") cmds.file(projectScene,",
"#### Auto Lookdev set up definitions #### def lookdevAuto(): # Get srcIMG folder",
"if cycloType == \"Constant Color\": print(\"Setting the Cyclo to constant color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1)",
"the Background to Cyclo \") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1) #### Update Cyclo Type #### def",
"Cyclo Type #### def updateCycloType(cycloStyle, *args): cycloType = (cmds.optionMenu(cycloStyle, q=True, v=True)) if cycloType",
"numberOfColumns=1 ) cmds.separator( height = 40 ) # AutoLookdev cmds.button(label = \"AutoLookdev\", command",
"the variable is now defined cmds.floatSliderGrp(scaleSlider, edit = True, dragCommand = partial(updateSize, scaleSlider),",
"min = 0, max = 1, value = 0.02, precision = 3, field",
"import partial #### Copyright 2019 DUBOIX <NAME> and <NAME> - Licensed under the",
"scriptFolder = getScriptPath() # Create the folder if needed checkFolderExists(srcIMGlookDev) # Copy the",
"projectScene = srcIMGlookDev + \"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene, projectScene) # Import Lookdev as reference if",
"Set Global Scale # Hidden slider, serves only to define varible scaleSlider =",
"srcIMGhdrTex) copyfile(scriptHdrHdr, srcIMGhdrHdr) else: print('default HDR is already in the project RmanAutoLookdev folder')",
"reference def importLookdev(projectScene): print(\"Import Lookdev as reference\") cmds.file(projectScene, r=True, uns = False )",
"# Layout cmds.separator( height = 40 ) # Set the camera focal length",
"output of the shadows in the beauty alpha and deactivate the learn light",
"= 40 ) # AutoLookdev cmds.button(label = \"AutoLookdev\", command = 'lookdevAuto()') # Layout",
"mm\": print(\"Setting camera focal length to 50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 133.103) else: if",
"camFocLength == \"telelens - 85mm\": print(\"Setting camera focal length to 85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85)",
") cmds.optionMenu(bgStyle, edit = True, changeCommand = partial(updateBgType, bgStyle)) # Set Cyclo Type",
"# Hidden slider, serves only to define varible scaleSlider = cmds.floatSliderGrp(label = \"Lookdev",
"= partial(colorCheckerToggle, colorCheck)) # Layout cmds.separator( height = 40 ) # Set the",
"field = True, dragCommand ='placeholder', changeCommand = 'placeholder') # Slider calling the function",
"exists and copies it if not scriptScene = scriptFolder + \"Lookdev_Scene_v01.ma\" projectScene =",
"length to 85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465) else: print(\"setting camera focal length to",
"in the RmanAutoLookdev folder def checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng):",
"== \"On\": print(\"Showing the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0) else: print(\"Hiding the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1)",
"and copy ColorChecker if not os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng) else: print('default Color",
"srcIMGlookDev + \"/DefaultHDR.hdr\" srcIMGcolorCheckerTex = srcIMGlookDev + \"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng = srcIMGlookDev + \"/DefaultColorChecker.png\"",
"== \"classic - 50 mm\": print(\"Setting camera focal length to 50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50)",
"changeCommand = 'placeholder') cmds.floatSliderGrp(tweakHeight, edit = True, dragCommand = partial(camHeight, tweakHeight), changeCommand =",
"getProjectPath() srcIMG = project + \"sourceimages/\" srcIMGlookDev = srcIMG + \"RmanAutoLookdev\" # Get",
"cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1) def setCamFocal(cameraFocal, *args): camFocLength = (cmds.optionMenu(cameraFocal, q=True, v=True)) if camFocLength ==",
"changeHeight = (cmds.floatSliderGrp(tweakHeight, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight) def camDolly(tweakDepth, *args): changeDolly = (cmds.floatSliderGrp(tweakDepth,",
"Lookdev scene exists and copies it if not scriptScene = scriptFolder + \"Lookdev_Scene_v01.ma\"",
"cmds.separator( height = 40 ) # AutoLookdev cmds.button(label = \"AutoLookdev\", command = 'lookdevAuto()')",
"ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0) else: print(\"Hiding the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1) def setCamFocal(cameraFocal, *args): camFocLength",
"height = 40 ) # Set Global Scale # Hidden slider, serves only",
"cmds.separator( height = 40 ) # Set background Type bgStyle = cmds.optionMenu( label=\"Background",
"cmds.rowColumnLayout( numberOfColumns=1 ) cmds.separator( height = 40 ) # AutoLookdev cmds.button(label = \"AutoLookdev\",",
"except in compliance with the License. You may obtain a copy of the",
"= srcIMGlookDev + \"/DefaultColorChecker.png\" scriptHdrTex = scriptFolder + \"DefaultHDR.hdr.tex\" scriptHdrHdr = scriptFolder +",
"RmanAutoLookdev folder exists in the project def checkFolderExists(srcIMGlookDev): if not os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev) else:",
"infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0) else: print(\"Setting the Background to Cyclo \") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1) ####",
"\"*.hdr\", dialogStyle = 1, fileMode = 1, dir = srcIMG ) # Change",
"partial(udateBgValue, colorStyle)) # Layout cmds.separator( height = 40 ) # Set number of",
"on the screen if cmds.window(\"winID\", exists = True): cmds.deleteUI(\"winID\") # Window windowWidth =",
"\"classic - 50 mm\") cmds.menuItem(label = \"telelens - 85mm\") cmds.menuItem(label = \"widelens -",
"\"/DefaultHDR.hdr\" srcIMGcolorCheckerTex = srcIMGlookDev + \"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng = srcIMGlookDev + \"/DefaultColorChecker.png\" scriptHdrTex =",
"+ \"DefaultHDR.hdr.tex\" scriptHdrHdr = scriptFolder + \"DefaultHDR.hdr\" scriptColorCheckerTex = scriptFolder + \"DefaultColorChecker.png.tex\" scriptColorCheckerPng",
"cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465) else: print(\"setting camera focal length to 28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28)",
"= 40 ) # Set background Type bgStyle = cmds.optionMenu( label=\"Background Type\", changeCommand=\"placeholder\")",
"Update Lookdev Size #### def updateSize(scaleSlider, *args): globalScale = (cmds.floatSliderGrp(scaleSlider, q=True, v=True)) print(globalScale)",
"partial(camHeight, tweakHeight)) tweakDepth = cmds.floatSliderGrp(label=\"Tweak Camera Dolly\", min = -100, max = 100,",
"is already in the project RmanAutoLookdev folder') def checkSceneExists(scriptScene, projectScene): # Check and",
"tweakDepth), changeCommand = partial(camDolly, tweakDepth)) # Layout cmds.separator( height = 40 ) #",
"copying the HDRI scriptFolder = getScriptPath() # Create the folder if needed checkFolderExists(srcIMGlookDev)",
"(the \"License\"); #### you may not use this file except in compliance with",
"= srcIMG + \"RmanAutoLookdev\" # Get the script Path folder before copying the",
"+ \"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene, projectScene) # Import Lookdev as reference if it does not",
"Camera Heigth\", min = -50, max = 50, value = 0, step =",
"# Copy the HDRI and ColorChecker if needed srcIMGhdrTex = srcIMGlookDev + \"/DefaultHDR.hdr.tex\"",
"exists\") # Check if the HDRI & ColorChecker exists in the RmanAutoLookdev folder",
"ColorChecker # Importing the lookdev as reference def importLookdev(projectScene): print(\"Import Lookdev as reference\")",
"the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 1) def setCamFocal(cameraFocal, *args): camFocLength = (cmds.optionMenu(cameraFocal, q=True, v=True)) if",
"= getProjectPath() srcIMG = project + \"sourceimages/\" file = cmds.fileDialog2(fileFilter = \"*.hdr\", dialogStyle",
"else: print(\"RmanAutoLookdev folder already exists\") # Check if the HDRI & ColorChecker exists",
"srcIMGlookDev + \"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng = srcIMGlookDev + \"/DefaultColorChecker.png\" scriptHdrTex = scriptFolder + \"DefaultHDR.hdr.tex\"",
"141.465) else: print(\"setting camera focal length to 28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913) def",
"*args): camFocLength = (cmds.optionMenu(cameraFocal, q=True, v=True)) if camFocLength == \"classic - 50 mm\":",
"= cmds.optionMenu(label = \"Shader Balls\", changeCommand = 'placeholder') cmds.menuItem(label = \"Full\") cmds.menuItem(label =",
"Grid texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0) #### Update Background Color #### def udateBgValue(colorStyle, *args): bgValue",
"variable is now defined cmds.floatSliderGrp(scaleSlider, edit = True, dragCommand = partial(updateSize, scaleSlider), changeCommand",
"# AutoLookdev cmds.button(label = \"AutoLookdev\", command = 'lookdevAuto()') # Layout cmds.separator( height =",
"folder in project and check if AutoLookdev folder is present project = getProjectPath()",
"the Beauty Alpha setRmanShadow() # Get the string for the maya project path",
"displayed on the screen if cmds.window(\"winID\", exists = True): cmds.deleteUI(\"winID\") # Window windowWidth",
"Hidden slider, serves only to define varible scaleSlider = cmds.floatSliderGrp(label = \"Lookdev Global",
"q=True, rootDirectory=True ) return(realProject) # Get the string for the script directory def",
") # Set Global Scale # Hidden slider, serves only to define varible",
"Type bgStyle = cmds.optionMenu( label=\"Background Type\", changeCommand=\"placeholder\") cmds.menuItem( label=\"Plane\" ) cmds.menuItem( label=\"Cyclo\" )",
"dragCommand = partial(udateBgValue, colorStyle), changeCommand = partial(udateBgValue, colorStyle)) # Layout cmds.separator( height =",
"cmds.optionMenu(label = \"Cyclo Type\", changeCommand = 'placeholder') cmds.menuItem(label = \"Constant Color\") cmds.menuItem(label =",
"'placeholder') cmds.menuItem(label = \"Full\") cmds.menuItem(label = \"Minimal\") cmds.optionMenu(shadingBalls, edit = True, changeCommand =",
"copy HDR if not os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex, srcIMGhdrTex) copyfile(scriptHdrHdr, srcIMGhdrHdr) else: print('default HDR is",
"= (cmds.floatSliderGrp(tweakDepth, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly) def changeHDRdef(): # Get the new HDR",
"the Shader Balls to minimal type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 1) def colorCheckerToggle(colorCheck, *args): colorCheckerState =",
"edit = True, dragCommand = partial(camHeight, tweakHeight), changeCommand = partial(camHeight, tweakHeight)) tweakDepth =",
"main(): # Kill existing window if it is already displayed on the screen",
"colorCheckerToggle(colorCheck, *args): colorCheckerState = (cmds.optionMenu(colorCheck, q=True, v=True)) if colorCheckerState == \"On\": print(\"Showing the",
"v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly) def changeHDRdef(): # Get the new HDR path project =",
"cmds.optionMenu(cycloStyle, edit = True, changeCommand = partial(updateCycloType, cycloStyle)) # Set Background Color colorStyle",
"True, changeCommand = partial(updateBgType, bgStyle)) # Set Cyclo Type cycloStyle = cmds.optionMenu(label =",
"+ \"Lookdev_Scene_v01.ma\" projectScene = srcIMGlookDev + \"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene, projectScene) # Import Lookdev as",
"return(realProject) # Get the string for the script directory def getScriptPath(): scriptPath =",
"the Background Value to \"+ str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue) def shadingBallType(shadingBalls, *args): ballsType =",
"== \"Constant Color\": print(\"Setting the Cyclo to constant color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1) else :",
"print('default HDR is already in the project RmanAutoLookdev folder') # Check and copy",
"min = -100, max = 100, value = 0, step = 0.5, field",
"400 window = cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev', widthHeight=(windowWidth, 400) ) #### Header #### cmds.rowColumnLayout(",
"Auto Lookdev set up definitions #### def lookdevAuto(): # Get srcIMG folder in",
"1) #### Update Cyclo Type #### def updateCycloType(cycloStyle, *args): cycloType = (cmds.optionMenu(cycloStyle, q=True,",
"def getProjectPath(): realProject = cmds.workspace( q=True, rootDirectory=True ) return(realProject) # Get the string",
"the script directory def getScriptPath(): scriptPath = os.path.expanduser('~') + \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath) # Check",
"# Check if the HDRI & ColorChecker exists in the RmanAutoLookdev folder def",
"# Get srcIMG folder in project and check if AutoLookdev folder is present",
"= scriptFolder + \"DefaultHDR.hdr\" scriptColorCheckerTex = scriptFolder + \"DefaultColorChecker.png.tex\" scriptColorCheckerPng = scriptFolder +",
"getProjectPath() srcIMG = project + \"sourceimages/\" file = cmds.fileDialog2(fileFilter = \"*.hdr\", dialogStyle =",
"defined cmds.floatSliderGrp(scaleSlider, edit = True, dragCommand = partial(updateSize, scaleSlider), changeCommand = partial(updateSize, scaleSlider))",
"= partial(shadingBallType, shadingBalls)) # Set the toggle for the colorchecker colorCheck = cmds.optionMenu(label",
"RmanAutoLookdev folder') def checkSceneExists(scriptScene, projectScene): # Check and copy HDR if not os.path.exists(projectScene):",
"= \"telelens - 85mm\") cmds.menuItem(label = \"widelens - 28mm\") cmds.optionMenu(cameraFocal, edit = True,",
"Version 2.0 (the \"License\"); #### you may not use this file except in",
"in the project def checkFolderExists(srcIMGlookDev): if not os.path.exists(srcIMGlookDev): os.makedirs(srcIMGlookDev) else: print(\"RmanAutoLookdev folder already",
"changeCommand = 'placeholder' ) cmds.floatSliderGrp(tweakDepth, edit = True, dragCommand = partial(camDolly, tweakDepth), changeCommand",
"= True, dragCommand = partial(udateBgValue, colorStyle), changeCommand = partial(udateBgValue, colorStyle)) # Layout cmds.separator(",
"'placeholder') cmds.menuItem(label = \"On\") cmds.menuItem(label = \"Off\") cmds.optionMenu(colorCheck, edit = True, changeCommand =",
"# Get the script Path folder before copying the HDRI scriptFolder = getScriptPath()",
"# Check and copy HDR if not os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex, srcIMGhdrTex) copyfile(scriptHdrHdr, srcIMGhdrHdr) else:",
"globalScale) #### Update Background Type #### def updateBgType(bgStyle, *args): bgType = (cmds.optionMenu(bgStyle, q=True,",
"q=True, v=True)) print(\"Setting the Background Value to \"+ str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue) def shadingBallType(shadingBalls,",
"cmds.menuItem(label = \"Full\") cmds.menuItem(label = \"Minimal\") cmds.optionMenu(shadingBalls, edit = True, changeCommand = partial(shadingBallType,",
"camera focal length to 28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913) def camHeight(tweakHeight, *args): changeHeight",
"scaleSlider), changeCommand = partial(updateSize, scaleSlider)) # Force update if the value is entered",
"-100, max = 100, value = 0, step = 0.5, field = True,",
"cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale) #### Update Background Type #### def updateBgType(bgStyle, *args): bgType = (cmds.optionMenu(bgStyle,",
"already displayed on the screen if cmds.window(\"winID\", exists = True): cmds.deleteUI(\"winID\") # Window",
"under the Apache License, Version 2.0 (the \"License\"); #### you may not use",
"Create the folder if needed checkFolderExists(srcIMGlookDev) # Copy the HDRI and ColorChecker if",
"Heigth\", min = -50, max = 50, value = 0, step = 0.5,",
"# Show window - Need update cmds.showWindow( window ) #### Auto Lookdev set",
"HDR if not os.path.exists(projectScene): copyfile(scriptScene, projectScene) else: print('Lookdev scene is already in the",
"color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1) else : print(\"Setting the Cyclo to Grid texture\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 0)",
"def setRmanShadow(): print(\"Set Renderman Shadow output in Beauty's Alpha\") cmds.setAttr(\"rmanGlobals.outputShadowAOV\", 1) cmds.setAttr(\"rmanGlobals.learnLightSelection\", 0)",
"print(\"setting camera focal length to 28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913) def camHeight(tweakHeight, *args):",
"already exists\") # Check if the HDRI & ColorChecker exists in the RmanAutoLookdev",
"q=True, v=True)) if colorCheckerState == \"On\": print(\"Showing the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0) else: print(\"Hiding",
"= partial(camHeight, tweakHeight), changeCommand = partial(camHeight, tweakHeight)) tweakDepth = cmds.floatSliderGrp(label=\"Tweak Camera Dolly\", min",
"and copies it if not scriptScene = scriptFolder + \"Lookdev_Scene_v01.ma\" projectScene = srcIMGlookDev",
"as the variable is now defined cmds.floatSliderGrp(scaleSlider, edit = True, dragCommand = partial(updateSize,",
"Update Background Type #### def updateBgType(bgStyle, *args): bgType = (cmds.optionMenu(bgStyle, q=True, v=True)) if",
"Color\": print(\"Setting the Cyclo to constant color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1) else : print(\"Setting the",
"<filename>AutoLookdev_Rman_v01_0/RenderMan_AutoLookDev.py<gh_stars>0 import maya.cmds as cmds import os from shutil import copyfile from functools",
"= 'placeholder', changeCommand = 'placeholder' ) cmds.floatSliderGrp(tweakDepth, edit = True, dragCommand = partial(camDolly,",
"Get the new HDR path project = getProjectPath() srcIMG = project + \"sourceimages/\"",
"path def getProjectPath(): realProject = cmds.workspace( q=True, rootDirectory=True ) return(realProject) # Get the",
"else: print('Lookdev scene is already in the project RmanAutoLookdev folder') # Check and",
"focal length and compensates for it cameraFocal = cmds.optionMenu(label = \"Camera focal length\",",
"the Shader Balls to full type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0) else : print(\"Setting the Shader",
"and copy HDR if not os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex, srcIMGhdrTex) copyfile(scriptHdrHdr, srcIMGhdrHdr) else: print('default HDR",
"in the Beauty Alpha setRmanShadow() # Get the string for the maya project",
"checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng) # Check if the Lookdev",
"print(\"Setting the Background to Cyclo \") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1) #### Update Cyclo Type ####",
"AutoLookdev cmds.button(label = \"AutoLookdev\", command = 'lookdevAuto()') # Layout cmds.separator( height = 40",
"cmds.optionMenu(cameraFocal, edit = True, changeCommand = partial(setCamFocal, cameraFocal)) tweakHeight = cmds.floatSliderGrp(label=\"Tweak Camera Heigth\",",
"'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(tweakHeight, edit = True, dragCommand = partial(camHeight, tweakHeight), changeCommand",
"Update Cyclo Type #### def updateCycloType(cycloStyle, *args): cycloType = (cmds.optionMenu(cycloStyle, q=True, v=True)) if",
"srcIMGhdrHdr) else: print('default HDR is already in the project RmanAutoLookdev folder') # Check",
"True, changeCommand = partial(shadingBallType, shadingBalls)) # Set the toggle for the colorchecker colorCheck",
"window if it is already displayed on the screen if cmds.window(\"winID\", exists =",
"True, changeCommand = partial(updateCycloType, cycloStyle)) # Set Background Color colorStyle = cmds.floatSliderGrp(label =",
"min=0.001, max=100, value=1, precision = 3, field = True, dragCommand ='placeholder', changeCommand =",
"scriptColorCheckerPng = scriptFolder + \"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng)",
"label=\"Cyclo\" ) cmds.optionMenu(bgStyle, edit = True, changeCommand = partial(updateBgType, bgStyle)) # Set Cyclo",
"RmanAutoLookdev folder def checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng): # Check",
"check if AutoLookdev folder is present project = getProjectPath() srcIMG = project +",
"HDR\", command = \"changeHDRdef()\") # Reset # Show window - Need update cmds.showWindow(",
"the Background to infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0) else: print(\"Setting the Background to Cyclo \")",
"scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng) # Check if the Lookdev scene exists and copies it",
"= project + \"sourceimages/\" file = cmds.fileDialog2(fileFilter = \"*.hdr\", dialogStyle = 1, fileMode",
"50 mm\") cmds.menuItem(label = \"telelens - 85mm\") cmds.menuItem(label = \"widelens - 28mm\") cmds.optionMenu(cameraFocal,",
"Camera Dolly\", min = -100, max = 100, value = 0, step =",
"os from shutil import copyfile from functools import partial #### Copyright 2019 DUBOIX",
"85mm\": print(\"Setting camera focal length to 85mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 85) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 141.465) else: print(\"setting",
"HDR path project = getProjectPath() srcIMG = project + \"sourceimages/\" file = cmds.fileDialog2(fileFilter",
"dragCommand = 'placeholder', changeCommand = 'placeholder' ) cmds.floatSliderGrp(tweakDepth, edit = True, dragCommand =",
"srcIMGcolorCheckerPng): # Check and copy HDR if not os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex, srcIMGhdrTex) copyfile(scriptHdrHdr, srcIMGhdrHdr)",
"if not scriptScene = scriptFolder + \"Lookdev_Scene_v01.ma\" projectScene = srcIMGlookDev + \"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene,",
"Import Lookdev as reference if it does not exist in the scene importLookdev(projectScene)",
"# Import Lookdev as reference if it does not exist in the scene",
"at: #### http://www.apache.org/licenses/LICENSE-2.0 def main(): # Kill existing window if it is already",
"\"Constant Color\") cmds.menuItem(label = \"Checker\") cmds.optionMenu(cycloStyle, edit = True, changeCommand = partial(updateCycloType, cycloStyle))",
"\"widelens - 28mm\") cmds.optionMenu(cameraFocal, edit = True, changeCommand = partial(setCamFocal, cameraFocal)) tweakHeight =",
"== \"Full\": print(\"Setting the Shader Balls to full type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0) else :",
"field = True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(colorStyle, edit = True,",
"1, value = 0.02, precision = 3, field = True, dragCommand = 'placeholder',",
"edit = True, changeCommand = partial(updateCycloType, cycloStyle)) # Set Background Color colorStyle =",
"srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng): # Check and copy HDR if not",
"partial(updateSize, scaleSlider), changeCommand = partial(updateSize, scaleSlider)) # Force update if the value is",
"shadingBalls = cmds.optionMenu(label = \"Shader Balls\", changeCommand = 'placeholder') cmds.menuItem(label = \"Full\") cmds.menuItem(label",
"Set the camera focal length and compensates for it cameraFocal = cmds.optionMenu(label =",
"\"DefaultColorChecker.png.tex\" scriptColorCheckerPng = scriptFolder + \"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex,",
"print(\"Setting the Shader Balls to full type\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ShaderBalls\", 0) else : print(\"Setting the",
"\"classic - 50 mm\": print(\"Setting camera focal length to 50mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 50) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\",",
"== \"Plane\": print(\"Setting the Background to infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0) else: print(\"Setting the Background",
"focal length to 28mm\") cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CamShape.focalLength\", 28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913) def camHeight(tweakHeight, *args): changeHeight =",
"#### Header #### cmds.rowColumnLayout( numberOfColumns=1, columnWidth=[(1, windowWidth)]) cmds.text( label = \"Renderman Auto Lookdev\",",
"for the colorchecker colorCheck = cmds.optionMenu(label =\"Color Checker\", changeCommand = 'placeholder') cmds.menuItem(label =",
"= \"Full\") cmds.menuItem(label = \"Minimal\") cmds.optionMenu(shadingBalls, edit = True, changeCommand = partial(shadingBallType, shadingBalls))",
"Type\", changeCommand = 'placeholder') cmds.menuItem(label = \"Constant Color\") cmds.menuItem(label = \"Checker\") cmds.optionMenu(cycloStyle, edit",
"cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight) def camDolly(tweakDepth, *args): changeDolly = (cmds.floatSliderGrp(tweakDepth, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly) def",
"scriptFolder + \"DefaultColorChecker.png\" checkHdrExists(scriptHdrTex, scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng) # Check",
"'placeholder') # Slider calling the function as the variable is now defined cmds.floatSliderGrp(scaleSlider,",
"\"Checker\") cmds.optionMenu(cycloStyle, edit = True, changeCommand = partial(updateCycloType, cycloStyle)) # Set Background Color",
"= 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(colorStyle, edit = True, dragCommand = partial(udateBgValue, colorStyle),",
"function as the variable is now defined cmds.floatSliderGrp(scaleSlider, edit = True, dragCommand =",
"srcIMGcolorCheckerTex = srcIMGlookDev + \"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng = srcIMGlookDev + \"/DefaultColorChecker.png\" scriptHdrTex = scriptFolder",
"(cmds.floatSliderGrp(tweakHeight, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight) def camDolly(tweakDepth, *args): changeDolly = (cmds.floatSliderGrp(tweakDepth, q=True, v=True))",
"\"Full\") cmds.menuItem(label = \"Minimal\") cmds.optionMenu(shadingBalls, edit = True, changeCommand = partial(shadingBallType, shadingBalls)) #",
"Alpha setRmanShadow() # Get the string for the maya project path def getProjectPath():",
"os.path.exists(projectScene): copyfile(scriptScene, projectScene) else: print('Lookdev scene is already in the project RmanAutoLookdev folder')",
") # Change HDRI changeHDR = cmds.button(label = \"Change HDR\", command = \"changeHDRdef()\")",
"setCamFocal(cameraFocal, *args): camFocLength = (cmds.optionMenu(cameraFocal, q=True, v=True)) if camFocLength == \"classic - 50",
"not exist in the scene importLookdev(projectScene) # Set the shadow output in the",
"True, dragCommand = partial(udateBgValue, colorStyle), changeCommand = partial(udateBgValue, colorStyle)) # Layout cmds.separator( height",
"def camHeight(tweakHeight, *args): changeHeight = (cmds.floatSliderGrp(tweakHeight, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight) def camDolly(tweakDepth, *args):",
"Background Color #### def udateBgValue(colorStyle, *args): bgValue = (cmds.floatSliderGrp(colorStyle, q=True, v=True)) print(\"Setting the",
"number of Shading balls shadingBalls = cmds.optionMenu(label = \"Shader Balls\", changeCommand = 'placeholder')",
"(cmds.floatSliderGrp(colorStyle, q=True, v=True)) print(\"Setting the Background Value to \"+ str(bgValue)) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Background_Color\", bgValue) def",
"\"/DefaultHDR.hdr.tex\" srcIMGhdrHdr = srcIMGlookDev + \"/DefaultHDR.hdr\" srcIMGcolorCheckerTex = srcIMGlookDev + \"/DefaultColorChecker.png.tex\" srcIMGcolorCheckerPng =",
"100, value = 0, step = 0.5, field = True, dragCommand = 'placeholder',",
"+ \"sourceimages/\" srcIMGlookDev = srcIMG + \"RmanAutoLookdev\" # Get the script Path folder",
"scene importLookdev(projectScene) # Set the shadow output in the Beauty Alpha setRmanShadow() #",
"HDRI and ColorChecker if needed srcIMGhdrTex = srcIMGlookDev + \"/DefaultHDR.hdr.tex\" srcIMGhdrHdr = srcIMGlookDev",
"compliance with the License. You may obtain a copy of the License at:",
"Color Checker is already in the project RmanAutoLookdev folder') def checkSceneExists(scriptScene, projectScene): #",
"\"DefaultHDR.hdr.tex\" scriptHdrHdr = scriptFolder + \"DefaultHDR.hdr\" scriptColorCheckerTex = scriptFolder + \"DefaultColorChecker.png.tex\" scriptColorCheckerPng =",
"q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateZ\", changeDolly) def changeHDRdef(): # Get the new HDR path project",
"if the HDRI & ColorChecker exists in the RmanAutoLookdev folder def checkHdrExists(scriptHdrTex, scriptHdrHdr,",
"Need update cmds.showWindow( window ) #### Auto Lookdev set up definitions #### def",
"Check if the HDRI & ColorChecker exists in the RmanAutoLookdev folder def checkHdrExists(scriptHdrTex,",
"0.705, 0.909], enableBackground = True, height = 80) cmds.separator() # Layout cmds.rowColumnLayout( numberOfColumns=1",
"= 1, value = 0.02, precision = 3, field = True, dragCommand =",
"# Change HDRI changeHDR = cmds.button(label = \"Change HDR\", command = \"changeHDRdef()\") #",
") cmds.menuItem( label=\"Cyclo\" ) cmds.optionMenu(bgStyle, edit = True, changeCommand = partial(updateBgType, bgStyle)) #",
"entered as string # Layout cmds.separator( height = 40 ) # Set background",
"= cmds.optionMenu( label=\"Background Type\", changeCommand=\"placeholder\") cmds.menuItem( label=\"Plane\" ) cmds.menuItem( label=\"Cyclo\" ) cmds.optionMenu(bgStyle, edit",
"if the Lookdev scene exists and copies it if not scriptScene = scriptFolder",
"srcIMGlookDev + \"/Lookdev_Scene_v01.ma\" checkSceneExists(scriptScene, projectScene) # Import Lookdev as reference if it does",
"lookdev as reference def importLookdev(projectScene): print(\"Import Lookdev as reference\") cmds.file(projectScene, r=True, uns =",
"\"Plane\": print(\"Setting the Background to infinite\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 0) else: print(\"Setting the Background to",
"0) else: print(\"Setting the Background to Cyclo \") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.BackgroundType\", 1) #### Update Cyclo",
"# Set Background Color colorStyle = cmds.floatSliderGrp(label = \"Background Value\", min = 0,",
"# Layout cmds.rowColumnLayout( numberOfColumns=1 ) cmds.separator( height = 40 ) # AutoLookdev cmds.button(label",
"not os.path.exists(srcIMGcolorCheckerTex): copyfile(scriptColorCheckerTex, srcIMGcolorCheckerTex) copyfile(scriptColorCheckerPng, srcIMGcolorCheckerPng) else: print('default Color Checker is already in",
"True): cmds.deleteUI(\"winID\") # Window windowWidth = 400 window = cmds.window(\"winID\", title=\"Renderman_Auto_Lookdev\", iconName='RmanLookDev', widthHeight=(windowWidth,",
"cameraFocal = cmds.optionMenu(label = \"Camera focal length\", changeCommand = 'placeholder') cmds.menuItem(label = \"classic",
"Type\", changeCommand=\"placeholder\") cmds.menuItem( label=\"Plane\" ) cmds.menuItem( label=\"Cyclo\" ) cmds.optionMenu(bgStyle, edit = True, changeCommand",
"= cmds.optionMenu(label =\"Color Checker\", changeCommand = 'placeholder') cmds.menuItem(label = \"On\") cmds.menuItem(label = \"Off\")",
"28mm\") cmds.optionMenu(cameraFocal, edit = True, changeCommand = partial(setCamFocal, cameraFocal)) tweakHeight = cmds.floatSliderGrp(label=\"Tweak Camera",
"shutil import copyfile from functools import partial #### Copyright 2019 DUBOIX <NAME> and",
"cmds.separator( height = 40 ) # Set Global Scale # Hidden slider, serves",
"if AutoLookdev folder is present project = getProjectPath() srcIMG = project + \"sourceimages/\"",
"the screen if cmds.window(\"winID\", exists = True): cmds.deleteUI(\"winID\") # Window windowWidth = 400",
"scriptHdrHdr, srcIMGhdrTex, srcIMGhdrHdr, scriptColorCheckerTex, scriptColorCheckerPng, srcIMGcolorCheckerTex, srcIMGcolorCheckerPng) # Check if the Lookdev scene",
"HDR if not os.path.exists(srcIMGhdrTex): copyfile(scriptHdrTex, srcIMGhdrTex) copyfile(scriptHdrHdr, srcIMGhdrHdr) else: print('default HDR is already",
"= \"Renderman Auto Lookdev\", font = \"boldLabelFont\", backgroundColor = [0.290, 0.705, 0.909], enableBackground",
"80) cmds.separator() # Layout cmds.rowColumnLayout( numberOfColumns=1 ) cmds.separator( height = 40 ) #",
"cmds.optionMenu(label =\"Color Checker\", changeCommand = 'placeholder') cmds.menuItem(label = \"On\") cmds.menuItem(label = \"Off\") cmds.optionMenu(colorCheck,",
"the HDRI scriptFolder = getScriptPath() # Create the folder if needed checkFolderExists(srcIMGlookDev) #",
"Cyclo to constant color\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_Cyclo_Type\", 1) else : print(\"Setting the Cyclo to Grid",
"setRmanShadow() # Get the string for the maya project path def getProjectPath(): realProject",
"28) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_Cam_focal_compensator.translateZ\", 127.913) def camHeight(tweakHeight, *args): changeHeight = (cmds.floatSliderGrp(tweakHeight, q=True, v=True)) cmds.setAttr(\"Lookdev_Scene_v01_Lookdev_CameraScale_LOC.translateY\", changeHeight)",
"command = 'lookdevAuto()') # Layout cmds.separator( height = 40 ) # Set Global",
"#### def updateSize(scaleSlider, *args): globalScale = (cmds.floatSliderGrp(scaleSlider, q=True, v=True)) print(globalScale) cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_GlobalScale\", globalScale) ####",
"numberOfColumns=1, columnWidth=[(1, windowWidth)]) cmds.text( label = \"Renderman Auto Lookdev\", font = \"boldLabelFont\", backgroundColor",
"= 3, field = True, dragCommand = 'placeholder', changeCommand = 'placeholder') cmds.floatSliderGrp(colorStyle, edit",
"# Check and copy ColorChecker # Importing the lookdev as reference def importLookdev(projectScene):",
"\"On\") cmds.menuItem(label = \"Off\") cmds.optionMenu(colorCheck, edit = True, changeCommand = partial(colorCheckerToggle, colorCheck)) #",
"colorCheckerState == \"On\": print(\"Showing the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\", 0) else: print(\"Hiding the ColorChecker\") cmds.setAttr(\"Lookdev_Scene_v01_Rman_Lookdev_CTRL.Lookdev_ColorChecker\",",
"\"Renderman Auto Lookdev\", font = \"boldLabelFont\", backgroundColor = [0.290, 0.705, 0.909], enableBackground =",
"*args): cycloType = (cmds.optionMenu(cycloStyle, q=True, v=True)) if cycloType == \"Constant Color\": print(\"Setting the",
"False ) # Set the output of the shadows in the beauty alpha",
"= (cmds.optionMenu(shadingBalls, q=True, v=True)) if ballsType == \"Full\": print(\"Setting the Shader Balls to",
"string for the script directory def getScriptPath(): scriptPath = os.path.expanduser('~') + \"/maya/2019/scripts/AutoLookdev_Rman_v01_0/\" return(scriptPath)",
"Copyright 2019 DUBOIX <NAME> and <NAME> - Licensed under the Apache License, Version"
] |
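One helper that lookdevAuto() calls, checkFolderExists(srcIMGlookDev), does not survive in the fragment beyond a trailing print ending in "already exists". A minimal sketch of what it plausibly does, assuming it only creates the project's RmanAutoLookdev folder on first run:

def checkFolderExists(srcIMGlookDev):
    # Assumed body (only the call site and a closing "already exists"
    # print are recoverable): create the folder on first run.
    if not os.path.exists(srcIMGlookDev):
        os.makedirs(srcIMGlookDev)
    else:
        print('RmanAutoLookdev folder already exists')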
# sirixdb/sirix-python-client

import httpx

from pysirix.sirix import Sirix
from pysirix.database import Database
from pysirix.resource import Resource
from pysirix.json_store import JsonStoreSync, JsonStoreAsync
from pysirix.constants import Insert, DBType, TimeAxisShift
from pysirix.errors import SirixServerError
from pysirix.types import (
    QueryResult,
    Commit,
    Revision,
    InsertDiff,
    ReplaceDiff,
    UpdateDiff,
    DeleteDiff,
    Metadata,
    MetaNode,
)


def sirix_sync(username: str, password: str, client: httpx.Client) -> Sirix:
    """
    :param username: the username registered with keycloak for this application.
    :param password: the password registered with keycloak for this application.
    :param client: an ``httpx.Client`` instance. You should instantiate the
        instance with the ``base_url`` param as the url for the sirix database.
    """
    s = Sirix(username=username, password=password, client=client)
    s.authenticate()
    return s


async def sirix_async(
    username: str, password: str, client: httpx.AsyncClient,
) -> Sirix:
    """
    :param username: the username registered with keycloak for this application.
    :param password: the password registered with keycloak for this application.
    :param client: an ``httpx.AsyncClient`` instance. You should instantiate the
        instance with the ``base_url`` param as the url for the sirix database.
    """
    s = Sirix(username=username, password=password, client=client)
    await s.authenticate()
    return s


__all__ = [
    "sirix_sync",
    "sirix_async",
    "Sirix",
    "SirixServerError",
    "Database",
    "Resource",
    "JsonStoreSync",
    "JsonStoreAsync",
    "Insert",
    "DBType",
    "QueryResult",
    "Commit",
    "Revision",
    "InsertDiff",
    "ReplaceDiff",
    "UpdateDiff",
    "DeleteDiff",
    "Metadata",
    "MetaNode",
    "TimeAxisShift",
]
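A short usage sketch for the two factory functions above, following their docstrings; the server URL and credentials are placeholders, not values from the source:

import asyncio

import httpx

from pysirix import sirix_sync, sirix_async

# Synchronous: the httpx.Client carries the SirixDB base_url, as the docstring asks.
client = httpx.Client(base_url="https://localhost:9443")  # placeholder URL
sirix = sirix_sync("admin", "admin", client)              # placeholder credentials


async def main():
    # Asynchronous variant: same shape, but authentication is awaited.
    async_client = httpx.AsyncClient(base_url="https://localhost:9443")
    sirix = await sirix_async("admin", "admin", async_client)
    await async_client.aclose()


asyncio.run(main())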
from typing import Union, Optional

from app.utility.schemas import TemporaryReminder, PermanentReminder, Bookmark
from app.utility.stickers import stickers_recognize


def answer_forms(answer: Optional[Union[TemporaryReminder, PermanentReminder, Bookmark]] = None,
                 adding: Optional[bool] = False,
                 element: Optional[object] = None,
                 position: Optional[int] = None) -> str:
    if adding:
        stick_done, stick_type = stickers_recognize(element[4], element[2])
        answer_message = message_form(reminder_type=element[2], stick_done=stick_done,
                                      stick_type=stick_type, element=element,
                                      position=position)
    else:
        stick_done, stick_type = stickers_recognize(answer.is_done, answer.type)
        if not answer.type == 'book':
            answer_message = f'{stick_done} {stick_type} - {answer.title}:\n{answer.date}\n id:{answer.id}'
        else:
            answer_message = f'{stick_done} {stick_type} - {answer.title}:\n id:{answer.id}'
    return answer_message


def message_form(reminder_type: str,
                 stick_done: str,
                 stick_type: str,
                 element: object,
                 position: Optional[int] = None,
                 ) -> str:
    if reminder_type == 'temp':
        answer_message = f'{position}) {stick_done} {stick_type} - {element[1]}:\n{element[3]}\n'
    elif reminder_type == 'perm':
        answer_message = f'{position}) {stick_done} {stick_type} - {element[1]}:\n{element[3]}\n{element[5]}\n'
    elif reminder_type == 'book':
        answer_message = f'{position}) {stick_done} {stick_type} - {element[1]}\n'
    else:
        answer_message = f'{position}) {stick_done} {stick_type} - {element[1]}:\n{element[3]}\n'
    return answer_message
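For illustration, a call to message_form with a row shaped the way the indexing above implies (element[1] title, element[2] type, element[3] date, element[4] done flag, element[5] extra text); the tuple layout and sticker strings are assumptions, not from the source:

# Hypothetical row: (id, title, type, date, is_done, note)
element = (7, 'Buy milk', 'temp', '2021-05-01 09:00', False, '')

text = message_form(reminder_type=element[2], stick_done='[ ]', stick_type='(temp)',
                    element=element, position=1)
print(text)  # -> "1) [ ] (temp) - Buy milk:\n2021-05-01 09:00\n"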
[
"= None def hablar(self): self.modo.hablar() def dormir(self): self.modo.dormir() def atacar(self): self.modo.atacar() def esPerezoso(self):",
"def esAgresivo(self): return self.modo.esAgresivo() def recorrer(self): self.modo.printOn() def actua(self): self.modo.actua(self) def mover(self): self.modo.mover()",
"def esPerezoso(self): return self.modo.esPerezoso() def esAgresivo(self): return self.modo.esAgresivo() def recorrer(self): self.modo.printOn() def actua(self):",
"return self.modo.esPerezoso() def esAgresivo(self): return self.modo.esAgresivo() def recorrer(self): self.modo.printOn() def actua(self): self.modo.actua(self) def",
"self.vida=0 self.modo = None self.ataque = 10 self.posicion = None def hablar(self): self.modo.hablar()",
"self.modo.hablar() def dormir(self): self.modo.dormir() def atacar(self): self.modo.atacar() def esPerezoso(self): return self.modo.esPerezoso() def esAgresivo(self):",
"def hablar(self): self.modo.hablar() def dormir(self): self.modo.dormir() def atacar(self): self.modo.atacar() def esPerezoso(self): return self.modo.esPerezoso()",
"self.modo.dormir() def atacar(self): self.modo.atacar() def esPerezoso(self): return self.modo.esPerezoso() def esAgresivo(self): return self.modo.esAgresivo() def",
"atacar(self): self.modo.atacar() def esPerezoso(self): return self.modo.esPerezoso() def esAgresivo(self): return self.modo.esAgresivo() def recorrer(self): self.modo.printOn()",
"class Bicho: def __init__(self): self.vida=0 self.modo = None self.ataque = 10 self.posicion =",
"dormir(self): self.modo.dormir() def atacar(self): self.modo.atacar() def esPerezoso(self): return self.modo.esPerezoso() def esAgresivo(self): return self.modo.esAgresivo()",
"None def hablar(self): self.modo.hablar() def dormir(self): self.modo.dormir() def atacar(self): self.modo.atacar() def esPerezoso(self): return",
"self.modo = None self.ataque = 10 self.posicion = None def hablar(self): self.modo.hablar() def",
"def __init__(self): self.vida=0 self.modo = None self.ataque = 10 self.posicion = None def",
"__init__(self): self.vida=0 self.modo = None self.ataque = 10 self.posicion = None def hablar(self):",
"def atacar(self): self.modo.atacar() def esPerezoso(self): return self.modo.esPerezoso() def esAgresivo(self): return self.modo.esAgresivo() def recorrer(self):",
"= 10 self.posicion = None def hablar(self): self.modo.hablar() def dormir(self): self.modo.dormir() def atacar(self):",
"None self.ataque = 10 self.posicion = None def hablar(self): self.modo.hablar() def dormir(self): self.modo.dormir()",
"= None self.ataque = 10 self.posicion = None def hablar(self): self.modo.hablar() def dormir(self):",
"10 self.posicion = None def hablar(self): self.modo.hablar() def dormir(self): self.modo.dormir() def atacar(self): self.modo.atacar()",
"esPerezoso(self): return self.modo.esPerezoso() def esAgresivo(self): return self.modo.esAgresivo() def recorrer(self): self.modo.printOn() def actua(self): self.modo.actua(self)",
"def dormir(self): self.modo.dormir() def atacar(self): self.modo.atacar() def esPerezoso(self): return self.modo.esPerezoso() def esAgresivo(self): return",
"hablar(self): self.modo.hablar() def dormir(self): self.modo.dormir() def atacar(self): self.modo.atacar() def esPerezoso(self): return self.modo.esPerezoso() def",
"self.modo.atacar() def esPerezoso(self): return self.modo.esPerezoso() def esAgresivo(self): return self.modo.esAgresivo() def recorrer(self): self.modo.printOn() def",
"self.modo.esPerezoso() def esAgresivo(self): return self.modo.esAgresivo() def recorrer(self): self.modo.printOn() def actua(self): self.modo.actua(self) def mover(self):",
"self.ataque = 10 self.posicion = None def hablar(self): self.modo.hablar() def dormir(self): self.modo.dormir() def",
"self.posicion = None def hablar(self): self.modo.hablar() def dormir(self): self.modo.dormir() def atacar(self): self.modo.atacar() def",
"Bicho: def __init__(self): self.vida=0 self.modo = None self.ataque = 10 self.posicion = None"
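Bicho is a classic State-pattern host: it holds no behaviour of its own and forwards every call to whatever object sits in self.modo. A minimal sketch of one concrete mode follows, assuming the interface implied by the delegations above; ModoAgresivo and its printed strings are hypothetical, not part of the original code.

# Hypothetical concrete mode implementing the interface Bicho delegates to.
class ModoAgresivo:
    def hablar(self):
        print("grr!")

    def dormir(self):
        print("duerme con un ojo abierto")

    def atacar(self):
        print("ataca!")

    def esPerezoso(self):
        return False

    def esAgresivo(self):
        return True

    def printOn(self):
        print("modo agresivo")

    def actua(self, bicho):
        # an aggressive mode attacks whenever it acts
        bicho.atacar()

    def mover(self):
        print("se mueve rapido")


# usage: plug a mode into a Bicho and let it delegate
bicho = Bicho()
bicho.modo = ModoAgresivo()
bicho.actua()              # -> ataca!
print(bicho.esAgresivo())  # -> True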
"too far and run out of fic: if (len(works) is 0): page_empty =",
"already a page indicator in the url if (start is not -1): #",
"of a works listed page # e.g. # https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling # https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count # how",
"= {'user-agent' : header_info} req = requests.get(url, headers=headers) soup = BeautifulSoup(req.text, \"lxml\") #",
"parser.add_argument( '--multichapter_only', default='', help='only retrieve ids for multichapter fics') parser.add_argument( '--tag_csv', default='', help='provide",
"requests import csv import sys import datetime import argparse page_empty = False base_url",
"responsiveness in the \"UI\" sys.stdout.write('.') sys.stdout.flush() works = soup.find_all(class_=\"work blurb group\") # see",
"-1 else: num_requested_fic = int(args.num_to_retrieve) multichap_only = str(args.multichapter_only) if multichap_only != \"\": multichap_only",
"is not -1): url = url + \"&page=2\" # there an no modifiers",
"a csv for later use e.g. to retrieve fic text # Options: #",
"while(not_finished()): # 5 second delay between requests as per AO3's terms of service",
"= [] for tag in works: if (multichap_only): # FOR MULTICHAP ONLY chaps",
"crash doesn't lose everything. # include the url where it was found, #",
"see if we've gone too far and run out of fic: if (len(works)",
"would like to add additional search terms (that is should contain at least",
"new tag, and save to global url def add_tag_to_url(tag): global url key =",
"str(page) + url[page_end_index:] # if it's at the end of the url else:",
"add_tag_to_url(tag): global url key = \"&work_search%5Bother_tag_names%5D=\" if (base_url.find(key)): start = base_url.find(key) + len(key)",
"as text_file: text_file.write(\"url: \" + url + \"\\n\" + \"num_requested_fic: \" + str(num_requested_fic)",
"def get_ids(header_info=''): global page_empty headers = {'user-agent' : header_info} req = requests.get(url, headers=headers)",
"order # Saves ids to a csv for later use e.g. to retrieve",
"http header') parser.add_argument( '--num_to_retrieve', default='a', help='how many fic ids you want') parser.add_argument( '--multichapter_only',",
": header_info} req = requests.get(url, headers=headers) soup = BeautifulSoup(req.text, \"lxml\") # some responsiveness",
"str(num_requested_fic) + \"\\n\" + \"retreived on: \" + str(datetime.datetime.now())) # reset flags to",
"[] # keep track of all processed ids to avoid repeats: # this",
"False tag_csv = str(args.tag_csv) if (tag_csv): with open(tag_csv, \"r\") as tags_f: tags_reader =",
"a text file with the starting url, # and the number of requested",
"key = \"&work_search%5Bother_tag_names%5D=\" if (base_url.find(key)): start = base_url.find(key) + len(key) new_url = base_url[:start]",
"no more fic, end. # def not_finished(): if (page_empty): return False if (num_requested_fic",
"# # include a text file with the starting url, # and the",
"= base_url + \"&work_search%5Bother_tag_names%5D=\" + tag # # after every page, write the",
"else: break # # if you want everything, you're not done # otherwise",
"after every page, write the gathered ids # to the csv, so a",
"all work ids # def get_ids(header_info=''): global page_empty headers = {'user-agent' : header_info}",
"= url + \"&page=2\" # there an no modifiers yet else: url =",
"works listed page, # then extract all work ids # def get_ids(header_info=''): global",
"def update_url_to_next_page(): global url key = \"page=\" start = url.find(key) # there is",
"# this is separate from the temporary batch of ids # that are",
"# def write_ids_to_csv(ids): global num_recorded_fic with open(csv_name + \".csv\", 'a') as csvfile: wr",
"\"_readme.txt\", \"w\") as text_file: text_file.write(\"url: \" + url + \"\\n\" + \"num_requested_fic: \"",
"end of the url else: page = int(url[page_start_index:]) + 1 url = url[:page_start_index]",
"1 else: # there are other modifiers if (url.find(\"?\") is not -1): url",
"is 0): page_empty = True # process list for new fic ids ids",
"url[:page_start_index] + str(page) # there is no page indicator, so we are on",
"+ \"&work_search%5Bother_tag_names%5D=\" + tag # # after every page, write the gathered ids",
"terms of service time.sleep(5) ids = get_ids(header_info) write_ids_to_csv(ids) update_url_to_next_page() def main(): header_info =",
"like to add additional search terms (that is should contain at least one",
"+ str(datetime.datetime.now())) # reset flags to run again # note: do not reset",
"default='', help='user http header') parser.add_argument( '--num_to_retrieve', default='a', help='how many fic ids you want')",
"blurb group\") # see if we've gone too far and run out of",
"if (page_end_index is not -1): page = int(url[page_start_index:page_end_index]) + 1 url = url[:page_start_index]",
"want') parser.add_argument( '--multichapter_only', default='', help='only retrieve ids for multichapter fics') parser.add_argument( '--tag_csv', default='',",
"all if (str(args.num_to_retrieve) is 'a'): num_requested_fic = -1 else: num_requested_fic = int(args.num_to_retrieve) multichap_only",
"no page indicator, so we are on page 1 else: # there are",
"actually written to the csv. # If you've gone too far and there",
"False # # include a text file with the starting url, # and",
"\"&work_search%5Bother_tag_names%5D=\" + tag # # after every page, write the gathered ids #",
"listed page, # then extract all work ids # def get_ids(header_info=''): global page_empty",
"of ids # that are written to the csv and then forgotten seen_ids",
"ids = [] for tag in works: if (multichap_only): # FOR MULTICHAP ONLY",
"\"num_requested_fic: \" + str(num_requested_fic) + \"\\n\" + \"retreived on: \" + str(datetime.datetime.now())) #",
"= num_recorded_fic + 1 else: break # # if you want everything, you're",
"csv_name = \"\" multichap_only = \"\" tags = [] # keep track of",
"that if you go too far, ao3 won't error, # but there will",
"import re import time import requests import csv import sys import datetime import",
"header') parser.add_argument( '--num_to_retrieve', default='a', help='how many fic ids you want') parser.add_argument( '--multichapter_only', default='',",
"listed page # e.g. # https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling # https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count # how many fics they",
"otherwise compare recorded against requested. # recorded doesn't update until it's actually written",
"update the url to move to the next page # note that if",
"a search URL') parser.add_argument( 'url', metavar='URL', help='a single URL pointing to an AO3",
"= BeautifulSoup(req.text, \"lxml\") # some responsiveness in the \"UI\" sys.stdout.write('.') sys.stdout.flush() works =",
"= get_ids(header_info) write_ids_to_csv(ids) update_url_to_next_page() def main(): header_info = get_args() make_readme() print (\"processing...\\n\") if",
"= args.url csv_name = str(args.out_csv) # defaults to all if (str(args.num_to_retrieve) is 'a'):",
"default='', help='only retrieve ids for multichapter fics') parser.add_argument( '--tag_csv', default='', help='provide an optional",
"reset flags to run again # note: do not reset seen_ids def reset():",
"= True else: multichap_only = False tag_csv = str(args.tag_csv) if (tag_csv): with open(tag_csv,",
"num_recorded_fic = 0 def process_for_ids(header_info=''): while(not_finished()): # 5 second delay between requests as",
"(len(works) is 0): page_empty = True # process list for new fic ids",
"fics they want # what to call the output csv # # If",
"on page 1 else: # there are other modifiers if (url.find(\"?\") is not",
"can be restarted # def write_ids_to_csv(ids): global num_recorded_fic with open(csv_name + \".csv\", 'a')",
"the next page # note that if you go too far, ao3 won't",
"wr.writerow([id, url]) num_recorded_fic = num_recorded_fic + 1 else: break # # if you",
"for: # a url of a works listed page # e.g. # https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling",
"key = \"page=\" start = url.find(key) # there is already a page indicator",
"def write_ids_to_csv(ids): global num_recorded_fic with open(csv_name + \".csv\", 'a') as csvfile: wr =",
"recorded doesn't update until it's actually written to the csv. # If you've",
"(len(tags)): for t in tags: print (\"Getting tag: \", t) reset() add_tag_to_url(t) process_for_ids(header_info)",
"'--num_to_retrieve', default='a', help='how many fic ids you want') parser.add_argument( '--multichapter_only', default='', help='only retrieve",
"re import time import requests import csv import sys import datetime import argparse",
"tag + \"%2C\" + base_url[start:] url = new_url else: url = base_url +",
"# Only retrieve multichapter fics # Modify search to include a list of",
"url = url[:page_start_index] + str(page) # there is no page indicator, so we",
"page indicator, so we are on page 1 else: # there are other",
"if (len(works) is 0): page_empty = True # process list for new fic",
"[] # # Ask the user for: # a url of a works",
"go too far, ao3 won't error, # but there will be no works",
"navigate to a works listed page, # then extract all work ids #",
"far and run out of fic: if (len(works) is 0): page_empty = True",
"# modify the base_url to include the new tag, and save to global",
"True # process list for new fic ids ids = [] for tag",
"# after every page, write the gathered ids # to the csv, so",
"they want # what to call the output csv # # If you",
"url[:page_start_index] + str(page) + url[page_end_index:] # if it's at the end of the",
"requested fics # def make_readme(): with open(csv_name + \"_readme.txt\", \"w\") as text_file: text_file.write(\"url:",
"return in searched order # Saves ids to a csv for later use",
"help='how many fic ids you want') parser.add_argument( '--multichapter_only', default='', help='only retrieve ids for",
"-1): page = int(url[page_start_index:page_end_index]) + 1 url = url[:page_start_index] + str(page) + url[page_end_index:]",
"if (num_recorded_fic < num_requested_fic): return True else: return False # # include a",
"or \"fluff\") from bs4 import BeautifulSoup import re import time import requests import",
"move to the next page # note that if you go too far,",
"the retrieved fics must have one or more such tags') args = parser.parse_args()",
"too far and there are no more fic, end. # def not_finished(): if",
"str(page) # there is no page indicator, so we are on page 1",
"get_args(): global base_url global url global csv_name global num_requested_fic global multichap_only global tags",
"gathered ids # to the csv, so a crash doesn't lose everything. #",
"(e.g. you want all fics tagged either \"romance\" or \"fluff\") from bs4 import",
"csv.reader(tags_f) for row in tags_reader: tags.append(row[0]) header_info = str(args.header) return header_info # #",
"== -1): return True else: if (num_recorded_fic < num_requested_fic): return True else: return",
"= csv.writer(csvfile, delimiter=',') for id in ids: if (not_finished()): wr.writerow([id, url]) num_recorded_fic =",
"to avoid repeats: # this is separate from the temporary batch of ids",
"doesn't lose everything. # include the url where it was found, # so",
"middle of the url if (page_end_index is not -1): page = int(url[page_start_index:page_end_index]) +",
"not -1): url = url + \"&page=2\" # there an no modifiers yet",
"= base_url.find(key) + len(key) new_url = base_url[:start] + tag + \"%2C\" + base_url[start:]",
"multichap_only = False tag_csv = str(args.tag_csv) if (tag_csv): with open(tag_csv, \"r\") as tags_f:",
"works = soup.find_all(class_=\"work blurb group\") # see if we've gone too far and",
"tags_f: tags_reader = csv.reader(tags_f) for row in tags_reader: tags.append(row[0]) header_info = str(args.header) return",
"# def make_readme(): with open(csv_name + \"_readme.txt\", \"w\") as text_file: text_file.write(\"url: \" +",
"indicator in the url if (start is not -1): # find where in",
"wr = csv.writer(csvfile, delimiter=',') for id in ids: if (not_finished()): wr.writerow([id, url]) num_recorded_fic",
"1 url = url[:page_start_index] + str(page) # there is no page indicator, so",
"ids.append(t) seen_ids.append(t) return ids # # update the url to move to the",
"in tags: print (\"Getting tag: \", t) reset() add_tag_to_url(t) process_for_ids(header_info) else: process_for_ids(header_info) print",
"temporary batch of ids # that are written to the csv and then",
"page_empty = False base_url = \"\" url = \"\" num_requested_fic = 0 num_recorded_fic",
"you would like to add additional search terms (that is should contain at",
"# some responsiveness in the \"UI\" sys.stdout.write('.') sys.stdout.flush() works = soup.find_all(class_=\"work blurb group\")",
"(not_finished()): wr.writerow([id, url]) num_recorded_fic = num_recorded_fic + 1 else: break # # if",
"base_url[start:] url = new_url else: url = base_url + \"&work_search%5Bother_tag_names%5D=\" + tag #",
"more such tags') args = parser.parse_args() url = args.url csv_name = str(args.out_csv) #",
"end. # def not_finished(): if (page_empty): return False if (num_requested_fic == -1): return",
"seen_ids = [] # # Ask the user for: # a url of",
"num_requested_fic global multichap_only global tags parser = argparse.ArgumentParser(description='Scrape AO3 work IDs given a",
"else: if (num_recorded_fic < num_requested_fic): return True else: return False # # include",
"necessarily all of) # specify these in the tag csv, one per row.",
"default='work_ids', help='csv output file name') parser.add_argument( '--header', default='', help='user http header') parser.add_argument( '--num_to_retrieve',",
"but there will be no works listed # def update_url_to_next_page(): global url key",
"time.sleep(5) ids = get_ids(header_info) write_ids_to_csv(ids) update_url_to_next_page() def main(): header_info = get_args() make_readme() print",
"in the middle of the url if (page_end_index is not -1): page =",
"# # after every page, write the gathered ids # to the csv,",
"+ base_url[start:] url = new_url else: url = base_url + \"&work_search%5Bother_tag_names%5D=\" + tag",
"one of, but not necessarily all of) # specify these in the tag",
"user for: # a url of a works listed page # e.g. #",
"\"&page=2\" # there an no modifiers yet else: url = url + \"?page=2\"",
"to add additional search terms (that is should contain at least one of,",
"default='', help='provide an optional list of tags; the retrieved fics must have one",
"ids # to the csv, so a crash doesn't lose everything. # include",
"again # note: do not reset seen_ids def reset(): global page_empty global num_recorded_fic",
"start = base_url.find(key) + len(key) new_url = base_url[:start] + tag + \"%2C\" +",
"to global url def add_tag_to_url(tag): global url key = \"&work_search%5Bother_tag_names%5D=\" if (base_url.find(key)): start",
"(page_end_index is not -1): page = int(url[page_start_index:page_end_index]) + 1 url = url[:page_start_index] +",
"to a csv for later use e.g. to retrieve fic text # Options:",
"t in tags: print (\"Getting tag: \", t) reset() add_tag_to_url(t) process_for_ids(header_info) else: process_for_ids(header_info)",
"\"page=\" start = url.find(key) # there is already a page indicator in the",
"headers = {'user-agent' : header_info} req = requests.get(url, headers=headers) soup = BeautifulSoup(req.text, \"lxml\")",
"indicator, so we are on page 1 else: # there are other modifiers",
"url, # and the number of requested fics # def make_readme(): with open(csv_name",
"tags # (e.g. you want all fics tagged either \"romance\" or \"fluff\") from",
"keep track of all processed ids to avoid repeats: # this is separate",
"import requests import csv import sys import datetime import argparse page_empty = False",
"# if it's at the end of the url else: page = int(url[page_start_index:])",
"must have one or more such tags') args = parser.parse_args() url = args.url",
"Saves ids to a csv for later use e.g. to retrieve fic text",
"process list for new fic ids ids = [] for tag in works:",
"won't error, # but there will be no works listed # def update_url_to_next_page():",
"t in seen_ids: ids.append(t) seen_ids.append(t) else: t = tag.get('id') t = t[5:] if",
"(base_url.find(key)): start = base_url.find(key) + len(key) new_url = base_url[:start] + tag + \"%2C\"",
"else: # there are other modifiers if (url.find(\"?\") is not -1): url =",
"found, # so an interrupted search can be restarted # def write_ids_to_csv(ids): global",
"# def get_ids(header_info=''): global page_empty headers = {'user-agent' : header_info} req = requests.get(url,",
"start + len(key) page_end_index = url.find(\"&\", page_start_index) # if it's in the middle",
"+ \"&page=2\" # there an no modifiers yet else: url = url +",
"in ids: if (not_finished()): wr.writerow([id, url]) num_recorded_fic = num_recorded_fic + 1 else: break",
"other modifiers if (url.find(\"?\") is not -1): url = url + \"&page=2\" #",
"class_=\"chapters\") if (chaps.text != u\"1/1\"): t = tag.get('id') t = t[5:] if not",
"= tag.find('dd', class_=\"chapters\") if (chaps.text != u\"1/1\"): t = tag.get('id') t = t[5:]",
"if (len(tags)): for t in tags: print (\"Getting tag: \", t) reset() add_tag_to_url(t)",
"if (base_url.find(key)): start = base_url.find(key) + len(key) new_url = base_url[:start] + tag +",
"base_url to include the new tag, and save to global url def add_tag_to_url(tag):",
"work ids # def get_ids(header_info=''): global page_empty headers = {'user-agent' : header_info} req",
"service time.sleep(5) ids = get_ids(header_info) write_ids_to_csv(ids) update_url_to_next_page() def main(): header_info = get_args() make_readme()",
"-1): # find where in the url the page indicator starts and ends",
"error, # but there will be no works listed # def update_url_to_next_page(): global",
"of, but not necessarily all of) # specify these in the tag csv,",
"text_file.write(\"url: \" + url + \"\\n\" + \"num_requested_fic: \" + str(num_requested_fic) + \"\\n\"",
"global num_requested_fic global multichap_only global tags parser = argparse.ArgumentParser(description='Scrape AO3 work IDs given",
"from the temporary batch of ids # that are written to the csv",
"= \"\" num_requested_fic = 0 num_recorded_fic = 0 csv_name = \"\" multichap_only =",
"base_url = \"\" url = \"\" num_requested_fic = 0 num_recorded_fic = 0 csv_name",
"# to the csv, so a crash doesn't lose everything. # include the",
"but not necessarily all of) # specify these in the tag csv, one",
"url where it was found, # so an interrupted search can be restarted",
"global num_recorded_fic with open(csv_name + \".csv\", 'a') as csvfile: wr = csv.writer(csvfile, delimiter=',')",
"note: do not reset seen_ids def reset(): global page_empty global num_recorded_fic page_empty =",
"num_recorded_fic with open(csv_name + \".csv\", 'a') as csvfile: wr = csv.writer(csvfile, delimiter=',') for",
"list of tags # (e.g. you want all fics tagged either \"romance\" or",
"= -1 else: num_requested_fic = int(args.num_to_retrieve) multichap_only = str(args.multichapter_only) if multichap_only != \"\":",
"multichapter fics') parser.add_argument( '--tag_csv', default='', help='provide an optional list of tags; the retrieved",
"page indicator starts and ends page_start_index = start + len(key) page_end_index = url.find(\"&\",",
"t[5:] if not t in seen_ids: ids.append(t) seen_ids.append(t) else: t = tag.get('id') t",
"url.find(\"&\", page_start_index) # if it's in the middle of the url if (page_end_index",
"t = t[5:] if not t in seen_ids: ids.append(t) seen_ids.append(t) return ids #",
"If you've gone too far and there are no more fic, end. #",
"between requests as per AO3's terms of service time.sleep(5) ids = get_ids(header_info) write_ids_to_csv(ids)",
"5 second delay between requests as per AO3's terms of service time.sleep(5) ids",
"# but there will be no works listed # def update_url_to_next_page(): global url",
"fic text # Options: # Only retrieve multichapter fics # Modify search to",
"include the url where it was found, # so an interrupted search can",
"True else: if (num_recorded_fic < num_requested_fic): return True else: return False # #",
"global page_empty global num_recorded_fic page_empty = False num_recorded_fic = 0 def process_for_ids(header_info=''): while(not_finished()):",
"# specify these in the tag csv, one per row. def get_args(): global",
"e.g. # https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling # https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count # how many fics they want # what",
"to the next page # note that if you go too far, ao3",
"\"\" num_requested_fic = 0 num_recorded_fic = 0 csv_name = \"\" multichap_only = \"\"",
"page_start_index) # if it's in the middle of the url if (page_end_index is",
"multichap_only = True else: multichap_only = False tag_csv = str(args.tag_csv) if (tag_csv): with",
"are written to the csv and then forgotten seen_ids = [] # #",
"header_info = str(args.header) return header_info # # navigate to a works listed page,",
"in searched order # Saves ids to a csv for later use e.g.",
"the page indicator starts and ends page_start_index = start + len(key) page_end_index =",
"the csv, so a crash doesn't lose everything. # include the url where",
"# if you want everything, you're not done # otherwise compare recorded against",
"args = parser.parse_args() url = args.url csv_name = str(args.out_csv) # defaults to all",
"get_args() make_readme() print (\"processing...\\n\") if (len(tags)): for t in tags: print (\"Getting tag:",
"works listed page # e.g. # https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling # https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count # how many fics",
"(chaps.text != u\"1/1\"): t = tag.get('id') t = t[5:] if not t in",
"page = int(url[page_start_index:]) + 1 url = url[:page_start_index] + str(page) # there is",
"parser = argparse.ArgumentParser(description='Scrape AO3 work IDs given a search URL') parser.add_argument( 'url', metavar='URL',",
"csvfile: wr = csv.writer(csvfile, delimiter=',') for id in ids: if (not_finished()): wr.writerow([id, url])",
"fics # Modify search to include a list of tags # (e.g. you",
"True else: multichap_only = False tag_csv = str(args.tag_csv) if (tag_csv): with open(tag_csv, \"r\")",
"if you want everything, you're not done # otherwise compare recorded against requested.",
"IDs given a search URL') parser.add_argument( 'url', metavar='URL', help='a single URL pointing to",
"MULTICHAP ONLY chaps = tag.find('dd', class_=\"chapters\") if (chaps.text != u\"1/1\"): t = tag.get('id')",
"until it's actually written to the csv. # If you've gone too far",
"row. def get_args(): global base_url global url global csv_name global num_requested_fic global multichap_only",
"global url global csv_name global num_requested_fic global multichap_only global tags parser = argparse.ArgumentParser(description='Scrape",
"(start is not -1): # find where in the url the page indicator",
"not_finished(): if (page_empty): return False if (num_requested_fic == -1): return True else: if",
"else: t = tag.get('id') t = t[5:] if not t in seen_ids: ids.append(t)",
"pointing to an AO3 search page') parser.add_argument( '--out_csv', default='work_ids', help='csv output file name')",
"recorded against requested. # recorded doesn't update until it's actually written to the",
"\"retreived on: \" + str(datetime.datetime.now())) # reset flags to run again # note:",
"url = url + \"&page=2\" # there an no modifiers yet else: url",
"= False num_recorded_fic = 0 def process_for_ids(header_info=''): while(not_finished()): # 5 second delay between",
"False num_recorded_fic = 0 def process_for_ids(header_info=''): while(not_finished()): # 5 second delay between requests",
"to include the new tag, and save to global url def add_tag_to_url(tag): global",
"modify the base_url to include the new tag, and save to global url",
"seen_ids: ids.append(t) seen_ids.append(t) return ids # # update the url to move to",
"these in the tag csv, one per row. def get_args(): global base_url global",
"csv import sys import datetime import argparse page_empty = False base_url = \"\"",
"# that are written to the csv and then forgotten seen_ids = []",
"is not -1): page = int(url[page_start_index:page_end_index]) + 1 url = url[:page_start_index] + str(page)",
"url = url + \"?page=2\" # modify the base_url to include the new",
"# there an no modifiers yet else: url = url + \"?page=2\" #",
"the middle of the url if (page_end_index is not -1): page = int(url[page_start_index:page_end_index])",
"-1): return True else: if (num_recorded_fic < num_requested_fic): return True else: return False",
"= url[:page_start_index] + str(page) + url[page_end_index:] # if it's at the end of",
"as per AO3's terms of service time.sleep(5) ids = get_ids(header_info) write_ids_to_csv(ids) update_url_to_next_page() def",
"num_recorded_fic page_empty = False num_recorded_fic = 0 def process_for_ids(header_info=''): while(not_finished()): # 5 second",
"BeautifulSoup import re import time import requests import csv import sys import datetime",
"be no works listed # def update_url_to_next_page(): global url key = \"page=\" start",
"it was found, # so an interrupted search can be restarted # def",
"# If you would like to add additional search terms (that is should",
"delay between requests as per AO3's terms of service time.sleep(5) ids = get_ids(header_info)",
"tags: print (\"Getting tag: \", t) reset() add_tag_to_url(t) process_for_ids(header_info) else: process_for_ids(header_info) print (\"That's",
"was found, # so an interrupted search can be restarted # def write_ids_to_csv(ids):",
"csv for later use e.g. to retrieve fic text # Options: # Only",
"and the number of requested fics # def make_readme(): with open(csv_name + \"_readme.txt\",",
"so a crash doesn't lose everything. # include the url where it was",
"modifiers if (url.find(\"?\") is not -1): url = url + \"&page=2\" # there",
"global url key = \"&work_search%5Bother_tag_names%5D=\" if (base_url.find(key)): start = base_url.find(key) + len(key) new_url",
"gone too far and there are no more fic, end. # def not_finished():",
"'--tag_csv', default='', help='provide an optional list of tags; the retrieved fics must have",
"want # what to call the output csv # # If you would",
"# e.g. # https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling # https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count # how many fics they want #",
"'a'): num_requested_fic = -1 else: num_requested_fic = int(args.num_to_retrieve) multichap_only = str(args.multichapter_only) if multichap_only",
"= \"\" tags = [] # keep track of all processed ids to",
"metavar='URL', help='a single URL pointing to an AO3 search page') parser.add_argument( '--out_csv', default='work_ids',",
"that are written to the csv and then forgotten seen_ids = [] #",
"more fic, end. # def not_finished(): if (page_empty): return False if (num_requested_fic ==",
"url to move to the next page # note that if you go",
"def add_tag_to_url(tag): global url key = \"&work_search%5Bother_tag_names%5D=\" if (base_url.find(key)): start = base_url.find(key) +",
"+ \"\\n\" + \"retreived on: \" + str(datetime.datetime.now())) # reset flags to run",
"parser.add_argument( 'url', metavar='URL', help='a single URL pointing to an AO3 search page') parser.add_argument(",
"url global csv_name global num_requested_fic global multichap_only global tags parser = argparse.ArgumentParser(description='Scrape AO3",
"reset seen_ids def reset(): global page_empty global num_recorded_fic page_empty = False num_recorded_fic =",
"parser.add_argument( '--num_to_retrieve', default='a', help='how many fic ids you want') parser.add_argument( '--multichapter_only', default='', help='only",
"tags') args = parser.parse_args() url = args.url csv_name = str(args.out_csv) # defaults to",
"True else: return False # # include a text file with the starting",
"least one of, but not necessarily all of) # specify these in the",
"write_ids_to_csv(ids) update_url_to_next_page() def main(): header_info = get_args() make_readme() print (\"processing...\\n\") if (len(tags)): for",
"csv, one per row. def get_args(): global base_url global url global csv_name global",
"return ids # # update the url to move to the next page",
"is separate from the temporary batch of ids # that are written to",
"defaults to all if (str(args.num_to_retrieve) is 'a'): num_requested_fic = -1 else: num_requested_fic =",
"page, write the gathered ids # to the csv, so a crash doesn't",
"in seen_ids: ids.append(t) seen_ids.append(t) return ids # # update the url to move",
"def reset(): global page_empty global num_recorded_fic page_empty = False num_recorded_fic = 0 def",
"+ \"retreived on: \" + str(datetime.datetime.now())) # reset flags to run again #",
"(str(args.num_to_retrieve) is 'a'): num_requested_fic = -1 else: num_requested_fic = int(args.num_to_retrieve) multichap_only = str(args.multichapter_only)",
"page, # then extract all work ids # def get_ids(header_info=''): global page_empty headers",
"tag.find('dd', class_=\"chapters\") if (chaps.text != u\"1/1\"): t = tag.get('id') t = t[5:] if",
"= [] # keep track of all processed ids to avoid repeats: #",
"u\"1/1\"): t = tag.get('id') t = t[5:] if not t in seen_ids: ids.append(t)",
"page_empty = True # process list for new fic ids ids = []",
"either \"romance\" or \"fluff\") from bs4 import BeautifulSoup import re import time import",
"base_url[:start] + tag + \"%2C\" + base_url[start:] url = new_url else: url =",
"contain at least one of, but not necessarily all of) # specify these",
"url if (start is not -1): # find where in the url the",
"\" + str(datetime.datetime.now())) # reset flags to run again # note: do not",
"to the csv, so a crash doesn't lose everything. # include the url",
"it's at the end of the url else: page = int(url[page_start_index:]) + 1",
"t = tag.get('id') t = t[5:] if not t in seen_ids: ids.append(t) seen_ids.append(t)",
"datetime import argparse page_empty = False base_url = \"\" url = \"\" num_requested_fic",
"# Will return in searched order # Saves ids to a csv for",
"= url[:page_start_index] + str(page) # there is no page indicator, so we are",
"ends page_start_index = start + len(key) page_end_index = url.find(\"&\", page_start_index) # if it's",
"fic ids you want') parser.add_argument( '--multichapter_only', default='', help='only retrieve ids for multichapter fics')",
"url if (page_end_index is not -1): page = int(url[page_start_index:page_end_index]) + 1 url =",
"False if (num_requested_fic == -1): return True else: if (num_recorded_fic < num_requested_fic): return",
"\"lxml\") # some responsiveness in the \"UI\" sys.stdout.write('.') sys.stdout.flush() works = soup.find_all(class_=\"work blurb",
"optional list of tags; the retrieved fics must have one or more such",
"\" + str(num_requested_fic) + \"\\n\" + \"retreived on: \" + str(datetime.datetime.now())) # reset",
"requests as per AO3's terms of service time.sleep(5) ids = get_ids(header_info) write_ids_to_csv(ids) update_url_to_next_page()",
"per AO3's terms of service time.sleep(5) ids = get_ids(header_info) write_ids_to_csv(ids) update_url_to_next_page() def main():",
"against requested. # recorded doesn't update until it's actually written to the csv.",
"the number of requested fics # def make_readme(): with open(csv_name + \"_readme.txt\", \"w\")",
"+ tag + \"%2C\" + base_url[start:] url = new_url else: url = base_url",
"url = \"\" num_requested_fic = 0 num_recorded_fic = 0 csv_name = \"\" multichap_only",
"all processed ids to avoid repeats: # this is separate from the temporary",
"csv, so a crash doesn't lose everything. # include the url where it",
"= 0 def process_for_ids(header_info=''): while(not_finished()): # 5 second delay between requests as per",
"to call the output csv # # If you would like to add",
"# see if we've gone too far and run out of fic: if",
"separate from the temporary batch of ids # that are written to the",
"# https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count # how many fics they want # what to call the",
"the csv and then forgotten seen_ids = [] # # Ask the user",
"FOR MULTICHAP ONLY chaps = tag.find('dd', class_=\"chapters\") if (chaps.text != u\"1/1\"): t =",
"to retrieve fic text # Options: # Only retrieve multichapter fics # Modify",
"fics # def make_readme(): with open(csv_name + \"_readme.txt\", \"w\") as text_file: text_file.write(\"url: \"",
"retrieve ids for multichapter fics') parser.add_argument( '--tag_csv', default='', help='provide an optional list of",
"https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count # how many fics they want # what to call the output",
"tag in works: if (multichap_only): # FOR MULTICHAP ONLY chaps = tag.find('dd', class_=\"chapters\")",
"a page indicator in the url if (start is not -1): # find",
"requested. # recorded doesn't update until it's actually written to the csv. #",
"fics') parser.add_argument( '--tag_csv', default='', help='provide an optional list of tags; the retrieved fics",
"num_recorded_fic = 0 csv_name = \"\" multichap_only = \"\" tags = [] #",
"so we are on page 1 else: # there are other modifiers if",
"= str(args.header) return header_info # # navigate to a works listed page, #",
"url + \"?page=2\" # modify the base_url to include the new tag, and",
"indicator starts and ends page_start_index = start + len(key) page_end_index = url.find(\"&\", page_start_index)",
"if (num_requested_fic == -1): return True else: if (num_recorded_fic < num_requested_fic): return True",
"with the starting url, # and the number of requested fics # def",
"return True else: return False # # include a text file with the",
"# there are other modifiers if (url.find(\"?\") is not -1): url = url",
"and run out of fic: if (len(works) is 0): page_empty = True #",
"new_url = base_url[:start] + tag + \"%2C\" + base_url[start:] url = new_url else:",
"# def update_url_to_next_page(): global url key = \"page=\" start = url.find(key) # there",
"out of fic: if (len(works) is 0): page_empty = True # process list",
"to all if (str(args.num_to_retrieve) is 'a'): num_requested_fic = -1 else: num_requested_fic = int(args.num_to_retrieve)",
"(tag_csv): with open(tag_csv, \"r\") as tags_f: tags_reader = csv.reader(tags_f) for row in tags_reader:",
"if (start is not -1): # find where in the url the page",
"soup = BeautifulSoup(req.text, \"lxml\") # some responsiveness in the \"UI\" sys.stdout.write('.') sys.stdout.flush() works",
"\"w\") as text_file: text_file.write(\"url: \" + url + \"\\n\" + \"num_requested_fic: \" +",
"are on page 1 else: # there are other modifiers if (url.find(\"?\") is",
"a crash doesn't lose everything. # include the url where it was found,",
"False base_url = \"\" url = \"\" num_requested_fic = 0 num_recorded_fic = 0",
"ids to a csv for later use e.g. to retrieve fic text #",
"sys.stdout.write('.') sys.stdout.flush() works = soup.find_all(class_=\"work blurb group\") # see if we've gone too",
"# a url of a works listed page # e.g. # https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling #",
"# there is no page indicator, so we are on page 1 else:",
"of the url if (page_end_index is not -1): page = int(url[page_start_index:page_end_index]) + 1",
"args.url csv_name = str(args.out_csv) # defaults to all if (str(args.num_to_retrieve) is 'a'): num_requested_fic",
"all fics tagged either \"romance\" or \"fluff\") from bs4 import BeautifulSoup import re",
"ids from an AO3 search # Will return in searched order # Saves",
"retrieve multichapter fics # Modify search to include a list of tags #",
"url = url[:page_start_index] + str(page) + url[page_end_index:] # if it's at the end",
"URL pointing to an AO3 search page') parser.add_argument( '--out_csv', default='work_ids', help='csv output file",
"page = int(url[page_start_index:page_end_index]) + 1 url = url[:page_start_index] + str(page) + url[page_end_index:] #",
"url def add_tag_to_url(tag): global url key = \"&work_search%5Bother_tag_names%5D=\" if (base_url.find(key)): start = base_url.find(key)",
"# Modify search to include a list of tags # (e.g. you want",
"= \"\" multichap_only = \"\" tags = [] # keep track of all",
"= str(args.multichapter_only) if multichap_only != \"\": multichap_only = True else: multichap_only = False",
"= int(url[page_start_index:]) + 1 url = url[:page_start_index] + str(page) # there is no",
"in the \"UI\" sys.stdout.write('.') sys.stdout.flush() works = soup.find_all(class_=\"work blurb group\") # see if",
"starts and ends page_start_index = start + len(key) page_end_index = url.find(\"&\", page_start_index) #",
"Options: # Only retrieve multichapter fics # Modify search to include a list",
"tags = [] # keep track of all processed ids to avoid repeats:",
"= int(args.num_to_retrieve) multichap_only = str(args.multichapter_only) if multichap_only != \"\": multichap_only = True else:",
"page # note that if you go too far, ao3 won't error, #",
"for t in tags: print (\"Getting tag: \", t) reset() add_tag_to_url(t) process_for_ids(header_info) else:",
"output file name') parser.add_argument( '--header', default='', help='user http header') parser.add_argument( '--num_to_retrieve', default='a', help='how",
"# defaults to all if (str(args.num_to_retrieve) is 'a'): num_requested_fic = -1 else: num_requested_fic",
"= tag.get('id') t = t[5:] if not t in seen_ids: ids.append(t) seen_ids.append(t) else:",
"'--out_csv', default='work_ids', help='csv output file name') parser.add_argument( '--header', default='', help='user http header') parser.add_argument(",
"if it's in the middle of the url if (page_end_index is not -1):",
"+ \"_readme.txt\", \"w\") as text_file: text_file.write(\"url: \" + url + \"\\n\" + \"num_requested_fic:",
"given a search URL') parser.add_argument( 'url', metavar='URL', help='a single URL pointing to an",
"there an no modifiers yet else: url = url + \"?page=2\" # modify",
"< num_requested_fic): return True else: return False # # include a text file",
"page_empty = False num_recorded_fic = 0 def process_for_ids(header_info=''): while(not_finished()): # 5 second delay",
"bs4 import BeautifulSoup import re import time import requests import csv import sys",
"with open(csv_name + \"_readme.txt\", \"w\") as text_file: text_file.write(\"url: \" + url + \"\\n\"",
"in works: if (multichap_only): # FOR MULTICHAP ONLY chaps = tag.find('dd', class_=\"chapters\") if",
"to an AO3 search page') parser.add_argument( '--out_csv', default='work_ids', help='csv output file name') parser.add_argument(",
"\"\" url = \"\" num_requested_fic = 0 num_recorded_fic = 0 csv_name = \"\"",
"t = t[5:] if not t in seen_ids: ids.append(t) seen_ids.append(t) else: t =",
"the end of the url else: page = int(url[page_start_index:]) + 1 url =",
"row in tags_reader: tags.append(row[0]) header_info = str(args.header) return header_info # # navigate to",
"chaps = tag.find('dd', class_=\"chapters\") if (chaps.text != u\"1/1\"): t = tag.get('id') t =",
"\"\": multichap_only = True else: multichap_only = False tag_csv = str(args.tag_csv) if (tag_csv):",
"for tag in works: if (multichap_only): # FOR MULTICHAP ONLY chaps = tag.find('dd',",
"if multichap_only != \"\": multichap_only = True else: multichap_only = False tag_csv =",
"# then extract all work ids # def get_ids(header_info=''): global page_empty headers =",
"if you go too far, ao3 won't error, # but there will be",
"# include the url where it was found, # so an interrupted search",
"of tags; the retrieved fics must have one or more such tags') args",
"# process list for new fic ids ids = [] for tag in",
"the url where it was found, # so an interrupted search can be",
"done # otherwise compare recorded against requested. # recorded doesn't update until it's",
"str(datetime.datetime.now())) # reset flags to run again # note: do not reset seen_ids",
"csv # # If you would like to add additional search terms (that",
"tag.get('id') t = t[5:] if not t in seen_ids: ids.append(t) seen_ids.append(t) else: t",
"ids = get_ids(header_info) write_ids_to_csv(ids) update_url_to_next_page() def main(): header_info = get_args() make_readme() print (\"processing...\\n\")",
"if we've gone too far and run out of fic: if (len(works) is",
"include a list of tags # (e.g. you want all fics tagged either",
"requests.get(url, headers=headers) soup = BeautifulSoup(req.text, \"lxml\") # some responsiveness in the \"UI\" sys.stdout.write('.')",
"argparse page_empty = False base_url = \"\" url = \"\" num_requested_fic = 0",
"multichap_only != \"\": multichap_only = True else: multichap_only = False tag_csv = str(args.tag_csv)",
"else: multichap_only = False tag_csv = str(args.tag_csv) if (tag_csv): with open(tag_csv, \"r\") as",
"= argparse.ArgumentParser(description='Scrape AO3 work IDs given a search URL') parser.add_argument( 'url', metavar='URL', help='a",
"ao3 won't error, # but there will be no works listed # def",
"of requested fics # def make_readme(): with open(csv_name + \"_readme.txt\", \"w\") as text_file:",
"save to global url def add_tag_to_url(tag): global url key = \"&work_search%5Bother_tag_names%5D=\" if (base_url.find(key)):",
"as tags_f: tags_reader = csv.reader(tags_f) for row in tags_reader: tags.append(row[0]) header_info = str(args.header)",
"work IDs given a search URL') parser.add_argument( 'url', metavar='URL', help='a single URL pointing",
"URL') parser.add_argument( 'url', metavar='URL', help='a single URL pointing to an AO3 search page')",
"= url.find(\"&\", page_start_index) # if it's in the middle of the url if",
"the url to move to the next page # note that if you",
"we've gone too far and run out of fic: if (len(works) is 0):",
"include a text file with the starting url, # and the number of",
"ids ids = [] for tag in works: if (multichap_only): # FOR MULTICHAP",
"for multichapter fics') parser.add_argument( '--tag_csv', default='', help='provide an optional list of tags; the",
"+ str(page) # there is no page indicator, so we are on page",
"to move to the next page # note that if you go too",
"csv and then forgotten seen_ids = [] # # Ask the user for:",
"reset(): global page_empty global num_recorded_fic page_empty = False num_recorded_fic = 0 def process_for_ids(header_info=''):",
"the gathered ids # to the csv, so a crash doesn't lose everything.",
"note that if you go too far, ao3 won't error, # but there",
"tag csv, one per row. def get_args(): global base_url global url global csv_name",
"= \"page=\" start = url.find(key) # there is already a page indicator in",
"page_end_index = url.find(\"&\", page_start_index) # if it's in the middle of the url",
"for row in tags_reader: tags.append(row[0]) header_info = str(args.header) return header_info # # navigate",
"1 url = url[:page_start_index] + str(page) + url[page_end_index:] # if it's at the",
"seen_ids def reset(): global page_empty global num_recorded_fic page_empty = False num_recorded_fic = 0",
"the temporary batch of ids # that are written to the csv and",
"multichap_only = str(args.multichapter_only) if multichap_only != \"\": multichap_only = True else: multichap_only =",
"should contain at least one of, but not necessarily all of) # specify",
"get_ids(header_info=''): global page_empty headers = {'user-agent' : header_info} req = requests.get(url, headers=headers) soup",
"argparse.ArgumentParser(description='Scrape AO3 work IDs given a search URL') parser.add_argument( 'url', metavar='URL', help='a single",
"search can be restarted # def write_ids_to_csv(ids): global num_recorded_fic with open(csv_name + \".csv\",",
"# # navigate to a works listed page, # then extract all work",
"url = new_url else: url = base_url + \"&work_search%5Bother_tag_names%5D=\" + tag # #",
"num_recorded_fic = num_recorded_fic + 1 else: break # # if you want everything,",
"if (str(args.num_to_retrieve) is 'a'): num_requested_fic = -1 else: num_requested_fic = int(args.num_to_retrieve) multichap_only =",
"# Options: # Only retrieve multichapter fics # Modify search to include a",
"= [] # # Ask the user for: # a url of a",
"avoid repeats: # this is separate from the temporary batch of ids #",
"fic ids from an AO3 search # Will return in searched order #",
"= requests.get(url, headers=headers) soup = BeautifulSoup(req.text, \"lxml\") # some responsiveness in the \"UI\"",
"url = args.url csv_name = str(args.out_csv) # defaults to all if (str(args.num_to_retrieve) is",
"per row. def get_args(): global base_url global url global csv_name global num_requested_fic global",
"seen_ids: ids.append(t) seen_ids.append(t) else: t = tag.get('id') t = t[5:] if not t",
"tagged either \"romance\" or \"fluff\") from bs4 import BeautifulSoup import re import time",
"Ask the user for: # a url of a works listed page #",
"to the csv. # If you've gone too far and there are no",
"url else: page = int(url[page_start_index:]) + 1 url = url[:page_start_index] + str(page) #",
"no modifiers yet else: url = url + \"?page=2\" # modify the base_url",
"+ \"\\n\" + \"num_requested_fic: \" + str(num_requested_fic) + \"\\n\" + \"retreived on: \"",
"!= \"\": multichap_only = True else: multichap_only = False tag_csv = str(args.tag_csv) if",
"len(key) page_end_index = url.find(\"&\", page_start_index) # if it's in the middle of the",
"url = base_url + \"&work_search%5Bother_tag_names%5D=\" + tag # # after every page, write",
"not reset seen_ids def reset(): global page_empty global num_recorded_fic page_empty = False num_recorded_fic",
"# there is already a page indicator in the url if (start is",
"str(args.header) return header_info # # navigate to a works listed page, # then",
"second delay between requests as per AO3's terms of service time.sleep(5) ids =",
"+ \".csv\", 'a') as csvfile: wr = csv.writer(csvfile, delimiter=',') for id in ids:",
"an AO3 search page') parser.add_argument( '--out_csv', default='work_ids', help='csv output file name') parser.add_argument( '--header',",
"else: page = int(url[page_start_index:]) + 1 url = url[:page_start_index] + str(page) # there",
"# include a text file with the starting url, # and the number",
"global multichap_only global tags parser = argparse.ArgumentParser(description='Scrape AO3 work IDs given a search",
"search # Will return in searched order # Saves ids to a csv",
"sys.stdout.flush() works = soup.find_all(class_=\"work blurb group\") # see if we've gone too far",
"+ url + \"\\n\" + \"num_requested_fic: \" + str(num_requested_fic) + \"\\n\" + \"retreived",
"\"\" multichap_only = \"\" tags = [] # keep track of all processed",
"not done # otherwise compare recorded against requested. # recorded doesn't update until",
"output csv # # If you would like to add additional search terms",
"update until it's actually written to the csv. # If you've gone too",
"multichap_only = \"\" tags = [] # keep track of all processed ids",
"list for new fic ids ids = [] for tag in works: if",
"= soup.find_all(class_=\"work blurb group\") # see if we've gone too far and run",
"url.find(key) # there is already a page indicator in the url if (start",
"= 0 csv_name = \"\" multichap_only = \"\" tags = [] # keep",
"do not reset seen_ids def reset(): global page_empty global num_recorded_fic page_empty = False",
"= int(url[page_start_index:page_end_index]) + 1 url = url[:page_start_index] + str(page) + url[page_end_index:] # if",
"= csv.reader(tags_f) for row in tags_reader: tags.append(row[0]) header_info = str(args.header) return header_info #",
"# # If you would like to add additional search terms (that is",
"an interrupted search can be restarted # def write_ids_to_csv(ids): global num_recorded_fic with open(csv_name",
"+ str(num_requested_fic) + \"\\n\" + \"retreived on: \" + str(datetime.datetime.now())) # reset flags",
"str(args.multichapter_only) if multichap_only != \"\": multichap_only = True else: multichap_only = False tag_csv",
"= parser.parse_args() url = args.url csv_name = str(args.out_csv) # defaults to all if",
"= url + \"?page=2\" # modify the base_url to include the new tag,",
"lose everything. # include the url where it was found, # so an",
"\"fluff\") from bs4 import BeautifulSoup import re import time import requests import csv",
"of service time.sleep(5) ids = get_ids(header_info) write_ids_to_csv(ids) update_url_to_next_page() def main(): header_info = get_args()",
"= 0 num_recorded_fic = 0 csv_name = \"\" multichap_only = \"\" tags =",
"if not t in seen_ids: ids.append(t) seen_ids.append(t) else: t = tag.get('id') t =",
"tag.get('id') t = t[5:] if not t in seen_ids: ids.append(t) seen_ids.append(t) return ids",
"listed # def update_url_to_next_page(): global url key = \"page=\" start = url.find(key) #",
"one per row. def get_args(): global base_url global url global csv_name global num_requested_fic",
"an AO3 search # Will return in searched order # Saves ids to",
"\"UI\" sys.stdout.write('.') sys.stdout.flush() works = soup.find_all(class_=\"work blurb group\") # see if we've gone",
"is already a page indicator in the url if (start is not -1):",
"header_info = get_args() make_readme() print (\"processing...\\n\") if (len(tags)): for t in tags: print",
"a works listed page # e.g. # https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling # https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count # how many",
"additional search terms (that is should contain at least one of, but not",
"help='user http header') parser.add_argument( '--num_to_retrieve', default='a', help='how many fic ids you want') parser.add_argument(",
"base_url.find(key) + len(key) new_url = base_url[:start] + tag + \"%2C\" + base_url[start:] url",
"a list of tags # (e.g. you want all fics tagged either \"romance\"",
"tag: \", t) reset() add_tag_to_url(t) process_for_ids(header_info) else: process_for_ids(header_info) print (\"That's all, folks.\") main()",
"not t in seen_ids: ids.append(t) seen_ids.append(t) else: t = tag.get('id') t = t[5:]",
"to run again # note: do not reset seen_ids def reset(): global page_empty",
"run again # note: do not reset seen_ids def reset(): global page_empty global",
"to include a list of tags # (e.g. you want all fics tagged",
"url the page indicator starts and ends page_start_index = start + len(key) page_end_index",
"want all fics tagged either \"romance\" or \"fluff\") from bs4 import BeautifulSoup import",
"file with the starting url, # and the number of requested fics #",
"the url else: page = int(url[page_start_index:]) + 1 url = url[:page_start_index] + str(page)",
"0 num_recorded_fic = 0 csv_name = \"\" multichap_only = \"\" tags = []",
"many fic ids you want') parser.add_argument( '--multichapter_only', default='', help='only retrieve ids for multichapter",
"BeautifulSoup(req.text, \"lxml\") # some responsiveness in the \"UI\" sys.stdout.write('.') sys.stdout.flush() works = soup.find_all(class_=\"work",
"there are other modifiers if (url.find(\"?\") is not -1): url = url +",
"= t[5:] if not t in seen_ids: ids.append(t) seen_ids.append(t) else: t = tag.get('id')",
"on: \" + str(datetime.datetime.now())) # reset flags to run again # note: do",
"<filename>scripts/ao3_work_ids.py<gh_stars>0 # Retrieve fic ids from an AO3 search # Will return in",
"parser.add_argument( '--header', default='', help='user http header') parser.add_argument( '--num_to_retrieve', default='a', help='how many fic ids",
"+ \"%2C\" + base_url[start:] url = new_url else: url = base_url + \"&work_search%5Bother_tag_names%5D=\"",
"you want all fics tagged either \"romance\" or \"fluff\") from bs4 import BeautifulSoup",
"\".csv\", 'a') as csvfile: wr = csv.writer(csvfile, delimiter=',') for id in ids: if",
"or more such tags') args = parser.parse_args() url = args.url csv_name = str(args.out_csv)",
"\"r\") as tags_f: tags_reader = csv.reader(tags_f) for row in tags_reader: tags.append(row[0]) header_info =",
"are no more fic, end. # def not_finished(): if (page_empty): return False if",
"https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling # https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count # how many fics they want # what to call",
"[] for tag in works: if (multichap_only): # FOR MULTICHAP ONLY chaps =",
"parser.add_argument( '--tag_csv', default='', help='provide an optional list of tags; the retrieved fics must",
"else: url = base_url + \"&work_search%5Bother_tag_names%5D=\" + tag # # after every page,",
"0): page_empty = True # process list for new fic ids ids =",
"include the new tag, and save to global url def add_tag_to_url(tag): global url",
"else: url = url + \"?page=2\" # modify the base_url to include the",
"in tags_reader: tags.append(row[0]) header_info = str(args.header) return header_info # # navigate to a",
"num_recorded_fic + 1 else: break # # if you want everything, you're not",
"if it's at the end of the url else: page = int(url[page_start_index:]) +",
"url of a works listed page # e.g. # https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling # https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count #",
"# recorded doesn't update until it's actually written to the csv. # If",
"is should contain at least one of, but not necessarily all of) #",
"ids to avoid repeats: # this is separate from the temporary batch of",
"= start + len(key) page_end_index = url.find(\"&\", page_start_index) # if it's in the",
"time import requests import csv import sys import datetime import argparse page_empty =",
"global tags parser = argparse.ArgumentParser(description='Scrape AO3 work IDs given a search URL') parser.add_argument(",
"of the url else: page = int(url[page_start_index:]) + 1 url = url[:page_start_index] +",
"the base_url to include the new tag, and save to global url def",
"def process_for_ids(header_info=''): while(not_finished()): # 5 second delay between requests as per AO3's terms",
"= get_args() make_readme() print (\"processing...\\n\") if (len(tags)): for t in tags: print (\"Getting",
"next page # note that if you go too far, ao3 won't error,",
"main(): header_info = get_args() make_readme() print (\"processing...\\n\") if (len(tags)): for t in tags:",
"+ url[page_end_index:] # if it's at the end of the url else: page",
"then extract all work ids # def get_ids(header_info=''): global page_empty headers = {'user-agent'",
"compare recorded against requested. # recorded doesn't update until it's actually written to",
"we are on page 1 else: # there are other modifiers if (url.find(\"?\")",
"there are no more fic, end. # def not_finished(): if (page_empty): return False",
"multichapter fics # Modify search to include a list of tags # (e.g.",
"there will be no works listed # def update_url_to_next_page(): global url key =",
"group\") # see if we've gone too far and run out of fic:",
"+ 1 url = url[:page_start_index] + str(page) # there is no page indicator,",
"return False if (num_requested_fic == -1): return True else: if (num_recorded_fic < num_requested_fic):",
"break # # if you want everything, you're not done # otherwise compare",
"import datetime import argparse page_empty = False base_url = \"\" url = \"\"",
"call the output csv # # If you would like to add additional",
"specify these in the tag csv, one per row. def get_args(): global base_url",
"in seen_ids: ids.append(t) seen_ids.append(t) else: t = tag.get('id') t = t[5:] if not",
"you want everything, you're not done # otherwise compare recorded against requested. #",
"too far, ao3 won't error, # but there will be no works listed",
"you're not done # otherwise compare recorded against requested. # recorded doesn't update",
"the \"UI\" sys.stdout.write('.') sys.stdout.flush() works = soup.find_all(class_=\"work blurb group\") # see if we've",
"ids: if (not_finished()): wr.writerow([id, url]) num_recorded_fic = num_recorded_fic + 1 else: break #",
"as csvfile: wr = csv.writer(csvfile, delimiter=',') for id in ids: if (not_finished()): wr.writerow([id,",
"url + \"&page=2\" # there an no modifiers yet else: url = url",
"tag_csv = str(args.tag_csv) if (tag_csv): with open(tag_csv, \"r\") as tags_f: tags_reader = csv.reader(tags_f)",
"interrupted search can be restarted # def write_ids_to_csv(ids): global num_recorded_fic with open(csv_name +",
"= base_url[:start] + tag + \"%2C\" + base_url[start:] url = new_url else: url",
"not necessarily all of) # specify these in the tag csv, one per",
"is no page indicator, so we are on page 1 else: # there",
"def not_finished(): if (page_empty): return False if (num_requested_fic == -1): return True else:",
"ids for multichapter fics') parser.add_argument( '--tag_csv', default='', help='provide an optional list of tags;",
"e.g. to retrieve fic text # Options: # Only retrieve multichapter fics #",
"(page_empty): return False if (num_requested_fic == -1): return True else: if (num_recorded_fic <",
"an no modifiers yet else: url = url + \"?page=2\" # modify the",
"!= u\"1/1\"): t = tag.get('id') t = t[5:] if not t in seen_ids:",
"\"\" tags = [] # keep track of all processed ids to avoid",
"import sys import datetime import argparse page_empty = False base_url = \"\" url",
"update_url_to_next_page(): global url key = \"page=\" start = url.find(key) # there is already",
"with open(tag_csv, \"r\") as tags_f: tags_reader = csv.reader(tags_f) for row in tags_reader: tags.append(row[0])",
"then forgotten seen_ids = [] # # Ask the user for: # a",
"fic ids ids = [] for tag in works: if (multichap_only): # FOR",
"some responsiveness in the \"UI\" sys.stdout.write('.') sys.stdout.flush() works = soup.find_all(class_=\"work blurb group\") #",
"# If you've gone too far and there are no more fic, end.",
"seen_ids.append(t) else: t = tag.get('id') t = t[5:] if not t in seen_ids:",
"int(url[page_start_index:page_end_index]) + 1 url = url[:page_start_index] + str(page) + url[page_end_index:] # if it's",
"if (url.find(\"?\") is not -1): url = url + \"&page=2\" # there an",
"write the gathered ids # to the csv, so a crash doesn't lose",
"# (e.g. you want all fics tagged either \"romance\" or \"fluff\") from bs4",
"page_start_index = start + len(key) page_end_index = url.find(\"&\", page_start_index) # if it's in",
"{'user-agent' : header_info} req = requests.get(url, headers=headers) soup = BeautifulSoup(req.text, \"lxml\") # some",
"import csv import sys import datetime import argparse page_empty = False base_url =",
"len(key) new_url = base_url[:start] + tag + \"%2C\" + base_url[start:] url = new_url",
"(num_requested_fic == -1): return True else: if (num_recorded_fic < num_requested_fic): return True else:",
"tag # # after every page, write the gathered ids # to the",
"yet else: url = url + \"?page=2\" # modify the base_url to include",
"import argparse page_empty = False base_url = \"\" url = \"\" num_requested_fic =",
"works listed # def update_url_to_next_page(): global url key = \"page=\" start = url.find(key)",
"# keep track of all processed ids to avoid repeats: # this is",
"\"\\n\" + \"num_requested_fic: \" + str(num_requested_fic) + \"\\n\" + \"retreived on: \" +",
"if (tag_csv): with open(tag_csv, \"r\") as tags_f: tags_reader = csv.reader(tags_f) for row in",
"fics must have one or more such tags') args = parser.parse_args() url =",
"everything, you're not done # otherwise compare recorded against requested. # recorded doesn't",
"gone too far and run out of fic: if (len(works) is 0): page_empty",
"file name') parser.add_argument( '--header', default='', help='user http header') parser.add_argument( '--num_to_retrieve', default='a', help='how many",
"for id in ids: if (not_finished()): wr.writerow([id, url]) num_recorded_fic = num_recorded_fic + 1",
"extract all work ids # def get_ids(header_info=''): global page_empty headers = {'user-agent' :",
"# def not_finished(): if (page_empty): return False if (num_requested_fic == -1): return True",
"(url.find(\"?\") is not -1): url = url + \"&page=2\" # there an no",
"and save to global url def add_tag_to_url(tag): global url key = \"&work_search%5Bother_tag_names%5D=\" if",
"be restarted # def write_ids_to_csv(ids): global num_recorded_fic with open(csv_name + \".csv\", 'a') as",
"batch of ids # that are written to the csv and then forgotten",
"url key = \"page=\" start = url.find(key) # there is already a page",
"= False base_url = \"\" url = \"\" num_requested_fic = 0 num_recorded_fic =",
"so an interrupted search can be restarted # def write_ids_to_csv(ids): global num_recorded_fic with",
"use e.g. to retrieve fic text # Options: # Only retrieve multichapter fics",
"run out of fic: if (len(works) is 0): page_empty = True # process",
"Retrieve fic ids from an AO3 search # Will return in searched order",
"# update the url to move to the next page # note that",
"there is no page indicator, so we are on page 1 else: #",
"and there are no more fic, end. # def not_finished(): if (page_empty): return",
"a url of a works listed page # e.g. # https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling # https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count",
"the output csv # # If you would like to add additional search",
"= t[5:] if not t in seen_ids: ids.append(t) seen_ids.append(t) return ids # #",
"header_info} req = requests.get(url, headers=headers) soup = BeautifulSoup(req.text, \"lxml\") # some responsiveness in",
"is not -1): # find where in the url the page indicator starts",
"tags.append(row[0]) header_info = str(args.header) return header_info # # navigate to a works listed",
"write_ids_to_csv(ids): global num_recorded_fic with open(csv_name + \".csv\", 'a') as csvfile: wr = csv.writer(csvfile,",
"# Saves ids to a csv for later use e.g. to retrieve fic",
"\"romance\" or \"fluff\") from bs4 import BeautifulSoup import re import time import requests",
"the url if (page_end_index is not -1): page = int(url[page_start_index:page_end_index]) + 1 url",
"everything. # include the url where it was found, # so an interrupted",
"have one or more such tags') args = parser.parse_args() url = args.url csv_name",
"help='provide an optional list of tags; the retrieved fics must have one or",
"in the tag csv, one per row. def get_args(): global base_url global url",
"# FOR MULTICHAP ONLY chaps = tag.find('dd', class_=\"chapters\") if (chaps.text != u\"1/1\"): t",
"in the url if (start is not -1): # find where in the",
"id in ids: if (not_finished()): wr.writerow([id, url]) num_recorded_fic = num_recorded_fic + 1 else:",
"ids you want') parser.add_argument( '--multichapter_only', default='', help='only retrieve ids for multichapter fics') parser.add_argument(",
"soup.find_all(class_=\"work blurb group\") # see if we've gone too far and run out",
"it's in the middle of the url if (page_end_index is not -1): page",
"this is separate from the temporary batch of ids # that are written",
"csv_name global num_requested_fic global multichap_only global tags parser = argparse.ArgumentParser(description='Scrape AO3 work IDs",
"'url', metavar='URL', help='a single URL pointing to an AO3 search page') parser.add_argument( '--out_csv',",
"# so an interrupted search can be restarted # def write_ids_to_csv(ids): global num_recorded_fic",
"new fic ids ids = [] for tag in works: if (multichap_only): #",
"= False tag_csv = str(args.tag_csv) if (tag_csv): with open(tag_csv, \"r\") as tags_f: tags_reader",
"of all processed ids to avoid repeats: # this is separate from the",
"AO3 search # Will return in searched order # Saves ids to a",
"processed ids to avoid repeats: # this is separate from the temporary batch",
"tags_reader = csv.reader(tags_f) for row in tags_reader: tags.append(row[0]) header_info = str(args.header) return header_info",
"tag, and save to global url def add_tag_to_url(tag): global url key = \"&work_search%5Bother_tag_names%5D=\"",
"make_readme(): with open(csv_name + \"_readme.txt\", \"w\") as text_file: text_file.write(\"url: \" + url +",
"# reset flags to run again # note: do not reset seen_ids def",
"all of) # specify these in the tag csv, one per row. def",
"url + \"\\n\" + \"num_requested_fic: \" + str(num_requested_fic) + \"\\n\" + \"retreived on:",
"return False # # include a text file with the starting url, #",
"page') parser.add_argument( '--out_csv', default='work_ids', help='csv output file name') parser.add_argument( '--header', default='', help='user http",
"if not t in seen_ids: ids.append(t) seen_ids.append(t) return ids # # update the",
"\"\\n\" + \"retreived on: \" + str(datetime.datetime.now())) # reset flags to run again",
"make_readme() print (\"processing...\\n\") if (len(tags)): for t in tags: print (\"Getting tag: \",",
"track of all processed ids to avoid repeats: # this is separate from",
"# how many fics they want # what to call the output csv",
"+ str(page) + url[page_end_index:] # if it's at the end of the url",
"search terms (that is should contain at least one of, but not necessarily",
"help='only retrieve ids for multichapter fics') parser.add_argument( '--tag_csv', default='', help='provide an optional list",
"every page, write the gathered ids # to the csv, so a crash",
"with open(csv_name + \".csv\", 'a') as csvfile: wr = csv.writer(csvfile, delimiter=',') for id",
"+ 1 else: break # # if you want everything, you're not done",
"retrieved fics must have one or more such tags') args = parser.parse_args() url",
"1 else: break # # if you want everything, you're not done #",
"forgotten seen_ids = [] # # Ask the user for: # a url",
"\"%2C\" + base_url[start:] url = new_url else: url = base_url + \"&work_search%5Bother_tag_names%5D=\" +",
"to a works listed page, # then extract all work ids # def",
"If you would like to add additional search terms (that is should contain",
"csv. # If you've gone too far and there are no more fic,",
"0 def process_for_ids(header_info=''): while(not_finished()): # 5 second delay between requests as per AO3's",
"# https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling # https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count # how many fics they want # what to",
"find where in the url the page indicator starts and ends page_start_index =",
"= \"&work_search%5Bother_tag_names%5D=\" if (base_url.find(key)): start = base_url.find(key) + len(key) new_url = base_url[:start] +",
"an optional list of tags; the retrieved fics must have one or more",
"number of requested fics # def make_readme(): with open(csv_name + \"_readme.txt\", \"w\") as",
"a works listed page, # then extract all work ids # def get_ids(header_info=''):",
"if (multichap_only): # FOR MULTICHAP ONLY chaps = tag.find('dd', class_=\"chapters\") if (chaps.text !=",
"list of tags; the retrieved fics must have one or more such tags')",
"AO3 search page') parser.add_argument( '--out_csv', default='work_ids', help='csv output file name') parser.add_argument( '--header', default='',",
"start = url.find(key) # there is already a page indicator in the url",
"multichap_only global tags parser = argparse.ArgumentParser(description='Scrape AO3 work IDs given a search URL')",
"headers=headers) soup = BeautifulSoup(req.text, \"lxml\") # some responsiveness in the \"UI\" sys.stdout.write('.') sys.stdout.flush()",
"where it was found, # so an interrupted search can be restarted #",
"how many fics they want # what to call the output csv #",
"= True # process list for new fic ids ids = [] for",
"url]) num_recorded_fic = num_recorded_fic + 1 else: break # # if you want",
"# 5 second delay between requests as per AO3's terms of service time.sleep(5)",
"you've gone too far and there are no more fic, end. # def",
"doesn't update until it's actually written to the csv. # If you've gone",
"global csv_name global num_requested_fic global multichap_only global tags parser = argparse.ArgumentParser(description='Scrape AO3 work",
"AO3's terms of service time.sleep(5) ids = get_ids(header_info) write_ids_to_csv(ids) update_url_to_next_page() def main(): header_info",
"flags to run again # note: do not reset seen_ids def reset(): global",
"(multichap_only): # FOR MULTICHAP ONLY chaps = tag.find('dd', class_=\"chapters\") if (chaps.text != u\"1/1\"):",
"search URL') parser.add_argument( 'url', metavar='URL', help='a single URL pointing to an AO3 search",
"header_info # # navigate to a works listed page, # then extract all",
"tags_reader: tags.append(row[0]) header_info = str(args.header) return header_info # # navigate to a works",
"= url.find(key) # there is already a page indicator in the url if",
"base_url global url global csv_name global num_requested_fic global multichap_only global tags parser =",
"at the end of the url else: page = int(url[page_start_index:]) + 1 url",
"for new fic ids ids = [] for tag in works: if (multichap_only):",
"str(args.out_csv) # defaults to all if (str(args.num_to_retrieve) is 'a'): num_requested_fic = -1 else:",
"'a') as csvfile: wr = csv.writer(csvfile, delimiter=',') for id in ids: if (not_finished()):",
"0 csv_name = \"\" multichap_only = \"\" tags = [] # keep track",
"+ len(key) new_url = base_url[:start] + tag + \"%2C\" + base_url[start:] url =",
"single URL pointing to an AO3 search page') parser.add_argument( '--out_csv', default='work_ids', help='csv output",
"page # e.g. # https://archiveofourown.org/works?utf8=%E2%9C%93&work_search%5Bsort_column%5D=word_count&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Blanguage_id%5D=&work_search%5Bcomplete%5D=0&commit=Sort+and+Filter&tag_id=Harry+Potter+-+J*d*+K*d*+Rowling # https://archiveofourown.org/tags/Harry%20Potter%20-%20J*d*%20K*d*%20Rowling/works?commit=Sort+and+Filter&page=2&utf8=%E2%9C%93&work_search%5Bcomplete%5D=0&work_search%5Blanguage_id%5D=&work_search%5Bother_tag_names%5D=&work_search%5Bquery%5D=&work_search%5Bsort_column%5D=word_count # how many fics they want",
"str(args.tag_csv) if (tag_csv): with open(tag_csv, \"r\") as tags_f: tags_reader = csv.reader(tags_f) for row",
"not -1): # find where in the url the page indicator starts and",
"if (not_finished()): wr.writerow([id, url]) num_recorded_fic = num_recorded_fic + 1 else: break # #",
"csv_name = str(args.out_csv) # defaults to all if (str(args.num_to_retrieve) is 'a'): num_requested_fic =",
"+ \"?page=2\" # modify the base_url to include the new tag, and save",
"(that is should contain at least one of, but not necessarily all of)",
"# # Ask the user for: # a url of a works listed",
"= new_url else: url = base_url + \"&work_search%5Bother_tag_names%5D=\" + tag # # after",
"# and the number of requested fics # def make_readme(): with open(csv_name +",
"global url key = \"page=\" start = url.find(key) # there is already a",
"new_url else: url = base_url + \"&work_search%5Bother_tag_names%5D=\" + tag # # after every",
"AO3 work IDs given a search URL') parser.add_argument( 'url', metavar='URL', help='a single URL",
"starting url, # and the number of requested fics # def make_readme(): with",
"num_requested_fic = int(args.num_to_retrieve) multichap_only = str(args.multichapter_only) if multichap_only != \"\": multichap_only = True",
"the url if (start is not -1): # find where in the url",
"+ 1 url = url[:page_start_index] + str(page) + url[page_end_index:] # if it's at",
"of fic: if (len(works) is 0): page_empty = True # process list for",
"works: if (multichap_only): # FOR MULTICHAP ONLY chaps = tag.find('dd', class_=\"chapters\") if (chaps.text",
"search to include a list of tags # (e.g. you want all fics",
"open(tag_csv, \"r\") as tags_f: tags_reader = csv.reader(tags_f) for row in tags_reader: tags.append(row[0]) header_info",
"the url the page indicator starts and ends page_start_index = start + len(key)",
"modifiers yet else: url = url + \"?page=2\" # modify the base_url to",
"num_requested_fic = 0 num_recorded_fic = 0 csv_name = \"\" multichap_only = \"\" tags",
"page_empty headers = {'user-agent' : header_info} req = requests.get(url, headers=headers) soup = BeautifulSoup(req.text,",
"ids # def get_ids(header_info=''): global page_empty headers = {'user-agent' : header_info} req =",
"Only retrieve multichapter fics # Modify search to include a list of tags",
"else: return False # # include a text file with the starting url,",
"ids # # update the url to move to the next page #",
"of tags # (e.g. you want all fics tagged either \"romance\" or \"fluff\")",
"print (\"Getting tag: \", t) reset() add_tag_to_url(t) process_for_ids(header_info) else: process_for_ids(header_info) print (\"That's all,",
"is 'a'): num_requested_fic = -1 else: num_requested_fic = int(args.num_to_retrieve) multichap_only = str(args.multichapter_only) if",
"Modify search to include a list of tags # (e.g. you want all",
"add additional search terms (that is should contain at least one of, but",
"delimiter=',') for id in ids: if (not_finished()): wr.writerow([id, url]) num_recorded_fic = num_recorded_fic +",
"ids.append(t) seen_ids.append(t) else: t = tag.get('id') t = t[5:] if not t in",
"text_file: text_file.write(\"url: \" + url + \"\\n\" + \"num_requested_fic: \" + str(num_requested_fic) +",
"parser.add_argument( '--out_csv', default='work_ids', help='csv output file name') parser.add_argument( '--header', default='', help='user http header')",
"the user for: # a url of a works listed page # e.g.",
"of) # specify these in the tag csv, one per row. def get_args():",
"the starting url, # and the number of requested fics # def make_readme():",
"the csv. # If you've gone too far and there are no more",
"tags; the retrieved fics must have one or more such tags') args =",
"else: num_requested_fic = int(args.num_to_retrieve) multichap_only = str(args.multichapter_only) if multichap_only != \"\": multichap_only =",
"int(args.num_to_retrieve) multichap_only = str(args.multichapter_only) if multichap_only != \"\": multichap_only = True else: multichap_only",
"at least one of, but not necessarily all of) # specify these in",
"far, ao3 won't error, # but there will be no works listed #",
"\"?page=2\" # modify the base_url to include the new tag, and save to",
"def get_args(): global base_url global url global csv_name global num_requested_fic global multichap_only global",
"page_empty global num_recorded_fic page_empty = False num_recorded_fic = 0 def process_for_ids(header_info=''): while(not_finished()): #",
"sys import datetime import argparse page_empty = False base_url = \"\" url =",
"repeats: # this is separate from the temporary batch of ids # that",
"from bs4 import BeautifulSoup import re import time import requests import csv import",
"and then forgotten seen_ids = [] # # Ask the user for: #",
"# note that if you go too far, ao3 won't error, # but",
"-1): url = url + \"&page=2\" # there an no modifiers yet else:",
"terms (that is should contain at least one of, but not necessarily all",
"# # update the url to move to the next page # note",
"fic, end. # def not_finished(): if (page_empty): return False if (num_requested_fic == -1):",
"# otherwise compare recorded against requested. # recorded doesn't update until it's actually",
"retrieve fic text # Options: # Only retrieve multichapter fics # Modify search",
"open(csv_name + \"_readme.txt\", \"w\") as text_file: text_file.write(\"url: \" + url + \"\\n\" +",
"+ len(key) page_end_index = url.find(\"&\", page_start_index) # if it's in the middle of",
"the new tag, and save to global url def add_tag_to_url(tag): global url key",
"restarted # def write_ids_to_csv(ids): global num_recorded_fic with open(csv_name + \".csv\", 'a') as csvfile:",
"ONLY chaps = tag.find('dd', class_=\"chapters\") if (chaps.text != u\"1/1\"): t = tag.get('id') t",
"are other modifiers if (url.find(\"?\") is not -1): url = url + \"&page=2\"",
"what to call the output csv # # If you would like to",
"it's actually written to the csv. # If you've gone too far and",
"= str(args.tag_csv) if (tag_csv): with open(tag_csv, \"r\") as tags_f: tags_reader = csv.reader(tags_f) for",
"get_ids(header_info) write_ids_to_csv(ids) update_url_to_next_page() def main(): header_info = get_args() make_readme() print (\"processing...\\n\") if (len(tags)):",
"(\"processing...\\n\") if (len(tags)): for t in tags: print (\"Getting tag: \", t) reset()",
"for later use e.g. to retrieve fic text # Options: # Only retrieve",
"base_url + \"&work_search%5Bother_tag_names%5D=\" + tag # # after every page, write the gathered",
"# note: do not reset seen_ids def reset(): global page_empty global num_recorded_fic page_empty",
"ids # that are written to the csv and then forgotten seen_ids =",
"no works listed # def update_url_to_next_page(): global url key = \"page=\" start =",
"will be no works listed # def update_url_to_next_page(): global url key = \"page=\"",
"to the csv and then forgotten seen_ids = [] # # Ask the",
"where in the url the page indicator starts and ends page_start_index = start",
"in the url the page indicator starts and ends page_start_index = start +",
"csv.writer(csvfile, delimiter=',') for id in ids: if (not_finished()): wr.writerow([id, url]) num_recorded_fic = num_recorded_fic",
"t in seen_ids: ids.append(t) seen_ids.append(t) return ids # # update the url to",
"(num_recorded_fic < num_requested_fic): return True else: return False # # include a text",
"tags parser = argparse.ArgumentParser(description='Scrape AO3 work IDs given a search URL') parser.add_argument( 'url',",
"written to the csv. # If you've gone too far and there are",
"\"&work_search%5Bother_tag_names%5D=\" if (base_url.find(key)): start = base_url.find(key) + len(key) new_url = base_url[:start] + tag",
"if (chaps.text != u\"1/1\"): t = tag.get('id') t = t[5:] if not t",
"req = requests.get(url, headers=headers) soup = BeautifulSoup(req.text, \"lxml\") # some responsiveness in the",
"not t in seen_ids: ids.append(t) seen_ids.append(t) return ids # # update the url",
"= \"\" url = \"\" num_requested_fic = 0 num_recorded_fic = 0 csv_name =",
"seen_ids.append(t) return ids # # update the url to move to the next",
"Will return in searched order # Saves ids to a csv for later",
"int(url[page_start_index:]) + 1 url = url[:page_start_index] + str(page) # there is no page",
"update_url_to_next_page() def main(): header_info = get_args() make_readme() print (\"processing...\\n\") if (len(tags)): for t",
"= str(args.out_csv) # defaults to all if (str(args.num_to_retrieve) is 'a'): num_requested_fic = -1",
"# if it's in the middle of the url if (page_end_index is not",
"you want') parser.add_argument( '--multichapter_only', default='', help='only retrieve ids for multichapter fics') parser.add_argument( '--tag_csv',",
"global url def add_tag_to_url(tag): global url key = \"&work_search%5Bother_tag_names%5D=\" if (base_url.find(key)): start =",
"if (page_empty): return False if (num_requested_fic == -1): return True else: if (num_recorded_fic",
"open(csv_name + \".csv\", 'a') as csvfile: wr = csv.writer(csvfile, delimiter=',') for id in",
"searched order # Saves ids to a csv for later use e.g. to",
"the tag csv, one per row. def get_args(): global base_url global url global",
"return header_info # # navigate to a works listed page, # then extract",
"num_requested_fic = -1 else: num_requested_fic = int(args.num_to_retrieve) multichap_only = str(args.multichapter_only) if multichap_only !=",
"# what to call the output csv # # If you would like",
"there is already a page indicator in the url if (start is not",
"later use e.g. to retrieve fic text # Options: # Only retrieve multichapter",
"you go too far, ao3 won't error, # but there will be no",
"url[page_end_index:] # if it's at the end of the url else: page =",
"# navigate to a works listed page, # then extract all work ids",
"fic: if (len(works) is 0): page_empty = True # process list for new",
"num_requested_fic): return True else: return False # # include a text file with",
"global num_recorded_fic page_empty = False num_recorded_fic = 0 def process_for_ids(header_info=''): while(not_finished()): # 5",
"from an AO3 search # Will return in searched order # Saves ids",
"fics tagged either \"romance\" or \"fluff\") from bs4 import BeautifulSoup import re import",
"= tag.get('id') t = t[5:] if not t in seen_ids: ids.append(t) seen_ids.append(t) return",
"help='csv output file name') parser.add_argument( '--header', default='', help='user http header') parser.add_argument( '--num_to_retrieve', default='a',",
"print (\"processing...\\n\") if (len(tags)): for t in tags: print (\"Getting tag: \", t)",
"import time import requests import csv import sys import datetime import argparse page_empty",
"# Retrieve fic ids from an AO3 search # Will return in searched",
"page 1 else: # there are other modifiers if (url.find(\"?\") is not -1):",
"t[5:] if not t in seen_ids: ids.append(t) seen_ids.append(t) return ids # # update",
"name') parser.add_argument( '--header', default='', help='user http header') parser.add_argument( '--num_to_retrieve', default='a', help='how many fic",
"# # if you want everything, you're not done # otherwise compare recorded",
"far and there are no more fic, end. # def not_finished(): if (page_empty):",
"such tags') args = parser.parse_args() url = args.url csv_name = str(args.out_csv) # defaults",
"text file with the starting url, # and the number of requested fics",
"parser.parse_args() url = args.url csv_name = str(args.out_csv) # defaults to all if (str(args.num_to_retrieve)",
"+ tag # # after every page, write the gathered ids # to",
"want everything, you're not done # otherwise compare recorded against requested. # recorded",
"\" + url + \"\\n\" + \"num_requested_fic: \" + str(num_requested_fic) + \"\\n\" +",
"one or more such tags') args = parser.parse_args() url = args.url csv_name =",
"process_for_ids(header_info=''): while(not_finished()): # 5 second delay between requests as per AO3's terms of",
"text # Options: # Only retrieve multichapter fics # Modify search to include",
"not -1): page = int(url[page_start_index:page_end_index]) + 1 url = url[:page_start_index] + str(page) +",
"'--header', default='', help='user http header') parser.add_argument( '--num_to_retrieve', default='a', help='how many fic ids you",
"written to the csv and then forgotten seen_ids = [] # # Ask",
"global base_url global url global csv_name global num_requested_fic global multichap_only global tags parser",
"+ \"num_requested_fic: \" + str(num_requested_fic) + \"\\n\" + \"retreived on: \" + str(datetime.datetime.now()))",
"'--multichapter_only', default='', help='only retrieve ids for multichapter fics') parser.add_argument( '--tag_csv', default='', help='provide an",
"search page') parser.add_argument( '--out_csv', default='work_ids', help='csv output file name') parser.add_argument( '--header', default='', help='user",
"default='a', help='how many fic ids you want') parser.add_argument( '--multichapter_only', default='', help='only retrieve ids",
"# find where in the url the page indicator starts and ends page_start_index",
"page indicator in the url if (start is not -1): # find where",
"def main(): header_info = get_args() make_readme() print (\"processing...\\n\") if (len(tags)): for t in",
"return True else: if (num_recorded_fic < num_requested_fic): return True else: return False #",
"# Ask the user for: # a url of a works listed page",
"and ends page_start_index = start + len(key) page_end_index = url.find(\"&\", page_start_index) # if",
"def make_readme(): with open(csv_name + \"_readme.txt\", \"w\") as text_file: text_file.write(\"url: \" + url",
"import BeautifulSoup import re import time import requests import csv import sys import",
"help='a single URL pointing to an AO3 search page') parser.add_argument( '--out_csv', default='work_ids', help='csv",
"url key = \"&work_search%5Bother_tag_names%5D=\" if (base_url.find(key)): start = base_url.find(key) + len(key) new_url =",
"(\"Getting tag: \", t) reset() add_tag_to_url(t) process_for_ids(header_info) else: process_for_ids(header_info) print (\"That's all, folks.\")",
"many fics they want # what to call the output csv # #",
"global page_empty headers = {'user-agent' : header_info} req = requests.get(url, headers=headers) soup ="
"d'items self.pickables[itemId] = numberOfThisItem #numberfThisItem est stocké dans le pickable de ma 'cuisine'",
"des deux côtés self.addExit(destination, exitName1) #exécution méthode addExit() sur 'cuisine' destination.addExit(self, exitName2) #destination",
"#méthode ajouter un objet dans une salle numberOfThisItem = self.pickables.get(itemId, 0) #fonction get",
"def addPickable(self, itemId, nbOfThisItem): #méthode ajouter un objet dans une salle numberOfThisItem =",
"la valeur associée à la clef itemId, # et s'il n'y en a",
"nouvelle sortie dans newExit self.exits[exitName] = newExit #dans le dico exits{} de mon",
"addPickable(self, itemId, nbOfThisItem): #méthode ajouter un objet dans une salle numberOfThisItem = self.pickables.get(itemId,",
"[] def addPickable(self, itemId, nbOfThisItem): #méthode ajouter un objet dans une salle numberOfThisItem",
"description self.pickables = {} #les objets ramassables (pickables) sont actuellement un dictionnaire vide,",
"newExit self.exits[exitName] = newExit #dans le dico exits{} de mon objet ('cuisine'), je",
"est mon objet qui suit le plan Room self.id = id self.name =",
"def __init__(self, id, name, description): #rappel : self est mon objet qui suit",
"un dictionnaire vide, qu'on va remplir plus tard, on ne les retrouve pas",
"addInspectable(self, inspectableItem): self.inspectables.append(inspectableItem) def addCharacter(self, charactersName): self.characters.append(charactersName) def addExit(self, destination, exitName): #méthode ajouter",
"le contructeur #init //// = je stocke ma nouvelle sortie dans newExit self.exits[exitName]",
"item de plus au nombre d'items self.pickables[itemId] = numberOfThisItem #numberfThisItem est stocké dans",
"# actuelle 'cuisine', je lui ajoute une sortie @staticmethod def addDoubleExitBuilder(source, destination, exitName1,",
"#rappel : self est mon objet qui suit le plan Room self.id =",
"id, name, description): #rappel : self est mon objet qui suit le plan",
"ne les retrouve pas dans les paramètres de ma fonction init self.inspectables =",
"Room du coup (ex 'cuisine') #Création d'un nouvel objet de type Exit (ex",
"charactersName): self.characters.append(charactersName) def addExit(self, destination, exitName): #méthode ajouter une sortie (d'un seul côté)",
"clef qui # vaut exitName) def addDoubleExit(self, destination, exitName1, exitName2): #méthode ajouter la",
"(d'un seul côté) newExit = Exit(self, destination) #self ici se trouve dans Room,",
"self.inspectables = [] self.exits = {} self.characters = [] def addPickable(self, itemId, nbOfThisItem):",
"trouve dans Room, il ne fait référence qu'à une Room du coup (ex",
"+= nbOfThisItem #j'ajoute x item de plus au nombre d'items self.pickables[itemId] = numberOfThisItem",
"ne fait référence qu'à une Room du coup (ex 'cuisine') #Création d'un nouvel",
"en lui donnant les paramètres attendus par le contructeur #init //// = je",
"ajouter un objet dans une salle numberOfThisItem = self.pickables.get(itemId, 0) #fonction get me",
"= numberOfThisItem #numberfThisItem est stocké dans le pickable de ma 'cuisine' qui a",
"self.description = description self.pickables = {} #les objets ramassables (pickables) sont actuellement un",
"'cuisine', je lui ajoute une sortie @staticmethod def addDoubleExitBuilder(source, destination, exitName1, exitName2): def",
"= name self.description = description self.pickables = {} #les objets ramassables (pickables) sont",
"il ne fait référence qu'à une Room du coup (ex 'cuisine') #Création d'un",
"stocke ma nouvelle sortie dans newExit self.exits[exitName] = newExit #dans le dico exits{}",
"classe Room. A la destination de ma salle # actuelle 'cuisine', je lui",
"la même sortie des deux côtés self.addExit(destination, exitName1) #exécution méthode addExit() sur 'cuisine'",
"0 numberOfThisItem += nbOfThisItem #j'ajoute x item de plus au nombre d'items self.pickables[itemId]",
"# vaut exitName) def addDoubleExit(self, destination, exitName1, exitName2): #méthode ajouter la même sortie",
"ramassables (pickables) sont actuellement un dictionnaire vide, qu'on va remplir plus tard, on",
"sortie @staticmethod def addDoubleExitBuilder(source, destination, exitName1, exitName2): def hiddenDoubleExit(game, source=source, destination=destination, exitName1=exitName1, exitName2=exitName2):",
"= self.pickables.get(itemId, 0) #fonction get me retourne la valeur associée à la clef",
"salle # actuelle 'cuisine', je lui ajoute une sortie @staticmethod def addDoubleExitBuilder(source, destination,",
"#destination est associé à la classe Room. A la destination de ma salle",
"a pas, il me retourne ici 0 numberOfThisItem += nbOfThisItem #j'ajoute x item",
"#j'ajoute x item de plus au nombre d'items self.pickables[itemId] = numberOfThisItem #numberfThisItem est",
"objet ('cuisine'), je stocke newExit (associé à la clef qui # vaut exitName)",
"on ne les retrouve pas dans les paramètres de ma fonction init self.inspectables",
"destination) #self ici se trouve dans Room, il ne fait référence qu'à une",
"coup (ex 'cuisine') #Création d'un nouvel objet de type Exit (ex : new",
"@staticmethod def addDoubleExitBuilder(source, destination, exitName1, exitName2): def hiddenDoubleExit(game, source=source, destination=destination, exitName1=exitName1, exitName2=exitName2): source.addDoubleExit(destination,",
"ma salle # actuelle 'cuisine', je lui ajoute une sortie @staticmethod def addDoubleExitBuilder(source,",
"newExit (associé à la clef qui # vaut exitName) def addDoubleExit(self, destination, exitName1,",
"exitName1, exitName2): def hiddenDoubleExit(game, source=source, destination=destination, exitName1=exitName1, exitName2=exitName2): source.addDoubleExit(destination, exitName1, exitName2) return hiddenDoubleExit",
"x item de plus au nombre d'items self.pickables[itemId] = numberOfThisItem #numberfThisItem est stocké",
"get me retourne la valeur associée à la clef itemId, # et s'il",
"dans Room, il ne fait référence qu'à une Room du coup (ex 'cuisine')",
"self est mon objet qui suit le plan Room self.id = id self.name",
"de type Exit (ex : new Exit en JS) en lui donnant les",
"à la clef qui # vaut exitName) def addDoubleExit(self, destination, exitName1, exitName2): #méthode",
"stocké dans le pickable de ma 'cuisine' qui a pour clef itemId def",
"pas dans les paramètres de ma fonction init self.inspectables = [] self.exits =",
"numberOfThisItem += nbOfThisItem #j'ajoute x item de plus au nombre d'items self.pickables[itemId] =",
"sur 'cuisine' destination.addExit(self, exitName2) #destination est associé à la classe Room. A la",
"exitName2): def hiddenDoubleExit(game, source=source, destination=destination, exitName1=exitName1, exitName2=exitName2): source.addDoubleExit(destination, exitName1, exitName2) return hiddenDoubleExit def",
"objets ramassables (pickables) sont actuellement un dictionnaire vide, qu'on va remplir plus tard,",
"//// = je stocke ma nouvelle sortie dans newExit self.exits[exitName] = newExit #dans",
"self.addExit(destination, exitName1) #exécution méthode addExit() sur 'cuisine' destination.addExit(self, exitName2) #destination est associé à",
"dans le pickable de ma 'cuisine' qui a pour clef itemId def addInspectable(self,",
"par le contructeur #init //// = je stocke ma nouvelle sortie dans newExit",
"ma nouvelle sortie dans newExit self.exits[exitName] = newExit #dans le dico exits{} de",
"hiddenDoubleExit(game, source=source, destination=destination, exitName1=exitName1, exitName2=exitName2): source.addDoubleExit(destination, exitName1, exitName2) return hiddenDoubleExit def __repr__(self): return",
"plus tard, on ne les retrouve pas dans les paramètres de ma fonction",
"#méthode ajouter une sortie (d'un seul côté) newExit = Exit(self, destination) #self ici",
"= je stocke ma nouvelle sortie dans newExit self.exits[exitName] = newExit #dans le",
"actuelle 'cuisine', je lui ajoute une sortie @staticmethod def addDoubleExitBuilder(source, destination, exitName1, exitName2):",
": self est mon objet qui suit le plan Room self.id = id",
"plan Room self.id = id self.name = name self.description = description self.pickables =",
"référence qu'à une Room du coup (ex 'cuisine') #Création d'un nouvel objet de",
"def addDoubleExit(self, destination, exitName1, exitName2): #méthode ajouter la même sortie des deux côtés",
"de ma salle # actuelle 'cuisine', je lui ajoute une sortie @staticmethod def",
"ajouter la même sortie des deux côtés self.addExit(destination, exitName1) #exécution méthode addExit() sur",
"a pour clef itemId def addInspectable(self, inspectableItem): self.inspectables.append(inspectableItem) def addCharacter(self, charactersName): self.characters.append(charactersName) def",
"dictionnaire vide, qu'on va remplir plus tard, on ne les retrouve pas dans",
"me retourne ici 0 numberOfThisItem += nbOfThisItem #j'ajoute x item de plus au",
"description): #rappel : self est mon objet qui suit le plan Room self.id",
"dans une salle numberOfThisItem = self.pickables.get(itemId, 0) #fonction get me retourne la valeur",
"une Room du coup (ex 'cuisine') #Création d'un nouvel objet de type Exit",
"il me retourne ici 0 numberOfThisItem += nbOfThisItem #j'ajoute x item de plus",
"def addCharacter(self, charactersName): self.characters.append(charactersName) def addExit(self, destination, exitName): #méthode ajouter une sortie (d'un",
"= {} #les objets ramassables (pickables) sont actuellement un dictionnaire vide, qu'on va",
"= newExit #dans le dico exits{} de mon objet ('cuisine'), je stocke newExit",
"exitName1) #exécution méthode addExit() sur 'cuisine' destination.addExit(self, exitName2) #destination est associé à la",
"itemId def addInspectable(self, inspectableItem): self.inspectables.append(inspectableItem) def addCharacter(self, charactersName): self.characters.append(charactersName) def addExit(self, destination, exitName):",
"0) #fonction get me retourne la valeur associée à la clef itemId, #",
"self.exits[exitName] = newExit #dans le dico exits{} de mon objet ('cuisine'), je stocke",
"newExit #dans le dico exits{} de mon objet ('cuisine'), je stocke newExit (associé",
"contructeur #init //// = je stocke ma nouvelle sortie dans newExit self.exits[exitName] =",
": new Exit en JS) en lui donnant les paramètres attendus par le",
"paramètres de ma fonction init self.inspectables = [] self.exits = {} self.characters =",
"destination, exitName1, exitName2): def hiddenDoubleExit(game, source=source, destination=destination, exitName1=exitName1, exitName2=exitName2): source.addDoubleExit(destination, exitName1, exitName2) return",
"exit import Exit class Room: #objet room def __init__(self, id, name, description): #rappel",
"une salle numberOfThisItem = self.pickables.get(itemId, 0) #fonction get me retourne la valeur associée",
"self.pickables.get(itemId, 0) #fonction get me retourne la valeur associée à la clef itemId,",
"pickable de ma 'cuisine' qui a pour clef itemId def addInspectable(self, inspectableItem): self.inspectables.append(inspectableItem)",
"inspectableItem): self.inspectables.append(inspectableItem) def addCharacter(self, charactersName): self.characters.append(charactersName) def addExit(self, destination, exitName): #méthode ajouter une",
"lui ajoute une sortie @staticmethod def addDoubleExitBuilder(source, destination, exitName1, exitName2): def hiddenDoubleExit(game, source=source,",
"stocke newExit (associé à la clef qui # vaut exitName) def addDoubleExit(self, destination,",
"type Exit (ex : new Exit en JS) en lui donnant les paramètres",
"exitName2): #méthode ajouter la même sortie des deux côtés self.addExit(destination, exitName1) #exécution méthode",
"dans les paramètres de ma fonction init self.inspectables = [] self.exits = {}",
"numberOfThisItem = self.pickables.get(itemId, 0) #fonction get me retourne la valeur associée à la",
"valeur associée à la clef itemId, # et s'il n'y en a pas,",
"les paramètres de ma fonction init self.inspectables = [] self.exits = {} self.characters",
"qui suit le plan Room self.id = id self.name = name self.description =",
"Room, il ne fait référence qu'à une Room du coup (ex 'cuisine') #Création",
"Exit(self, destination) #self ici se trouve dans Room, il ne fait référence qu'à",
"= Exit(self, destination) #self ici se trouve dans Room, il ne fait référence",
"destination de ma salle # actuelle 'cuisine', je lui ajoute une sortie @staticmethod",
"room def __init__(self, id, name, description): #rappel : self est mon objet qui",
"du coup (ex 'cuisine') #Création d'un nouvel objet de type Exit (ex :",
"self.pickables = {} #les objets ramassables (pickables) sont actuellement un dictionnaire vide, qu'on",
"(ex : new Exit en JS) en lui donnant les paramètres attendus par",
"nombre d'items self.pickables[itemId] = numberOfThisItem #numberfThisItem est stocké dans le pickable de ma",
"itemId, nbOfThisItem): #méthode ajouter un objet dans une salle numberOfThisItem = self.pickables.get(itemId, 0)",
"ma fonction init self.inspectables = [] self.exits = {} self.characters = [] def",
"dans newExit self.exits[exitName] = newExit #dans le dico exits{} de mon objet ('cuisine'),",
"'cuisine' destination.addExit(self, exitName2) #destination est associé à la classe Room. A la destination",
"et s'il n'y en a pas, il me retourne ici 0 numberOfThisItem +=",
"Room self.id = id self.name = name self.description = description self.pickables = {}",
"ici se trouve dans Room, il ne fait référence qu'à une Room du",
"addExit() sur 'cuisine' destination.addExit(self, exitName2) #destination est associé à la classe Room. A",
"nbOfThisItem): #méthode ajouter un objet dans une salle numberOfThisItem = self.pickables.get(itemId, 0) #fonction",
"__init__(self, id, name, description): #rappel : self est mon objet qui suit le",
"exitName) def addDoubleExit(self, destination, exitName1, exitName2): #méthode ajouter la même sortie des deux",
"les retrouve pas dans les paramètres de ma fonction init self.inspectables = []",
"[] self.exits = {} self.characters = [] def addPickable(self, itemId, nbOfThisItem): #méthode ajouter",
"me retourne la valeur associée à la clef itemId, # et s'il n'y",
"de ma 'cuisine' qui a pour clef itemId def addInspectable(self, inspectableItem): self.inspectables.append(inspectableItem) def",
"JS) en lui donnant les paramètres attendus par le contructeur #init //// =",
"#objet room def __init__(self, id, name, description): #rappel : self est mon objet",
"#numberfThisItem est stocké dans le pickable de ma 'cuisine' qui a pour clef",
"#fonction get me retourne la valeur associée à la clef itemId, # et",
"'cuisine' qui a pour clef itemId def addInspectable(self, inspectableItem): self.inspectables.append(inspectableItem) def addCharacter(self, charactersName):",
"je lui ajoute une sortie @staticmethod def addDoubleExitBuilder(source, destination, exitName1, exitName2): def hiddenDoubleExit(game,",
"une sortie (d'un seul côté) newExit = Exit(self, destination) #self ici se trouve",
"donnant les paramètres attendus par le contructeur #init //// = je stocke ma",
"de mon objet ('cuisine'), je stocke newExit (associé à la clef qui #",
"fait référence qu'à une Room du coup (ex 'cuisine') #Création d'un nouvel objet",
"def addDoubleExitBuilder(source, destination, exitName1, exitName2): def hiddenDoubleExit(game, source=source, destination=destination, exitName1=exitName1, exitName2=exitName2): source.addDoubleExit(destination, exitName1,",
"clef itemId, # et s'il n'y en a pas, il me retourne ici",
"A la destination de ma salle # actuelle 'cuisine', je lui ajoute une",
"actuellement un dictionnaire vide, qu'on va remplir plus tard, on ne les retrouve",
"= [] def addPickable(self, itemId, nbOfThisItem): #méthode ajouter un objet dans une salle",
"plus au nombre d'items self.pickables[itemId] = numberOfThisItem #numberfThisItem est stocké dans le pickable",
"#self ici se trouve dans Room, il ne fait référence qu'à une Room",
"numberOfThisItem #numberfThisItem est stocké dans le pickable de ma 'cuisine' qui a pour",
"(pickables) sont actuellement un dictionnaire vide, qu'on va remplir plus tard, on ne",
"qu'on va remplir plus tard, on ne les retrouve pas dans les paramètres",
"'cuisine') #Création d'un nouvel objet de type Exit (ex : new Exit en",
"le plan Room self.id = id self.name = name self.description = description self.pickables",
"#dans le dico exits{} de mon objet ('cuisine'), je stocke newExit (associé à",
"clef itemId def addInspectable(self, inspectableItem): self.inspectables.append(inspectableItem) def addCharacter(self, charactersName): self.characters.append(charactersName) def addExit(self, destination,",
"destination.addExit(self, exitName2) #destination est associé à la classe Room. A la destination de",
"= {} self.characters = [] def addPickable(self, itemId, nbOfThisItem): #méthode ajouter un objet",
"va remplir plus tard, on ne les retrouve pas dans les paramètres de",
"se trouve dans Room, il ne fait référence qu'à une Room du coup",
"ajouter une sortie (d'un seul côté) newExit = Exit(self, destination) #self ici se",
"la classe Room. A la destination de ma salle # actuelle 'cuisine', je",
"self.inspectables.append(inspectableItem) def addCharacter(self, charactersName): self.characters.append(charactersName) def addExit(self, destination, exitName): #méthode ajouter une sortie",
"vide, qu'on va remplir plus tard, on ne les retrouve pas dans les",
"exitName): #méthode ajouter une sortie (d'un seul côté) newExit = Exit(self, destination) #self",
"def addExit(self, destination, exitName): #méthode ajouter une sortie (d'un seul côté) newExit =",
"Room. A la destination de ma salle # actuelle 'cuisine', je lui ajoute",
"Exit (ex : new Exit en JS) en lui donnant les paramètres attendus",
"le pickable de ma 'cuisine' qui a pour clef itemId def addInspectable(self, inspectableItem):",
"addDoubleExit(self, destination, exitName1, exitName2): #méthode ajouter la même sortie des deux côtés self.addExit(destination,",
"deux côtés self.addExit(destination, exitName1) #exécution méthode addExit() sur 'cuisine' destination.addExit(self, exitName2) #destination est",
"s'il n'y en a pas, il me retourne ici 0 numberOfThisItem += nbOfThisItem",
"nouvel objet de type Exit (ex : new Exit en JS) en lui",
"exitName2) #destination est associé à la classe Room. A la destination de ma",
"est associé à la classe Room. A la destination de ma salle #",
"à la classe Room. A la destination de ma salle # actuelle 'cuisine',",
"self.exits = {} self.characters = [] def addPickable(self, itemId, nbOfThisItem): #méthode ajouter un",
"je stocke newExit (associé à la clef qui # vaut exitName) def addDoubleExit(self,",
"associée à la clef itemId, # et s'il n'y en a pas, il",
"new Exit en JS) en lui donnant les paramètres attendus par le contructeur",
"addDoubleExitBuilder(source, destination, exitName1, exitName2): def hiddenDoubleExit(game, source=source, destination=destination, exitName1=exitName1, exitName2=exitName2): source.addDoubleExit(destination, exitName1, exitName2)",
"Room: #objet room def __init__(self, id, name, description): #rappel : self est mon",
"remplir plus tard, on ne les retrouve pas dans les paramètres de ma",
"= [] self.exits = {} self.characters = [] def addPickable(self, itemId, nbOfThisItem): #méthode",
"un objet dans une salle numberOfThisItem = self.pickables.get(itemId, 0) #fonction get me retourne",
"exitName1, exitName2): #méthode ajouter la même sortie des deux côtés self.addExit(destination, exitName1) #exécution",
"qui a pour clef itemId def addInspectable(self, inspectableItem): self.inspectables.append(inspectableItem) def addCharacter(self, charactersName): self.characters.append(charactersName)",
"#exécution méthode addExit() sur 'cuisine' destination.addExit(self, exitName2) #destination est associé à la classe",
"Exit en JS) en lui donnant les paramètres attendus par le contructeur #init",
"la clef qui # vaut exitName) def addDoubleExit(self, destination, exitName1, exitName2): #méthode ajouter",
"Exit class Room: #objet room def __init__(self, id, name, description): #rappel : self",
"= description self.pickables = {} #les objets ramassables (pickables) sont actuellement un dictionnaire",
"init self.inspectables = [] self.exits = {} self.characters = [] def addPickable(self, itemId,",
"méthode addExit() sur 'cuisine' destination.addExit(self, exitName2) #destination est associé à la classe Room.",
"côté) newExit = Exit(self, destination) #self ici se trouve dans Room, il ne",
"(associé à la clef qui # vaut exitName) def addDoubleExit(self, destination, exitName1, exitName2):",
"sortie des deux côtés self.addExit(destination, exitName1) #exécution méthode addExit() sur 'cuisine' destination.addExit(self, exitName2)",
"dico exits{} de mon objet ('cuisine'), je stocke newExit (associé à la clef",
"le dico exits{} de mon objet ('cuisine'), je stocke newExit (associé à la",
"from exit import Exit class Room: #objet room def __init__(self, id, name, description):",
"au nombre d'items self.pickables[itemId] = numberOfThisItem #numberfThisItem est stocké dans le pickable de",
"paramètres attendus par le contructeur #init //// = je stocke ma nouvelle sortie",
"mon objet ('cuisine'), je stocke newExit (associé à la clef qui # vaut",
"associé à la classe Room. A la destination de ma salle # actuelle",
"sortie dans newExit self.exits[exitName] = newExit #dans le dico exits{} de mon objet",
"fonction init self.inspectables = [] self.exits = {} self.characters = [] def addPickable(self,",
"pour clef itemId def addInspectable(self, inspectableItem): self.inspectables.append(inspectableItem) def addCharacter(self, charactersName): self.characters.append(charactersName) def addExit(self,",
"#Création d'un nouvel objet de type Exit (ex : new Exit en JS)",
"itemId, # et s'il n'y en a pas, il me retourne ici 0",
"la destination de ma salle # actuelle 'cuisine', je lui ajoute une sortie",
"nbOfThisItem #j'ajoute x item de plus au nombre d'items self.pickables[itemId] = numberOfThisItem #numberfThisItem",
"les paramètres attendus par le contructeur #init //// = je stocke ma nouvelle",
"vaut exitName) def addDoubleExit(self, destination, exitName1, exitName2): #méthode ajouter la même sortie des",
"#méthode ajouter la même sortie des deux côtés self.addExit(destination, exitName1) #exécution méthode addExit()",
"destination, exitName): #méthode ajouter une sortie (d'un seul côté) newExit = Exit(self, destination)",
"# et s'il n'y en a pas, il me retourne ici 0 numberOfThisItem",
"suit le plan Room self.id = id self.name = name self.description = description",
"à la clef itemId, # et s'il n'y en a pas, il me",
"{} #les objets ramassables (pickables) sont actuellement un dictionnaire vide, qu'on va remplir",
"source=source, destination=destination, exitName1=exitName1, exitName2=exitName2): source.addDoubleExit(destination, exitName1, exitName2) return hiddenDoubleExit def __repr__(self): return \"Room(\"+self.name+\")\"",
"name self.description = description self.pickables = {} #les objets ramassables (pickables) sont actuellement",
"la clef itemId, # et s'il n'y en a pas, il me retourne",
"qui # vaut exitName) def addDoubleExit(self, destination, exitName1, exitName2): #méthode ajouter la même",
"self.id = id self.name = name self.description = description self.pickables = {} #les",
"retourne la valeur associée à la clef itemId, # et s'il n'y en",
"en a pas, il me retourne ici 0 numberOfThisItem += nbOfThisItem #j'ajoute x",
"#init //// = je stocke ma nouvelle sortie dans newExit self.exits[exitName] = newExit",
"sortie (d'un seul côté) newExit = Exit(self, destination) #self ici se trouve dans",
"class Room: #objet room def __init__(self, id, name, description): #rappel : self est",
"objet qui suit le plan Room self.id = id self.name = name self.description",
"def addInspectable(self, inspectableItem): self.inspectables.append(inspectableItem) def addCharacter(self, charactersName): self.characters.append(charactersName) def addExit(self, destination, exitName): #méthode",
"mon objet qui suit le plan Room self.id = id self.name = name",
"self.characters.append(charactersName) def addExit(self, destination, exitName): #méthode ajouter une sortie (d'un seul côté) newExit",
"(ex 'cuisine') #Création d'un nouvel objet de type Exit (ex : new Exit",
"seul côté) newExit = Exit(self, destination) #self ici se trouve dans Room, il",
"de ma fonction init self.inspectables = [] self.exits = {} self.characters = []",
"de plus au nombre d'items self.pickables[itemId] = numberOfThisItem #numberfThisItem est stocké dans le",
"sont actuellement un dictionnaire vide, qu'on va remplir plus tard, on ne les",
"attendus par le contructeur #init //// = je stocke ma nouvelle sortie dans",
"('cuisine'), je stocke newExit (associé à la clef qui # vaut exitName) def",
"{} self.characters = [] def addPickable(self, itemId, nbOfThisItem): #méthode ajouter un objet dans",
"def hiddenDoubleExit(game, source=source, destination=destination, exitName1=exitName1, exitName2=exitName2): source.addDoubleExit(destination, exitName1, exitName2) return hiddenDoubleExit def __repr__(self):",
"d'un nouvel objet de type Exit (ex : new Exit en JS) en",
"côtés self.addExit(destination, exitName1) #exécution méthode addExit() sur 'cuisine' destination.addExit(self, exitName2) #destination est associé",
"import Exit class Room: #objet room def __init__(self, id, name, description): #rappel :",
"tard, on ne les retrouve pas dans les paramètres de ma fonction init",
"salle numberOfThisItem = self.pickables.get(itemId, 0) #fonction get me retourne la valeur associée à",
"ma 'cuisine' qui a pour clef itemId def addInspectable(self, inspectableItem): self.inspectables.append(inspectableItem) def addCharacter(self,",
"id self.name = name self.description = description self.pickables = {} #les objets ramassables",
"addExit(self, destination, exitName): #méthode ajouter une sortie (d'un seul côté) newExit = Exit(self,",
"ajoute une sortie @staticmethod def addDoubleExitBuilder(source, destination, exitName1, exitName2): def hiddenDoubleExit(game, source=source, destination=destination,",
"une sortie @staticmethod def addDoubleExitBuilder(source, destination, exitName1, exitName2): def hiddenDoubleExit(game, source=source, destination=destination, exitName1=exitName1,",
"newExit = Exit(self, destination) #self ici se trouve dans Room, il ne fait",
"n'y en a pas, il me retourne ici 0 numberOfThisItem += nbOfThisItem #j'ajoute",
"self.name = name self.description = description self.pickables = {} #les objets ramassables (pickables)",
"pas, il me retourne ici 0 numberOfThisItem += nbOfThisItem #j'ajoute x item de",
"est stocké dans le pickable de ma 'cuisine' qui a pour clef itemId",
"lui donnant les paramètres attendus par le contructeur #init //// = je stocke",
"ici 0 numberOfThisItem += nbOfThisItem #j'ajoute x item de plus au nombre d'items",
"objet dans une salle numberOfThisItem = self.pickables.get(itemId, 0) #fonction get me retourne la",
"#les objets ramassables (pickables) sont actuellement un dictionnaire vide, qu'on va remplir plus",
"addCharacter(self, charactersName): self.characters.append(charactersName) def addExit(self, destination, exitName): #méthode ajouter une sortie (d'un seul",
"exits{} de mon objet ('cuisine'), je stocke newExit (associé à la clef qui",
"retourne ici 0 numberOfThisItem += nbOfThisItem #j'ajoute x item de plus au nombre",
"qu'à une Room du coup (ex 'cuisine') #Création d'un nouvel objet de type",
"retrouve pas dans les paramètres de ma fonction init self.inspectables = [] self.exits",
"name, description): #rappel : self est mon objet qui suit le plan Room",
"objet de type Exit (ex : new Exit en JS) en lui donnant",
"self.pickables[itemId] = numberOfThisItem #numberfThisItem est stocké dans le pickable de ma 'cuisine' qui",
"en JS) en lui donnant les paramètres attendus par le contructeur #init ////",
"= id self.name = name self.description = description self.pickables = {} #les objets",
"self.characters = [] def addPickable(self, itemId, nbOfThisItem): #méthode ajouter un objet dans une",
"destination, exitName1, exitName2): #méthode ajouter la même sortie des deux côtés self.addExit(destination, exitName1)",
"je stocke ma nouvelle sortie dans newExit self.exits[exitName] = newExit #dans le dico",
"même sortie des deux côtés self.addExit(destination, exitName1) #exécution méthode addExit() sur 'cuisine' destination.addExit(self,"
] |
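A minimal usage sketch, assuming the companion `exit` module defines `Exit(source, destination)` as the constructor call above implies; the room names and item id are hypothetical:

# Usage sketch (assumes Exit(source, destination) from the exit module).
kitchen = Room(1, 'cuisine', 'A small kitchen.')
garden = Room(2, 'jardin', 'An overgrown garden.')
kitchen.addPickable('apple', 3)        # kitchen now holds 3 apples
kitchen.addDoubleExit(garden, 'north', 'south')
print(kitchen.exits.keys())            # dict_keys(['north'])
print(garden.exits.keys())             # dict_keys(['south'])
print(kitchen)                         # Room(cuisine)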
def finding_regions(state):
    north_east = ['CT', 'NY', 'PA', 'NJ', 'RI', 'MA', 'MD', 'VT', 'NH', 'ME']
    south_west = ['AZ', 'TX', 'NM', 'OK']
    south_east = ['GA', 'NC', 'VA', 'FL', 'KY', 'SC', 'LA', 'AL', 'WV', 'DC',
                  'AR', 'DE', 'MS', 'TN']
    west = ['CA', 'OR', 'UT', 'WA', 'CO', 'NV', 'AK', 'MT', 'HI', 'WY', 'ID']
    mid_west = ['IL', 'MO', 'MN', 'OH', 'WI', 'KS', 'MI', 'SD', 'IA', 'NE', 'IN', 'ND']
    if state in west:
        return 'west'
    elif state in south_west:
        return 'south_west'
    elif state in south_east:
        return 'south_east'
    elif state in mid_west:
        return 'mid_west'
    elif state in north_east:
        return 'north_east'
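A quick usage check; note that a code outside every list falls through all branches and returns None implicitly:

print(finding_regions('CA'))  # 'west'
print(finding_regions('TX'))  # 'south_west'
print(finding_regions('NY'))  # 'north_east'
print(finding_regions('XX'))  # None: matches no region list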
# UsefulTools/Classify/scripts/vis.py
# Draw network structure
from graphviz import Digraph
import torch
from torch.autograd import Variable
from torchviz import make_dot
from nets.ResNets import ResNet

model = ResNet(resnet='resnet18', num_classes=10575, embeddings_num=128,
               img_size=224, is_fc=True, is_AvgPool=False)
x = Variable(torch.randn(1, 3, 224, 224), requires_grad=True)
y = model(x)
params_dict = dict(model.named_parameters())
params_dict['x'] = x
g = make_dot(y, params=params_dict)
g.view()
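The script depends on the project-local nets.ResNets module. A self-contained sketch of the same torchviz workflow, assuming torchvision is installed and substituting a stock resnet18 (Variable is deprecated; a tensor with requires_grad=True is the modern equivalent):

# Self-contained variant: visualize a stock torchvision resnet18 instead of
# the project-specific nets.ResNets model.
import torch
import torchvision
from torchviz import make_dot

model = torchvision.models.resnet18(num_classes=10)
x = torch.randn(1, 3, 224, 224, requires_grad=True)
y = model(x)
g = make_dot(y, params=dict(model.named_parameters()))
g.render('resnet18_graph', format='pdf')  # writes resnet18_graph.pdf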
# keakon/Doodle
# -*- coding: utf-8 -*-
from tornado.web import HTTPError

from doodle.core.models.comment import Comment
from doodle.core.models.user import User

from ..base_handler import AdminHandler


class BanUserHandler(AdminHandler):
    def post(self, comment_id):
        comment = Comment.get_by_id(comment_id)
        # bug fix: the original guard read `if not comment and comment.user_id`,
        # which raises AttributeError when comment is None and never fires otherwise
        if not (comment and comment.user_id):
            raise HTTPError(404)
        user = User.get_by_id(comment.user_id)
        if not user:
            raise HTTPError(404)
        if not user.banned:
            user.banned = True
            user.save(relative=False, transactional=False)
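Why the guard needed parentheses: with a missing comment, `not comment` is True and the short-circuit then dereferences None; with an existing comment, `not comment` is False and the 404 never fires. A standalone illustration:

# Standalone illustration of the operator-precedence bug fixed above.
comment = None
try:
    if not comment and comment.user_id:   # original form
        pass
except AttributeError as e:
    print('original guard crashes:', e)

if not (comment and comment.user_id):     # fixed form
    print('fixed guard correctly rejects the request')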
from unittest import TestCase

from uttut.elements import Datum, Entity, Intent

from ..partition_by_entities import partition_by_entities


class PartitionByEntitiesTestCase(TestCase):

    def setUp(self):
        self.utterance = '我想訂明天從紐約飛到新加坡的機票'
        self.entities = [
            Entity(label=0, value='明天', start=3, end=5, replacements=['下禮拜二']),
            Entity(label=1, value='紐約', start=6, end=8),
            Entity(label=2, value='新加坡', start=10, end=13, replacements=['斯堪地那維亞', 'KIX']),
        ]
        self.intents = [
            Intent(label=0),
        ]
        self.datum = Datum(
            utterance=self.utterance,
            intents=self.intents,
            entities=self.entities,
        )
        self.datum_wo_entity = Datum(
            utterance='薄餡亂入',
            intents=[Intent(label=0)],
        )

    def test_partition_by_entities(self):
        actual_parts, entity_names = partition_by_entities(self.datum, False)
        expected_parts = [
            ['我想訂'],
            ['下禮拜二'],
            ['從'],
            ['紐約'],
            ['飛到'],
            ['斯堪地那維亞', 'KIX'],
            ['的機票'],
        ]
        for exp_part, act_part in zip(expected_parts, actual_parts):
            self.assertEqual(set(exp_part), set(act_part))
        self.assertEqual(
            entity_names,
            [None, 0, None, 1, None, 2, None],
        )

    def test_partition_by_entities_include_orig(self):
        actual_parts, entity_names = partition_by_entities(self.datum, True)
        expected_parts = [
            ['我想訂'],
            ['明天', '下禮拜二'],
            ['從'],
            ['紐約'],
            ['飛到'],
            ['新加坡', '斯堪地那維亞', 'KIX'],
            ['的機票'],
        ]
        for exp_part, act_part in zip(expected_parts, actual_parts):
            self.assertEqual(set(exp_part), set(act_part))
        self.assertEqual(
            entity_names,
            [None, 0, None, 1, None, 2, None],
        )

    def test_datum_wo_entity(self):
        # include origin (the original comments here were swapped: True is the
        # include-origin flag, as the two tests above show)
        output = partition_by_entities(self.datum_wo_entity, True)
        self.assertEqual(([['薄餡亂入']], [None]), output)
        # do not include origin
        output = partition_by_entities(self.datum_wo_entity, False)
        self.assertEqual(([['薄餡亂入']], [None]), output)
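A minimal sketch of what these tests imply partition_by_entities does; this is reverse-engineered from the assertions above, not uttut's actual implementation: split the utterance at entity spans, substitute each entity's replacement list (prepending the original value when the flag is set or no replacements exist), and report each part's entity label, with None for plain text.

def partition_by_entities_sketch(datum, include_orig):
    # Assumed behavior inferred from the tests; not the real uttut code.
    parts, entity_names = [], []
    cursor = 0
    for ent in sorted(datum.entities or [], key=lambda e: e.start):
        if ent.start > cursor:                    # plain text before the entity
            parts.append([datum.utterance[cursor:ent.start]])
            entity_names.append(None)
        candidates = list(getattr(ent, 'replacements', None) or [])
        if include_orig or not candidates:
            candidates.insert(0, ent.value)
        parts.append(candidates)
        entity_names.append(ent.label)
        cursor = ent.end
    if cursor < len(datum.utterance):             # trailing plain text
        parts.append([datum.utterance[cursor:]])
        entity_names.append(None)
    if not parts:                                 # no entities at all
        parts, entity_names = [[datum.utterance]], [None]
    return parts, entity_names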
# lib/sunrise_sunset.py
# from astral import Astral
import astral, pytz, os, sys, json
import datetime, time
from time import mktime
from datetime import timedelta


def sunset_for_today():
    today = datetime.datetime.now()
    offset = int((mktime(time.localtime()) - mktime(time.gmtime())) / 60 / 60)
    long_lat = get_long_lat()
    this_location = astral.Location()
    this_location.longitude = long_lat['longitude']
    this_location.latitude = long_lat['latitude']
    this_location.solar_depression = 'civil'
    return this_location.sunset(date=today) + timedelta(hours=offset)


def sunrise_for_today():
    today = datetime.datetime.now()
    offset = int((mktime(time.localtime()) - mktime(time.gmtime())) / 60 / 60)
    long_lat = get_long_lat()
    this_location = astral.Location()
    this_location.longitude = long_lat['longitude']
    this_location.latitude = long_lat['latitude']
    this_location.solar_depression = 'civil'
    return this_location.sunrise(date=today) + timedelta(hours=offset)


def get_long_lat():
    # script_path = os.path.dirname(__file__)
    # os.chdir(script_path)
    settings_file_path = os.path.join(sys.path[0], '..', 'config/settings.json')
    # settings_json = open('../config/settings.json')
    settings_json = open(settings_file_path)
    settings = json.load(settings_json)
    settings_json.close()
    return {'longitude': settings[0]['longitude'], 'latitude': settings[0]['latitude']}


def update_timers():
    script_path = os.path.dirname(__file__)
    os.chdir(script_path)
    timers_file_path = '../config/timers.json'
    timers_json = open(timers_file_path)
    timers_list = json.load(timers_json)
    timers_json.close()
    long_lat = get_long_lat()
    for timer in timers_list:
        if timer['sunset']:
            # sunset = sunset_for_today(long_lat['longitude'], long_lat['latitude'])
            sunset = sunset_for_today()
            if timer['sunset'] == 'on':
                timer['on'] = sunset.strftime('%H:%M')
            else:
                timer['off'] = sunset.strftime('%H:%M')
        if timer['sunrise']:
            # sunrise = sunrise_for_today(long_lat['longitude'], long_lat['latitude'])
            sunrise = sunrise_for_today()
            if timer['sunrise'] == 'off':
                timer['off'] = sunrise.strftime('%H:%M')
            else:
                timer['on'] = sunrise.strftime('%H:%M')
    with open(timers_file_path, 'w') as json_file:
        json.dump(timers_list, json_file, indent=4)


if __name__ == '__main__':
    update_timers()
"sunset = sunset_for_today() if timer['sunset'] == 'on': timer['on'] = sunset.strftime('%H:%M') else: timer['off'] =",
"os, sys, json import datetime, time from time import mktime from datetime import",
"timedelta def sunset_for_today(): today = datetime.datetime.now() offset = int((mktime(time.localtime())-mktime(time.gmtime()))/60/60) long_lat = get_long_lat() this_location",
"int((mktime(time.localtime())-mktime(time.gmtime()))/60/60) long_lat = get_long_lat() this_location = astral.Location() this_location.longitude = long_lat['longitude'] this_location.latitude = long_lat['latitude']",
"timer['on'] = sunset.strftime('%H:%M') else: timer['off'] = sunset.strftime('%H:%M') if timer['sunrise']: #sunrise = sunrise_for_today(long_lat['longitude'],long_lat['latitude']) sunrise",
"'latitude': settings[0]['latitude']} def update_timers(): script_path = os.path.dirname(__file__) os.chdir(script_path) timers_file_path = '../config/timers.json' timers_json =",
"else: timer['off'] = sunset.strftime('%H:%M') if timer['sunrise']: #sunrise = sunrise_for_today(long_lat['longitude'],long_lat['latitude']) sunrise = sunrise_for_today() if",
"#sunset = sunset_for_today(long_lat['longitude'],long_lat['latitude']) sunset = sunset_for_today() if timer['sunset'] == 'on': timer['on'] = sunset.strftime('%H:%M')",
"json.load(timers_json) timers_json.close() long_lat = get_long_lat() for timer in timers_list: if timer['sunset']: #sunset =",
"timedelta(hours=offset) def get_long_lat(): #script_path = os.path.dirname(__file__) #os.chdir(script_path) settings_file_path = os.path.join(sys.path[0],'..','config/settings.json') #settings_json = open('../config/settings.json')",
"#from astral import Astral import astral, pytz, os, sys, json import datetime, time",
"json.load(settings_json) settings_json.close() return {'longitude': settings[0]['longitude'], 'latitude': settings[0]['latitude']} def update_timers(): script_path = os.path.dirname(__file__) os.chdir(script_path)",
"timer in timers_list: if timer['sunset']: #sunset = sunset_for_today(long_lat['longitude'],long_lat['latitude']) sunset = sunset_for_today() if timer['sunset']",
"timer['sunrise']: #sunrise = sunrise_for_today(long_lat['longitude'],long_lat['latitude']) sunrise = sunrise_for_today() if timer['sunrise'] == 'off': timer['off'] =",
"import timedelta def sunset_for_today(): today = datetime.datetime.now() offset = int((mktime(time.localtime())-mktime(time.gmtime()))/60/60) long_lat = get_long_lat()",
"long_lat['longitude'] this_location.latitude = long_lat['latitude'] this_location.solar_depression = 'civil' return this_location.sunrise(date=today) + timedelta(hours=offset) def get_long_lat():",
"get_long_lat() this_location = astral.Location() this_location.longitude = long_lat['longitude'] this_location.latitude = long_lat['latitude'] this_location.solar_depression = 'civil'",
"timer['sunset']: #sunset = sunset_for_today(long_lat['longitude'],long_lat['latitude']) sunset = sunset_for_today() if timer['sunset'] == 'on': timer['on'] =",
"timers_list = json.load(timers_json) timers_json.close() long_lat = get_long_lat() for timer in timers_list: if timer['sunset']:",
"json import datetime, time from time import mktime from datetime import timedelta def",
"= sunset_for_today(long_lat['longitude'],long_lat['latitude']) sunset = sunset_for_today() if timer['sunset'] == 'on': timer['on'] = sunset.strftime('%H:%M') else:",
"if timer['sunrise'] == 'off': timer['off'] = sunrise.strftime('%H:%M') else: timer['on'] = sunrise.strftime('%H:%M') with open(timers_file_path,",
"import astral, pytz, os, sys, json import datetime, time from time import mktime",
"== 'on': timer['on'] = sunset.strftime('%H:%M') else: timer['off'] = sunset.strftime('%H:%M') if timer['sunrise']: #sunrise =",
"in timers_list: if timer['sunset']: #sunset = sunset_for_today(long_lat['longitude'],long_lat['latitude']) sunset = sunset_for_today() if timer['sunset'] ==",
"= '../config/timers.json' timers_json = open(timers_file_path) timers_list = json.load(timers_json) timers_json.close() long_lat = get_long_lat() for",
"settings = json.load(settings_json) settings_json.close() return {'longitude': settings[0]['longitude'], 'latitude': settings[0]['latitude']} def update_timers(): script_path =",
"sunset_for_today(long_lat['longitude'],long_lat['latitude']) sunset = sunset_for_today() if timer['sunset'] == 'on': timer['on'] = sunset.strftime('%H:%M') else: timer['off']",
"+ timedelta(hours=offset) def sunrise_for_today(): today = datetime.datetime.now() offset = int((mktime(time.localtime())-mktime(time.gmtime()))/60/60) long_lat = get_long_lat()",
"= sunset.strftime('%H:%M') if timer['sunrise']: #sunrise = sunrise_for_today(long_lat['longitude'],long_lat['latitude']) sunrise = sunrise_for_today() if timer['sunrise'] ==",
"+ timedelta(hours=offset) def get_long_lat(): #script_path = os.path.dirname(__file__) #os.chdir(script_path) settings_file_path = os.path.join(sys.path[0],'..','config/settings.json') #settings_json =",
"os.path.join(sys.path[0],'..','config/settings.json') #settings_json = open('../config/settings.json') settings_json = open(settings_file_path) settings = json.load(settings_json) settings_json.close() return {'longitude':",
"'civil' return this_location.sunset(date=today) + timedelta(hours=offset) def sunrise_for_today(): today = datetime.datetime.now() offset = int((mktime(time.localtime())-mktime(time.gmtime()))/60/60)",
"long_lat = get_long_lat() for timer in timers_list: if timer['sunset']: #sunset = sunset_for_today(long_lat['longitude'],long_lat['latitude']) sunset",
"= json.load(timers_json) timers_json.close() long_lat = get_long_lat() for timer in timers_list: if timer['sunset']: #sunset",
"update_timers(): script_path = os.path.dirname(__file__) os.chdir(script_path) timers_file_path = '../config/timers.json' timers_json = open(timers_file_path) timers_list =",
"= open('../config/settings.json') settings_json = open(settings_file_path) settings = json.load(settings_json) settings_json.close() return {'longitude': settings[0]['longitude'], 'latitude':",
"this_location.sunset(date=today) + timedelta(hours=offset) def sunrise_for_today(): today = datetime.datetime.now() offset = int((mktime(time.localtime())-mktime(time.gmtime()))/60/60) long_lat =",
"os.chdir(script_path) timers_file_path = '../config/timers.json' timers_json = open(timers_file_path) timers_list = json.load(timers_json) timers_json.close() long_lat =",
"= datetime.datetime.now() offset = int((mktime(time.localtime())-mktime(time.gmtime()))/60/60) long_lat = get_long_lat() this_location = astral.Location() this_location.longitude =",
"if timer['sunset'] == 'on': timer['on'] = sunset.strftime('%H:%M') else: timer['off'] = sunset.strftime('%H:%M') if timer['sunrise']:",
"= open(settings_file_path) settings = json.load(settings_json) settings_json.close() return {'longitude': settings[0]['longitude'], 'latitude': settings[0]['latitude']} def update_timers():",
"astral.Location() this_location.longitude = long_lat['longitude'] this_location.latitude = long_lat['latitude'] this_location.solar_depression = 'civil' return this_location.sunrise(date=today) +",
"pytz, os, sys, json import datetime, time from time import mktime from datetime",
"= sunrise_for_today() if timer['sunrise'] == 'off': timer['off'] = sunrise.strftime('%H:%M') else: timer['on'] = sunrise.strftime('%H:%M')",
"'civil' return this_location.sunrise(date=today) + timedelta(hours=offset) def get_long_lat(): #script_path = os.path.dirname(__file__) #os.chdir(script_path) settings_file_path =",
"astral, pytz, os, sys, json import datetime, time from time import mktime from",
"{'longitude': settings[0]['longitude'], 'latitude': settings[0]['latitude']} def update_timers(): script_path = os.path.dirname(__file__) os.chdir(script_path) timers_file_path = '../config/timers.json'"
] |
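A note on the config shapes the script assumes, inferred from the reads above: settings.json is a JSON list whose first element carries the coordinates, and each timers.json entry flags 'sunset'/'sunrise' with 'on' or 'off' to say which switch time should track the sun. A minimal sketch that writes both files; the coordinates, times, and paths are illustrative only:

import json

settings = [{"longitude": -0.1276, "latitude": 51.5074}]  # read via settings[0]
timers = [{"sunset": "on",      # switch on at sunset
           "sunrise": "off",    # switch off at sunrise
           "on": "18:30",       # placeholder; overwritten by update_timers()
           "off": "06:45"}]

with open('config/settings.json', 'w') as f:
    json.dump(settings, f, indent=4)
with open('config/timers.json', 'w') as f:
    json.dump(timers, f, indent=4)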
[
"# # Distributed under the terms of the BSD license. # # The",
"the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- \"\"\"A measurement monitor is",
"# -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright 2015-2018 by Exopy Authors,",
"# ----------------------------------------------------------------------------- # Copyright 2015-2018 by Exopy Authors, see AUTHORS for more details.",
"# Distributed under the terms of the BSD license. # # The full",
"license. # # The full license is in the file LICENCE, distributed with",
"is used to follow the measurement progress. It can simply displays some database",
"the BSD license. # # The full license is in the file LICENCE,",
"Authors, see AUTHORS for more details. # # Distributed under the terms of",
"utf-8 -*- # ----------------------------------------------------------------------------- # Copyright 2015-2018 by Exopy Authors, see AUTHORS for",
"----------------------------------------------------------------------------- # Copyright 2015-2018 by Exopy Authors, see AUTHORS for more details. #",
"measurement monitor is used to follow the measurement progress. It can simply displays",
"the measurement progress. It can simply displays some database values or request the",
"for more details. # # Distributed under the terms of the BSD license.",
"of the BSD license. # # The full license is in the file",
"the plotting of some data. \"\"\" from .base_monitor import BaseMonitor, BaseMonitorItem, Monitor __all__",
"under the terms of the BSD license. # # The full license is",
"full license is in the file LICENCE, distributed with this software. # -----------------------------------------------------------------------------",
"to follow the measurement progress. It can simply displays some database values or",
"some data. \"\"\" from .base_monitor import BaseMonitor, BaseMonitorItem, Monitor __all__ = ['BaseMonitor', 'BaseMonitorItem',",
"# The full license is in the file LICENCE, distributed with this software.",
"follow the measurement progress. It can simply displays some database values or request",
"file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- \"\"\"A measurement monitor is used",
"data. \"\"\" from .base_monitor import BaseMonitor, BaseMonitorItem, Monitor __all__ = ['BaseMonitor', 'BaseMonitorItem', 'Monitor']",
"plotting of some data. \"\"\" from .base_monitor import BaseMonitor, BaseMonitorItem, Monitor __all__ =",
"the terms of the BSD license. # # The full license is in",
"progress. It can simply displays some database values or request the plotting of",
"in the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- \"\"\"A measurement monitor",
"of some data. \"\"\" from .base_monitor import BaseMonitor, BaseMonitorItem, Monitor __all__ = ['BaseMonitor',",
"# Copyright 2015-2018 by Exopy Authors, see AUTHORS for more details. # #",
"BSD license. # # The full license is in the file LICENCE, distributed",
"The full license is in the file LICENCE, distributed with this software. #",
"is in the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- \"\"\"A measurement",
"terms of the BSD license. # # The full license is in the",
"measurement progress. It can simply displays some database values or request the plotting",
"used to follow the measurement progress. It can simply displays some database values",
"database values or request the plotting of some data. \"\"\" from .base_monitor import",
"displays some database values or request the plotting of some data. \"\"\" from",
"# # The full license is in the file LICENCE, distributed with this",
"see AUTHORS for more details. # # Distributed under the terms of the",
"It can simply displays some database values or request the plotting of some",
"LICENCE, distributed with this software. # ----------------------------------------------------------------------------- \"\"\"A measurement monitor is used to",
"-*- # ----------------------------------------------------------------------------- # Copyright 2015-2018 by Exopy Authors, see AUTHORS for more",
"<filename>exopy/measurement/monitors/api.py # -*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright 2015-2018 by Exopy",
"monitor is used to follow the measurement progress. It can simply displays some",
"details. # # Distributed under the terms of the BSD license. # #",
"-*- coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright 2015-2018 by Exopy Authors, see",
"2015-2018 by Exopy Authors, see AUTHORS for more details. # # Distributed under",
"\"\"\"A measurement monitor is used to follow the measurement progress. It can simply",
"some database values or request the plotting of some data. \"\"\" from .base_monitor",
"coding: utf-8 -*- # ----------------------------------------------------------------------------- # Copyright 2015-2018 by Exopy Authors, see AUTHORS",
"AUTHORS for more details. # # Distributed under the terms of the BSD",
"software. # ----------------------------------------------------------------------------- \"\"\"A measurement monitor is used to follow the measurement progress.",
"more details. # # Distributed under the terms of the BSD license. #",
"with this software. # ----------------------------------------------------------------------------- \"\"\"A measurement monitor is used to follow the",
"values or request the plotting of some data. \"\"\" from .base_monitor import BaseMonitor,",
"this software. # ----------------------------------------------------------------------------- \"\"\"A measurement monitor is used to follow the measurement",
"simply displays some database values or request the plotting of some data. \"\"\"",
"license is in the file LICENCE, distributed with this software. # ----------------------------------------------------------------------------- \"\"\"A",
"Exopy Authors, see AUTHORS for more details. # # Distributed under the terms",
"can simply displays some database values or request the plotting of some data.",
"or request the plotting of some data. \"\"\" from .base_monitor import BaseMonitor, BaseMonitorItem,",
"# ----------------------------------------------------------------------------- \"\"\"A measurement monitor is used to follow the measurement progress. It",
"Copyright 2015-2018 by Exopy Authors, see AUTHORS for more details. # # Distributed",
"distributed with this software. # ----------------------------------------------------------------------------- \"\"\"A measurement monitor is used to follow",
"Distributed under the terms of the BSD license. # # The full license",
"request the plotting of some data. \"\"\" from .base_monitor import BaseMonitor, BaseMonitorItem, Monitor",
"by Exopy Authors, see AUTHORS for more details. # # Distributed under the",
"----------------------------------------------------------------------------- \"\"\"A measurement monitor is used to follow the measurement progress. It can"
] |
[
"* np.pi) * self.std) * np.exp(-(x - self.mean - cond) ** 2 /",
"__slots__ = ['spread'] def __init__(self, spread: float): super().__init__() self.spread = spread assert self.spread",
"x + np.random.uniform(low=-self.spread / 2, high=self.spread / 2, size=x.shape) def pdf(self, x, cond):",
"class Normal(ProposalDistribution): __slots__ = ['mean', 'std'] def __init__(self, mean: float, spread: float): super().__init__()",
"... class Normal(ProposalDistribution): __slots__ = ['mean', 'std'] def __init__(self, mean: float, spread: float):",
"np.pi) * self.std) * np.exp(-(x - self.mean - cond) ** 2 / (2",
"self.std) * np.exp(-(x - self.mean - cond) ** 2 / (2 * self.std",
"import numpy as np class ProposalDistribution(metaclass=ABCMeta): @abstractmethod def __init__(self): ... @abstractmethod def sample(self,",
"self.spread > 0, \"Wrong specification of distribution!\" def sample(self, x): return x +",
"** 2)) class Uniform(ProposalDistribution): __slots__ = ['spread'] def __init__(self, spread: float): super().__init__() self.spread",
"(np.sqrt(2 * np.pi) * self.std) * np.exp(-(x - self.mean - cond) ** 2",
"\"Wrong specification of distribution!\" def sample(self, x): return x + np.random.normal(self.mean, self.std, x.shape)",
"= mean self.std = spread assert self.std > 0, \"Wrong specification of distribution!\"",
"super().__init__() self.mean = mean self.std = spread assert self.std > 0, \"Wrong specification",
"np.ndarray, cond: np.ndarray) -> np.ndarray: ... class Normal(ProposalDistribution): __slots__ = ['mean', 'std'] def",
"(2 * self.std ** 2)) class Uniform(ProposalDistribution): __slots__ = ['spread'] def __init__(self, spread:",
"class ProposalDistribution(metaclass=ABCMeta): @abstractmethod def __init__(self): ... @abstractmethod def sample(self, x: np.ndarray) -> np.ndarray:",
"cond): return 1 / (np.sqrt(2 * np.pi) * self.std) * np.exp(-(x - self.mean",
"> 0, \"Wrong specification of distribution!\" def sample(self, x): return x + np.random.uniform(low=-self.spread",
"self.std > 0, \"Wrong specification of distribution!\" def sample(self, x): return x +",
"float): super().__init__() self.spread = spread assert self.spread > 0, \"Wrong specification of distribution!\"",
"assert self.spread > 0, \"Wrong specification of distribution!\" def sample(self, x): return x",
"float): super().__init__() self.mean = mean self.std = spread assert self.std > 0, \"Wrong",
"np.exp(-(x - self.mean - cond) ** 2 / (2 * self.std ** 2))",
"spread assert self.std > 0, \"Wrong specification of distribution!\" def sample(self, x): return",
"assert self.std > 0, \"Wrong specification of distribution!\" def sample(self, x): return x",
"float, spread: float): super().__init__() self.mean = mean self.std = spread assert self.std >",
"Uniform(ProposalDistribution): __slots__ = ['spread'] def __init__(self, spread: float): super().__init__() self.spread = spread assert",
"abc import ABCMeta, abstractmethod import numpy as np class ProposalDistribution(metaclass=ABCMeta): @abstractmethod def __init__(self):",
"specification of distribution!\" def sample(self, x): return x + np.random.uniform(low=-self.spread / 2, high=self.spread",
"cond) ** 2 / (2 * self.std ** 2)) class Uniform(ProposalDistribution): __slots__ =",
"+ np.random.normal(self.mean, self.std, x.shape) def pdf(self, x, cond): return 1 / (np.sqrt(2 *",
"np.ndarray: ... class Normal(ProposalDistribution): __slots__ = ['mean', 'std'] def __init__(self, mean: float, spread:",
"-> np.ndarray: ... class Normal(ProposalDistribution): __slots__ = ['mean', 'std'] def __init__(self, mean: float,",
"-> np.ndarray: ... @abstractmethod def pdf(self, x: np.ndarray, cond: np.ndarray) -> np.ndarray: ...",
"* self.std ** 2)) class Uniform(ProposalDistribution): __slots__ = ['spread'] def __init__(self, spread: float):",
"* np.exp(-(x - self.mean - cond) ** 2 / (2 * self.std **",
"np.ndarray) -> np.ndarray: ... class Normal(ProposalDistribution): __slots__ = ['mean', 'std'] def __init__(self, mean:",
"self.mean - cond) ** 2 / (2 * self.std ** 2)) class Uniform(ProposalDistribution):",
"= spread assert self.spread > 0, \"Wrong specification of distribution!\" def sample(self, x):",
"spread: float): super().__init__() self.mean = mean self.std = spread assert self.std > 0,",
"\"Wrong specification of distribution!\" def sample(self, x): return x + np.random.uniform(low=-self.spread / 2,",
"def sample(self, x: np.ndarray) -> np.ndarray: ... @abstractmethod def pdf(self, x: np.ndarray, cond:",
"x): return x + np.random.normal(self.mean, self.std, x.shape) def pdf(self, x, cond): return 1",
"2)) class Uniform(ProposalDistribution): __slots__ = ['spread'] def __init__(self, spread: float): super().__init__() self.spread =",
"spread assert self.spread > 0, \"Wrong specification of distribution!\" def sample(self, x): return",
"sample(self, x): return x + np.random.uniform(low=-self.spread / 2, high=self.spread / 2, size=x.shape) def",
"@abstractmethod def sample(self, x: np.ndarray) -> np.ndarray: ... @abstractmethod def pdf(self, x: np.ndarray,",
"class Uniform(ProposalDistribution): __slots__ = ['spread'] def __init__(self, spread: float): super().__init__() self.spread = spread",
"** 2 / (2 * self.std ** 2)) class Uniform(ProposalDistribution): __slots__ = ['spread']",
"mean: float, spread: float): super().__init__() self.mean = mean self.std = spread assert self.std",
"abstractmethod import numpy as np class ProposalDistribution(metaclass=ABCMeta): @abstractmethod def __init__(self): ... @abstractmethod def",
"> 0, \"Wrong specification of distribution!\" def sample(self, x): return x + np.random.normal(self.mean,",
"+ np.random.uniform(low=-self.spread / 2, high=self.spread / 2, size=x.shape) def pdf(self, x, cond): return",
"numpy as np class ProposalDistribution(metaclass=ABCMeta): @abstractmethod def __init__(self): ... @abstractmethod def sample(self, x:",
"/ 2, high=self.spread / 2, size=x.shape) def pdf(self, x, cond): return np.array(1 /",
"= ['spread'] def __init__(self, spread: float): super().__init__() self.spread = spread assert self.spread >",
"import ABCMeta, abstractmethod import numpy as np class ProposalDistribution(metaclass=ABCMeta): @abstractmethod def __init__(self): ...",
"= ['mean', 'std'] def __init__(self, mean: float, spread: float): super().__init__() self.mean = mean",
"x: np.ndarray, cond: np.ndarray) -> np.ndarray: ... class Normal(ProposalDistribution): __slots__ = ['mean', 'std']",
"__init__(self, mean: float, spread: float): super().__init__() self.mean = mean self.std = spread assert",
"= spread assert self.std > 0, \"Wrong specification of distribution!\" def sample(self, x):",
"__init__(self, spread: float): super().__init__() self.spread = spread assert self.spread > 0, \"Wrong specification",
"ABCMeta, abstractmethod import numpy as np class ProposalDistribution(metaclass=ABCMeta): @abstractmethod def __init__(self): ... @abstractmethod",
"/ (np.sqrt(2 * np.pi) * self.std) * np.exp(-(x - self.mean - cond) **",
"def sample(self, x): return x + np.random.uniform(low=-self.spread / 2, high=self.spread / 2, size=x.shape)",
"np class ProposalDistribution(metaclass=ABCMeta): @abstractmethod def __init__(self): ... @abstractmethod def sample(self, x: np.ndarray) ->",
"def pdf(self, x: np.ndarray, cond: np.ndarray) -> np.ndarray: ... class Normal(ProposalDistribution): __slots__ =",
"return x + np.random.normal(self.mean, self.std, x.shape) def pdf(self, x, cond): return 1 /",
"np.random.normal(self.mean, self.std, x.shape) def pdf(self, x, cond): return 1 / (np.sqrt(2 * np.pi)",
"Normal(ProposalDistribution): __slots__ = ['mean', 'std'] def __init__(self, mean: float, spread: float): super().__init__() self.mean",
"self.spread = spread assert self.spread > 0, \"Wrong specification of distribution!\" def sample(self,",
"x, cond): return 1 / (np.sqrt(2 * np.pi) * self.std) * np.exp(-(x -",
"/ (2 * self.std ** 2)) class Uniform(ProposalDistribution): __slots__ = ['spread'] def __init__(self,",
"['spread'] def __init__(self, spread: float): super().__init__() self.spread = spread assert self.spread > 0,",
"@abstractmethod def pdf(self, x: np.ndarray, cond: np.ndarray) -> np.ndarray: ... class Normal(ProposalDistribution): __slots__",
"0, \"Wrong specification of distribution!\" def sample(self, x): return x + np.random.uniform(low=-self.spread /",
"np.ndarray: ... @abstractmethod def pdf(self, x: np.ndarray, cond: np.ndarray) -> np.ndarray: ... class",
"mean self.std = spread assert self.std > 0, \"Wrong specification of distribution!\" def",
"as np class ProposalDistribution(metaclass=ABCMeta): @abstractmethod def __init__(self): ... @abstractmethod def sample(self, x: np.ndarray)",
"pdf(self, x, cond): return 1 / (np.sqrt(2 * np.pi) * self.std) * np.exp(-(x",
"sample(self, x: np.ndarray) -> np.ndarray: ... @abstractmethod def pdf(self, x: np.ndarray, cond: np.ndarray)",
"np.random.uniform(low=-self.spread / 2, high=self.spread / 2, size=x.shape) def pdf(self, x, cond): return np.array(1",
"@abstractmethod def __init__(self): ... @abstractmethod def sample(self, x: np.ndarray) -> np.ndarray: ... @abstractmethod",
"of distribution!\" def sample(self, x): return x + np.random.uniform(low=-self.spread / 2, high=self.spread /",
"self.mean = mean self.std = spread assert self.std > 0, \"Wrong specification of",
"def __init__(self, mean: float, spread: float): super().__init__() self.mean = mean self.std = spread",
"return 1 / (np.sqrt(2 * np.pi) * self.std) * np.exp(-(x - self.mean -",
"2, high=self.spread / 2, size=x.shape) def pdf(self, x, cond): return np.array(1 / self.spread)",
"1 / (np.sqrt(2 * np.pi) * self.std) * np.exp(-(x - self.mean - cond)",
"['mean', 'std'] def __init__(self, mean: float, spread: float): super().__init__() self.mean = mean self.std",
"self.std ** 2)) class Uniform(ProposalDistribution): __slots__ = ['spread'] def __init__(self, spread: float): super().__init__()",
"... @abstractmethod def sample(self, x: np.ndarray) -> np.ndarray: ... @abstractmethod def pdf(self, x:",
"x: np.ndarray) -> np.ndarray: ... @abstractmethod def pdf(self, x: np.ndarray, cond: np.ndarray) ->",
"np.ndarray) -> np.ndarray: ... @abstractmethod def pdf(self, x: np.ndarray, cond: np.ndarray) -> np.ndarray:",
"__slots__ = ['mean', 'std'] def __init__(self, mean: float, spread: float): super().__init__() self.mean =",
"of distribution!\" def sample(self, x): return x + np.random.normal(self.mean, self.std, x.shape) def pdf(self,",
"def sample(self, x): return x + np.random.normal(self.mean, self.std, x.shape) def pdf(self, x, cond):",
"x + np.random.normal(self.mean, self.std, x.shape) def pdf(self, x, cond): return 1 / (np.sqrt(2",
"distribution!\" def sample(self, x): return x + np.random.uniform(low=-self.spread / 2, high=self.spread / 2,",
"2 / (2 * self.std ** 2)) class Uniform(ProposalDistribution): __slots__ = ['spread'] def",
"def __init__(self): ... @abstractmethod def sample(self, x: np.ndarray) -> np.ndarray: ... @abstractmethod def",
"- self.mean - cond) ** 2 / (2 * self.std ** 2)) class",
"__init__(self): ... @abstractmethod def sample(self, x: np.ndarray) -> np.ndarray: ... @abstractmethod def pdf(self,",
"specification of distribution!\" def sample(self, x): return x + np.random.normal(self.mean, self.std, x.shape) def",
"from abc import ABCMeta, abstractmethod import numpy as np class ProposalDistribution(metaclass=ABCMeta): @abstractmethod def",
"pdf(self, x: np.ndarray, cond: np.ndarray) -> np.ndarray: ... class Normal(ProposalDistribution): __slots__ = ['mean',",
"... @abstractmethod def pdf(self, x: np.ndarray, cond: np.ndarray) -> np.ndarray: ... class Normal(ProposalDistribution):",
"super().__init__() self.spread = spread assert self.spread > 0, \"Wrong specification of distribution!\" def",
"return x + np.random.uniform(low=-self.spread / 2, high=self.spread / 2, size=x.shape) def pdf(self, x,",
"- cond) ** 2 / (2 * self.std ** 2)) class Uniform(ProposalDistribution): __slots__",
"* self.std) * np.exp(-(x - self.mean - cond) ** 2 / (2 *",
"self.std, x.shape) def pdf(self, x, cond): return 1 / (np.sqrt(2 * np.pi) *",
"distribution!\" def sample(self, x): return x + np.random.normal(self.mean, self.std, x.shape) def pdf(self, x,",
"'std'] def __init__(self, mean: float, spread: float): super().__init__() self.mean = mean self.std =",
"spread: float): super().__init__() self.spread = spread assert self.spread > 0, \"Wrong specification of",
"self.std = spread assert self.std > 0, \"Wrong specification of distribution!\" def sample(self,",
"sample(self, x): return x + np.random.normal(self.mean, self.std, x.shape) def pdf(self, x, cond): return",
"x): return x + np.random.uniform(low=-self.spread / 2, high=self.spread / 2, size=x.shape) def pdf(self,",
"def pdf(self, x, cond): return 1 / (np.sqrt(2 * np.pi) * self.std) *",
"cond: np.ndarray) -> np.ndarray: ... class Normal(ProposalDistribution): __slots__ = ['mean', 'std'] def __init__(self,",
"x.shape) def pdf(self, x, cond): return 1 / (np.sqrt(2 * np.pi) * self.std)",
"def __init__(self, spread: float): super().__init__() self.spread = spread assert self.spread > 0, \"Wrong",
"ProposalDistribution(metaclass=ABCMeta): @abstractmethod def __init__(self): ... @abstractmethod def sample(self, x: np.ndarray) -> np.ndarray: ...",
"0, \"Wrong specification of distribution!\" def sample(self, x): return x + np.random.normal(self.mean, self.std,"
] |
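Since a Normal proposal with mean 0 is symmetric, the Hastings correction from pdf cancels, so these classes drop straight into a plain random-walk Metropolis update. A minimal sketch under that assumption; metropolis_step and the standard-normal target are illustrative, not part of the module:

import numpy as np

def metropolis_step(x, proposal, target_pdf):
    # Propose a move, then accept elementwise with the Metropolis ratio.
    candidate = proposal.sample(x)
    accept_prob = np.minimum(1.0, target_pdf(candidate) / target_pdf(x))
    accepted = np.random.uniform(size=x.shape) < accept_prob
    return np.where(accepted, candidate, x)

x = np.zeros(1000)                        # 1000 parallel chains, started at 0
proposal = Normal(mean=0.0, spread=1.0)   # symmetric proposal from the module above
for _ in range(500):
    x = metropolis_step(x, proposal, lambda v: np.exp(-0.5 * v ** 2))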
[
"def rect(x,y,w,h,c,ax): polygon = plt.Rectangle((x,y),w,h,color=c) ax.add_patch(polygon) def dist_fill(X,Y, cmap,ax): plt.plot(X,Y,lw=0) dx = X[1]-X[0]",
"polygon = plt.Rectangle((x,y),w,h,color=c) ax.add_patch(polygon) def dist_fill(X,Y, cmap,ax): plt.plot(X,Y,lw=0) dx = X[1]-X[0] N =",
"def dist_fill(X,Y, cmap,ax): plt.plot(X,Y,lw=0) dx = X[1]-X[0] N = float(X.size) for n, (x,y)",
"cmap,ax): plt.plot(X,Y,lw=0) dx = X[1]-X[0] N = float(X.size) for n, (x,y) in enumerate(zip(X,Y)):",
"X[1]-X[0] N = float(X.size) for n, (x,y) in enumerate(zip(X,Y)): color = cmap[n,:] rect(x,0,dx,y,color,ax)",
"dx = X[1]-X[0] N = float(X.size) for n, (x,y) in enumerate(zip(X,Y)): color =",
"rect(x,y,w,h,c,ax): polygon = plt.Rectangle((x,y),w,h,color=c) ax.add_patch(polygon) def dist_fill(X,Y, cmap,ax): plt.plot(X,Y,lw=0) dx = X[1]-X[0] N",
"ax.add_patch(polygon) def dist_fill(X,Y, cmap,ax): plt.plot(X,Y,lw=0) dx = X[1]-X[0] N = float(X.size) for n,",
"= X[1]-X[0] N = float(X.size) for n, (x,y) in enumerate(zip(X,Y)): color = cmap[n,:]",
"plt.plot(X,Y,lw=0) dx = X[1]-X[0] N = float(X.size) for n, (x,y) in enumerate(zip(X,Y)): color",
"= plt.Rectangle((x,y),w,h,color=c) ax.add_patch(polygon) def dist_fill(X,Y, cmap,ax): plt.plot(X,Y,lw=0) dx = X[1]-X[0] N = float(X.size)",
"dist_fill(X,Y, cmap,ax): plt.plot(X,Y,lw=0) dx = X[1]-X[0] N = float(X.size) for n, (x,y) in",
"plt.Rectangle((x,y),w,h,color=c) ax.add_patch(polygon) def dist_fill(X,Y, cmap,ax): plt.plot(X,Y,lw=0) dx = X[1]-X[0] N = float(X.size) for"
] |
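dist_fill indexes cmap[n, :], so it expects cmap as an (N, 4) array of RGBA rows, one per bar; sampling a matplotlib colormap gives exactly that. A minimal usage sketch (the curve and colormap choice are illustrative):

import numpy as np
import matplotlib.pyplot as plt

X = np.linspace(-3, 3, 100)
Y = np.exp(-X ** 2 / 2) / np.sqrt(2 * np.pi)       # standard normal curve
cmap = plt.cm.viridis(np.linspace(0, 1, X.size))   # (100, 4) RGBA rows

fig, ax = plt.subplots()
dist_fill(X, Y, cmap, ax)   # from the snippet above
plt.show()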
[
"pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True, minified=True)) top, bottom = injector.group(needed) assert len(top) == 0 assert len(bottom)",
"bottom=True) needed = init_needed(resources=[a]) injector = TopBottomInjector(dict(bottom=True, minified=True)) with pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True, minified=True)) top,",
"needed) == b'''\\ <html><head>start of head<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /></head><body>rest of body<script type=\"text/javascript\"",
"= Library('foo', '') x1 = Resource(foo, 'a.js') needed = init_needed(resources=[x1]) injector = TopBottomInjector({})",
"needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert top.resources",
"'a.js') x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) y2 =",
"The bottom resource is y2. assert bottom.resources[0] == y2 injector = TopBottomInjector(dict(bottom=True, force_bottom=True))",
"= init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert top.resources ==",
"'') a = Resource(foo, 'a.css') b = Resource(foo, 'b.css', bottom=True) needed = init_needed(resources=[a,",
"= Resource(foo, 'c.js', depends=[x1, x2]) html = b\"<html><head>start of head</head><body>rest of body</body></html>\" needed",
"'a.js', minified='a-minified.js', bottom=True) needed = init_needed(resources=[a]) injector = TopBottomInjector(dict(bottom=True, minified=True)) with pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True,",
"0 assert len(bottom) == 1 assert bottom.resources[0].relpath == 'a-minified.js' def test_html_bottom_safe(): foo =",
"bottom=True) needed = init_needed(resources=[a, b]) injector = TopBottomInjector({'bundle': True}) top, bottom = injector.group(needed)",
"1 assert bottom.resources[0].relpath == 'a-minified.js' def test_html_bottom_safe(): foo = Library('foo', '') x1 =",
"b\"<html><head>something more</head></html>\" assert injector(html, needed) == b'''\\ <html><head>something more<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" />",
"'a.css') b = Resource(foo, 'b.css', bottom=True) needed = init_needed(resources=[a, b]) injector = TopBottomInjector({'bundle':",
"= TopBottomInjector({'bundle': True}) top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom)",
"<head> tag has attributes foo = Library('foo', '') x1 = Resource(foo, 'a.js') needed",
"top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) == 0 injector",
"= Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) y2 = Resource(foo, 'y2.js',",
"Resource(foo, 'a.js', minified='a-minified.js', bottom=True) needed = init_needed(resources=[a]) injector = TopBottomInjector(dict(bottom=True, minified=True)) with pytest.raises(ConfigurationError):",
"injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top) == 3 assert len(bottom)",
"html = b\"<html><head>start of head</head><body>rest of body</body></html>\" needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True,",
"== y2 injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert len(top) ==",
"init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top) == 3 assert",
"test_html_insert(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo, 'b.css')",
"init_needed(resources=[a, b]) injector = TopBottomInjector({'bundle': True}) top, bottom = injector.group(needed) assert len(top) ==",
"= TopBottomInjector({}) top, bottom = injector.group(needed) assert len(top) == 4 assert len(bottom) ==",
"bottom resource is y2. assert bottom.resources[0] == y2 injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top,",
"init_needed(resources=[y1, y2]) injector = TopBottomInjector({}) top, bottom = injector.group(needed) assert len(top) == 4",
"fanstatic import Library, Resource, init_needed, ConfigurationError from fanstatic.injector import TopBottomInjector def test_bundle_resources_bottomsafe(): foo",
"len(top) == 0 assert len(bottom) == 1 assert bottom.resources[0].relpath == 'a-minified.js' def test_html_bottom_safe():",
"TopBottomInjector(dict(debug=True, minified=True)) top, bottom = injector.group(needed) assert len(top) == 0 assert len(bottom) ==",
"1 assert len(bottom) == 0 injector = TopBottomInjector({'bundle': False, 'bottom': True}) top, bottom",
"test_html_bottom_safe(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo, 'b.css')",
"ConfigurationError from fanstatic.injector import TopBottomInjector def test_bundle_resources_bottomsafe(): foo = Library('foo', '') a =",
"a = Resource(foo, 'a.css') b = Resource(foo, 'b.css', bottom=True) needed = init_needed(resources=[a, b])",
"== b'''\\ <html><head>start of head<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /></head><body>rest of body<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script>",
"= Resource(foo, 'a.js', minified='a-minified.js', bottom=True) needed = init_needed(resources=[a]) injector = TopBottomInjector(dict(bottom=True, minified=True)) with",
"injector = TopBottomInjector({}) html = b\"<html><head>something more</head></html>\" assert injector(html, needed) == b'''\\ <html><head>something",
"body<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>''' def test_html_bottom_safe_used_with_minified(): foo = Library('foo', '') a",
"len(bottom) == 1 # The bottom resource is y2. assert bottom.resources[0] == y2",
"= injector.group(needed) assert len(top) == 1 assert top.resources[0] == x2 assert len(bottom) ==",
"def test_html_bottom_safe(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo,",
"init_needed(resources=[a]) injector = TopBottomInjector(dict(bottom=True, minified=True)) with pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True, minified=True)) top, bottom = injector.group(needed)",
"assert len(bottom) == 0 injector = TopBottomInjector({'bundle': False, 'bottom': True}) top, bottom =",
"assert len(bottom) == 1 assert bottom.resources[0].relpath == 'a-minified.js' def test_html_bottom_safe(): foo = Library('foo',",
"injector.group(needed) assert top.resources == [x2] assert bottom.resources == [x1, y1] def test_html_top_bottom_set_bottom(): foo",
"TopBottomInjector({}) top, bottom = injector.group(needed) assert len(top) == 4 assert len(bottom) == 0",
"= b'<html><head profile=\"http://example.org\">something</head></html>' assert injector(html, needed) == b'''\\ <html><head profile=\"http://example.org\">something<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>''' def",
"= Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) html = b\"<html><head>start of",
"fanstatic.injector import TopBottomInjector def test_bundle_resources_bottomsafe(): foo = Library('foo', '') a = Resource(foo, 'a.css')",
"force_bottom=True)) top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) == 3",
"TopBottomInjector def test_bundle_resources_bottomsafe(): foo = Library('foo', '') a = Resource(foo, 'a.css') b =",
"True}) top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) == 1",
"'a-minified.js' def test_html_bottom_safe(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 =",
"= injector.group(needed) assert top.resources == [x2] assert bottom.resources == [x1, y1] def test_html_top_bottom_set_bottom():",
"def test_html_top_bottom_set_bottom(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo,",
"bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) == 0 injector =",
"/></head><body>rest of body<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>''' def test_html_bottom_safe_used_with_minified(): foo = Library('foo',",
"len(bottom) == 0 def test_html_insert_head_with_attributes(): # ticket 72: .need() broken when <head> tag",
"TopBottomInjector({}) html = b\"<html><head>something more</head></html>\" assert injector(html, needed) == b'''\\ <html><head>something more<link rel=\"stylesheet\"",
"'y2.js', bottom=True) needed = init_needed(resources=[y1, y2]) injector = TopBottomInjector({}) top, bottom = injector.group(needed)",
"assert injector(html, needed) == b'''\\ <html><head>something more<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /> <script type=\"text/javascript\"",
"of body<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>''' def test_html_bottom_safe_used_with_minified(): foo = Library('foo', '')",
"needed) == b'''\\ <html><head>something more<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /> <script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script",
"head</head><body>rest of body</body></html>\" needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) assert injector(html, needed)",
"foo = Library('foo', '') a = Resource(foo, 'a.js', minified='a-minified.js', bottom=True) needed = init_needed(resources=[a])",
"top, bottom = injector.group(needed) assert len(top) == 0 assert len(bottom) == 1 assert",
"pytest from fanstatic import Library, Resource, init_needed, ConfigurationError from fanstatic.injector import TopBottomInjector def",
"[x2] assert bottom.resources == [x1, y1] def test_html_top_bottom_set_bottom(): foo = Library('foo', '') x1",
"needed = init_needed(resources=[y1]) injector = TopBottomInjector({}) html = b\"<html><head>something more</head></html>\" assert injector(html, needed)",
"== 0 assert len(bottom) == 1 assert bottom.resources[0].relpath == 'a-minified.js' def test_html_bottom_safe(): foo",
"Resource(foo, 'a.js') needed = init_needed(resources=[x1]) injector = TopBottomInjector({}) html = b'<html><head profile=\"http://example.org\">something</head></html>' assert",
"assert injector(html, needed) == b'''\\ <html><head profile=\"http://example.org\">something<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>''' def test_html_insert(): foo =",
"x2]) html = b\"<html><head>start of head</head><body>rest of body</body></html>\" needed = init_needed(resources=[y1]) injector =",
"= init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) assert injector(html, needed) == b'''\\ <html><head>start of",
"'c.js', depends=[x1, x2]) html = b\"<html><head>start of head</head><body>rest of body</body></html>\" needed = init_needed(resources=[y1])",
"b]) injector = TopBottomInjector({'bundle': True}) top, bottom = injector.group(needed) assert len(top) == 1",
"minified='a-minified.js', bottom=True) needed = init_needed(resources=[a]) injector = TopBottomInjector(dict(bottom=True, minified=True)) with pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True, minified=True))",
"top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) == 1 def",
"injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert top.resources == [x2] assert",
"href=\"/fanstatic/foo/b.css\" /></head><body>rest of body<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>''' def test_html_bottom_safe_used_with_minified(): foo =",
"Library, Resource, init_needed, ConfigurationError from fanstatic.injector import TopBottomInjector def test_bundle_resources_bottomsafe(): foo = Library('foo',",
"Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom",
"0 injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top) == 3 assert",
"= init_needed(resources=[y1, y2]) injector = TopBottomInjector({}) top, bottom = injector.group(needed) assert len(top) ==",
"assert bottom.resources == [x1, y1] def test_html_top_bottom_set_bottom(): foo = Library('foo', '') x1 =",
"injector(html, needed) == b'''\\ <html><head>something more<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /> <script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script>",
"= TopBottomInjector(dict(bottom=True, force_bottom=True)) assert injector(html, needed) == b'''\\ <html><head>start of head<link rel=\"stylesheet\" type=\"text/css\"",
"TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) ==",
"y1] def test_html_top_bottom_set_bottom(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 =",
"len(bottom) == 1 assert bottom.resources[0].relpath == 'a-minified.js' def test_html_bottom_safe(): foo = Library('foo', '')",
"injector = TopBottomInjector({}) top, bottom = injector.group(needed) assert len(top) == 4 assert len(bottom)",
"attributes foo = Library('foo', '') x1 = Resource(foo, 'a.js') needed = init_needed(resources=[x1]) injector",
"== 1 assert len(bottom) == 1 def test_top_bottom_insert(): foo = Library('foo', '') x1",
"== [x1, y1] def test_html_top_bottom_set_bottom(): foo = Library('foo', '') x1 = Resource(foo, 'a.js')",
"assert injector(html, needed) == b'''\\ <html><head>start of head<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /></head><body>rest of",
"type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>''' def test_html_bottom_safe_used_with_minified(): foo = Library('foo', '') a = Resource(foo, 'a.js', minified='a-minified.js',",
"= Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector({}) html =",
"y1 = Resource(foo, 'c.js', depends=[x1, x2]) y2 = Resource(foo, 'y2.js', bottom=True) needed =",
"depends=[x1, x2]) y2 = Resource(foo, 'y2.js', bottom=True) needed = init_needed(resources=[y1, y2]) injector =",
"injector.group(needed) assert len(top) == 4 assert len(bottom) == 0 injector = TopBottomInjector(dict(bottom=True)) top,",
"Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) y2 = Resource(foo, 'y2.js', bottom=True)",
"assert top.resources == [x2] assert bottom.resources == [x1, y1] def test_html_top_bottom_set_bottom(): foo =",
"rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /></head><body>rest of body<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>''' def test_html_bottom_safe_used_with_minified():",
"== 0 injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top) == 3",
"x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector({}) html = b\"<html><head>something more</head></html>\" assert injector(html,",
"injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) assert injector(html, needed) == b'''\\ <html><head>start of head<link rel=\"stylesheet\"",
"a = Resource(foo, 'a.js', minified='a-minified.js', bottom=True) needed = init_needed(resources=[a]) injector = TopBottomInjector(dict(bottom=True, minified=True))",
"len(bottom) == 0 injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top) ==",
"type=\"text/css\" href=\"/fanstatic/foo/b.css\" /></head><body>rest of body<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>''' def test_html_bottom_safe_used_with_minified(): foo",
"= TopBottomInjector({'bundle': False, 'bottom': True}) top, bottom = injector.group(needed) assert len(top) == 1",
"TopBottomInjector({'bundle': False, 'bottom': True}) top, bottom = injector.group(needed) assert len(top) == 1 assert",
"assert len(top) == 4 assert len(bottom) == 0 injector = TopBottomInjector(dict(bottom=True)) top, bottom",
"x2 assert len(bottom) == 3 def test_html_top_bottom_force_bottom(): foo = Library('foo', '') x1 =",
"top, bottom = injector.group(needed) assert len(top) == 1 assert top.resources[0] == x2 assert",
"== 3 def test_html_top_bottom_force_bottom(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2",
"injector.group(needed) assert len(top) == 1 assert len(bottom) == 3 top, bottom = injector.group(needed)",
"x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top)",
"body</body></html>\" needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) assert injector(html, needed) == b'''\\",
"= init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top) == 3",
"top, bottom = injector.group(needed) assert len(top) == 4 assert len(bottom) == 0 injector",
"= Resource(foo, 'c.js', depends=[x1, x2]) y2 = Resource(foo, 'y2.js', bottom=True) needed = init_needed(resources=[y1,",
"len(bottom) == 1 def test_top_bottom_insert(): foo = Library('foo', '') x1 = Resource(foo, 'a.js')",
"= Resource(foo, 'a.js') needed = init_needed(resources=[x1]) injector = TopBottomInjector({}) html = b'<html><head profile=\"http://example.org\">something</head></html>'",
"'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom =",
"profile=\"http://example.org\">something<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>''' def test_html_insert(): foo = Library('foo', '') x1 = Resource(foo, 'a.js')",
"bottom = injector.group(needed) assert len(top) == 1 assert top.resources[0] == x2 assert len(bottom)",
"= TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top) == 3 assert len(bottom) ==",
"= injector.group(needed) assert len(top) == 1 assert len(bottom) == 3 top, bottom =",
"injector = TopBottomInjector({}) html = b'<html><head profile=\"http://example.org\">something</head></html>' assert injector(html, needed) == b'''\\ <html><head",
"TopBottomInjector({'bundle': True}) top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) ==",
"head<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /></head><body>rest of body<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>''' def",
"Resource(foo, 'a.js') x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) html",
"assert bottom.resources[0] == y2 injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert",
"TopBottomInjector(dict(bottom=True, force_bottom=True)) assert injector(html, needed) == b'''\\ <html><head>start of head<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\"",
"== 1 # The bottom resource is y2. assert bottom.resources[0] == y2 injector",
"bottom = injector.group(needed) assert len(top) == 0 assert len(bottom) == 1 assert bottom.resources[0].relpath",
"depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert",
"len(top) == 3 assert len(bottom) == 1 # The bottom resource is y2.",
"top.resources[0] == x2 assert len(bottom) == 3 def test_html_top_bottom_force_bottom(): foo = Library('foo', '')",
"1 # The bottom resource is y2. assert bottom.resources[0] == y2 injector =",
"= TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom)",
"== 1 def test_top_bottom_insert(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2",
"injector.group(needed) assert len(top) == 0 assert len(bottom) == 1 assert bottom.resources[0].relpath == 'a-minified.js'",
"x1 = Resource(foo, 'a.js') needed = init_needed(resources=[x1]) injector = TopBottomInjector({}) html = b'<html><head",
"Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector({}) html = b\"<html><head>something",
"force_bottom=True)) top, bottom = injector.group(needed) assert top.resources == [x2] assert bottom.resources == [x1,",
"injector = TopBottomInjector({'bundle': False, 'bottom': True}) top, bottom = injector.group(needed) assert len(top) ==",
"== 3 assert len(bottom) == 1 # The bottom resource is y2. assert",
"= Resource(foo, 'a.css') b = Resource(foo, 'b.css', bottom=True) needed = init_needed(resources=[a, b]) injector",
"assert len(bottom) == 3 top, bottom = injector.group(needed) assert len(top) == 1 assert",
"<html><head>start of head<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /></head><body>rest of body<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\"",
"bottom = injector.group(needed) assert len(top) == 4 assert len(bottom) == 0 injector =",
"<html><head profile=\"http://example.org\">something<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>''' def test_html_insert(): foo = Library('foo', '') x1 = Resource(foo,",
"test_html_insert_head_with_attributes(): # ticket 72: .need() broken when <head> tag has attributes foo =",
"bottom.resources == [x1, y1] def test_html_top_bottom_set_bottom(): foo = Library('foo', '') x1 = Resource(foo,",
"assert len(top) == 0 assert len(bottom) == 1 assert bottom.resources[0].relpath == 'a-minified.js' def",
"y1 = Resource(foo, 'c.js', depends=[x1, x2]) html = b\"<html><head>start of head</head><body>rest of body</body></html>\"",
"of head</head><body>rest of body</body></html>\" needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) assert injector(html,",
"of head<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /></head><body>rest of body<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>'''",
"1 assert len(bottom) == 1 def test_top_bottom_insert(): foo = Library('foo', '') x1 =",
"import pytest from fanstatic import Library, Resource, init_needed, ConfigurationError from fanstatic.injector import TopBottomInjector",
"injector(html, needed) == b'''\\ <html><head profile=\"http://example.org\">something<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>''' def test_html_insert(): foo = Library('foo',",
"bottom = injector.group(needed) assert len(top) == 3 assert len(bottom) == 1 # The",
"'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed)",
"'a.js') x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) needed =",
"y1 = Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True))",
"top, bottom = injector.group(needed) assert len(top) == 3 assert len(bottom) == 0 def",
"bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) == 1 def test_top_bottom_insert():",
"tag has attributes foo = Library('foo', '') x1 = Resource(foo, 'a.js') needed =",
"injector.group(needed) assert len(top) == 1 assert len(bottom) == 1 def test_top_bottom_insert(): foo =",
"== b'''\\ <html><head profile=\"http://example.org\">something<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>''' def test_html_insert(): foo = Library('foo', '') x1",
"b\"<html><head>start of head</head><body>rest of body</body></html>\" needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) assert",
"= TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert top.resources == [x2] assert bottom.resources",
"minified=True)) with pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True, minified=True)) top, bottom = injector.group(needed) assert len(top) == 0",
"assert len(bottom) == 0 def test_html_insert_head_with_attributes(): # ticket 72: .need() broken when <head>",
"depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed)",
"def test_html_bottom_safe_used_with_minified(): foo = Library('foo', '') a = Resource(foo, 'a.js', minified='a-minified.js', bottom=True) needed",
"injector.group(needed) assert len(top) == 3 assert len(bottom) == 0 def test_html_insert_head_with_attributes(): # ticket",
"needed = init_needed(resources=[x1]) injector = TopBottomInjector({}) html = b'<html><head profile=\"http://example.org\">something</head></html>' assert injector(html, needed)",
"# ticket 72: .need() broken when <head> tag has attributes foo = Library('foo',",
"= init_needed(resources=[y1]) injector = TopBottomInjector({}) html = b\"<html><head>something more</head></html>\" assert injector(html, needed) ==",
"def test_bundle_resources_bottomsafe(): foo = Library('foo', '') a = Resource(foo, 'a.css') b = Resource(foo,",
"= injector.group(needed) assert len(top) == 4 assert len(bottom) == 0 injector = TopBottomInjector(dict(bottom=True))",
"TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert top.resources == [x2] assert bottom.resources ==",
"bottom.resources[0] == y2 injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert len(top)",
"force_bottom=True)) assert injector(html, needed) == b'''\\ <html><head>start of head<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /></head><body>rest",
"def test_html_insert_head_with_attributes(): # ticket 72: .need() broken when <head> tag has attributes foo",
"y2. assert bottom.resources[0] == y2 injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed)",
"== 'a-minified.js' def test_html_bottom_safe(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2",
"injector.group(needed) assert len(top) == 3 assert len(bottom) == 1 # The bottom resource",
"1 assert top.resources[0] == x2 assert len(bottom) == 3 def test_html_top_bottom_force_bottom(): foo =",
"= injector.group(needed) assert len(top) == 3 assert len(bottom) == 1 # The bottom",
"= init_needed(resources=[a]) injector = TopBottomInjector(dict(bottom=True, minified=True)) with pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True, minified=True)) top, bottom =",
"needed = init_needed(resources=[a, b]) injector = TopBottomInjector({'bundle': True}) top, bottom = injector.group(needed) assert",
"src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>''' def test_html_bottom_safe_used_with_minified(): foo = Library('foo', '') a = Resource(foo,",
"<script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>''' def test_html_bottom_safe_used_with_minified(): foo = Library('foo', '') a = Resource(foo, 'a.js',",
"y1 = Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector({}) html",
"TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top) == 3 assert len(bottom) == 1",
"test_top_bottom_insert(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo, 'b.css')",
"Resource(foo, 'c.js', depends=[x1, x2]) html = b\"<html><head>start of head</head><body>rest of body</body></html>\" needed =",
"init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert top.resources == [x2]",
"'c.js', depends=[x1, x2]) y2 = Resource(foo, 'y2.js', bottom=True) needed = init_needed(resources=[y1, y2]) injector",
"'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) html = b\"<html><head>start of head</head><body>rest of",
"len(top) == 1 assert len(bottom) == 3 top, bottom = injector.group(needed) assert len(top)",
"assert len(top) == 1 assert len(bottom) == 3 top, bottom = injector.group(needed) assert",
"== [x2] assert bottom.resources == [x1, y1] def test_html_top_bottom_set_bottom(): foo = Library('foo', '')",
"== 1 assert len(bottom) == 3 top, bottom = injector.group(needed) assert len(top) ==",
"'') x1 = Resource(foo, 'a.js') needed = init_needed(resources=[x1]) injector = TopBottomInjector({}) html =",
"Library('foo', '') x1 = Resource(foo, 'a.js') needed = init_needed(resources=[x1]) injector = TopBottomInjector({}) html",
"init_needed, ConfigurationError from fanstatic.injector import TopBottomInjector def test_bundle_resources_bottomsafe(): foo = Library('foo', '') a",
"1 assert len(bottom) == 3 top, bottom = injector.group(needed) assert len(top) == 1",
"bottom=True) needed = init_needed(resources=[y1, y2]) injector = TopBottomInjector({}) top, bottom = injector.group(needed) assert",
"Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True)) top, bottom =",
"top.resources == [x2] assert bottom.resources == [x1, y1] def test_html_top_bottom_set_bottom(): foo = Library('foo',",
"import Library, Resource, init_needed, ConfigurationError from fanstatic.injector import TopBottomInjector def test_bundle_resources_bottomsafe(): foo =",
"ticket 72: .need() broken when <head> tag has attributes foo = Library('foo', '')",
"len(top) == 3 assert len(bottom) == 0 def test_html_insert_head_with_attributes(): # ticket 72: .need()",
"== 3 top, bottom = injector.group(needed) assert len(top) == 1 assert top.resources[0] ==",
"'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector({}) html = b\"<html><head>something more</head></html>\"",
"Resource(foo, 'a.js') x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) needed",
"= Resource(foo, 'a.js') x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2])",
"Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) html = b\"<html><head>start of head</head><body>rest",
"broken when <head> tag has attributes foo = Library('foo', '') x1 = Resource(foo,",
"html = b\"<html><head>something more</head></html>\" assert injector(html, needed) == b'''\\ <html><head>something more<link rel=\"stylesheet\" type=\"text/css\"",
"== 0 def test_html_insert_head_with_attributes(): # ticket 72: .need() broken when <head> tag has",
"len(bottom) == 3 def test_html_top_bottom_force_bottom(): foo = Library('foo', '') x1 = Resource(foo, 'a.js')",
"len(top) == 1 assert len(bottom) == 1 def test_top_bottom_insert(): foo = Library('foo', '')",
"foo = Library('foo', '') a = Resource(foo, 'a.css') b = Resource(foo, 'b.css', bottom=True)",
"= TopBottomInjector({}) html = b'<html><head profile=\"http://example.org\">something</head></html>' assert injector(html, needed) == b'''\\ <html><head profile=\"http://example.org\">something<script",
"Resource(foo, 'y2.js', bottom=True) needed = init_needed(resources=[y1, y2]) injector = TopBottomInjector({}) top, bottom =",
"foo = Library('foo', '') x1 = Resource(foo, 'a.js') needed = init_needed(resources=[x1]) injector =",
"needed) == b'''\\ <html><head profile=\"http://example.org\">something<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>''' def test_html_insert(): foo = Library('foo', '')",
"Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector =",
"bottom = injector.group(needed) assert top.resources == [x2] assert bottom.resources == [x1, y1] def",
"of body</body></html>\" needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) assert injector(html, needed) ==",
"'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) y2 = Resource(foo, 'y2.js', bottom=True) needed",
"depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector({}) html = b\"<html><head>something more</head></html>\" assert",
"== 1 assert top.resources[0] == x2 assert len(bottom) == 3 def test_html_top_bottom_force_bottom(): foo",
"x1 = Resource(foo, 'a.js') x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1,",
"'b.css', bottom=True) needed = init_needed(resources=[a, b]) injector = TopBottomInjector({'bundle': True}) top, bottom =",
"= injector.group(needed) assert len(top) == 1 assert len(bottom) == 0 injector = TopBottomInjector({'bundle':",
"Library('foo', '') a = Resource(foo, 'a.js', minified='a-minified.js', bottom=True) needed = init_needed(resources=[a]) injector =",
"def test_html_insert(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo,",
"init_needed(resources=[y1]) injector = TopBottomInjector({}) html = b\"<html><head>something more</head></html>\" assert injector(html, needed) == b'''\\",
"len(bottom) == 0 injector = TopBottomInjector({'bundle': False, 'bottom': True}) top, bottom = injector.group(needed)",
"top, bottom = injector.group(needed) assert top.resources == [x2] assert bottom.resources == [x1, y1]",
"injector.group(needed) assert len(top) == 1 assert top.resources[0] == x2 assert len(bottom) == 3",
"injector.group(needed) assert len(top) == 1 assert len(bottom) == 0 injector = TopBottomInjector({'bundle': False,",
"Resource(foo, 'a.js') x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) y2",
"foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo, 'b.css') y1",
"= Resource(foo, 'y2.js', bottom=True) needed = init_needed(resources=[y1, y2]) injector = TopBottomInjector({}) top, bottom",
"= Library('foo', '') a = Resource(foo, 'a.js', minified='a-minified.js', bottom=True) needed = init_needed(resources=[a]) injector",
"4 assert len(bottom) == 0 injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert",
"Resource(foo, 'b.css', bottom=True) needed = init_needed(resources=[a, b]) injector = TopBottomInjector({'bundle': True}) top, bottom",
"TopBottomInjector({}) html = b'<html><head profile=\"http://example.org\">something</head></html>' assert injector(html, needed) == b'''\\ <html><head profile=\"http://example.org\">something<script type=\"text/javascript\"",
"'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True,",
"init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) assert injector(html, needed) == b'''\\ <html><head>start of head<link",
"= injector.group(needed) assert len(top) == 1 assert len(bottom) == 1 def test_top_bottom_insert(): foo",
"True}) top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) == 0",
"assert len(top) == 3 assert len(bottom) == 0 def test_html_insert_head_with_attributes(): # ticket 72:",
"assert len(bottom) == 0 injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top)",
"injector = TopBottomInjector(dict(bottom=True, minified=True)) with pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True, minified=True)) top, bottom = injector.group(needed) assert",
"is y2. assert bottom.resources[0] == y2 injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom =",
"when <head> tag has attributes foo = Library('foo', '') x1 = Resource(foo, 'a.js')",
"# The bottom resource is y2. assert bottom.resources[0] == y2 injector = TopBottomInjector(dict(bottom=True,",
"len(top) == 1 assert len(bottom) == 0 injector = TopBottomInjector({'bundle': False, 'bottom': True})",
"Resource(foo, 'c.js', depends=[x1, x2]) y2 = Resource(foo, 'y2.js', bottom=True) needed = init_needed(resources=[y1, y2])",
"x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) y2 = Resource(foo,",
"x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) html = b\"<html><head>start",
"test_html_top_bottom_set_bottom(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo, 'b.css')",
"= Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top,",
"= injector.group(needed) assert len(top) == 0 assert len(bottom) == 1 assert bottom.resources[0].relpath ==",
"test_html_bottom_safe_used_with_minified(): foo = Library('foo', '') a = Resource(foo, 'a.js', minified='a-minified.js', bottom=True) needed =",
"test_bundle_resources_bottomsafe(): foo = Library('foo', '') a = Resource(foo, 'a.css') b = Resource(foo, 'b.css',",
"has attributes foo = Library('foo', '') x1 = Resource(foo, 'a.js') needed = init_needed(resources=[x1])",
"needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) assert injector(html, needed) == b'''\\ <html><head>start",
".need() broken when <head> tag has attributes foo = Library('foo', '') x1 =",
"= b\"<html><head>start of head</head><body>rest of body</body></html>\" needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True))",
"'a.js') needed = init_needed(resources=[x1]) injector = TopBottomInjector({}) html = b'<html><head profile=\"http://example.org\">something</head></html>' assert injector(html,",
"= Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector",
"top, bottom = injector.group(needed) assert len(top) == 3 assert len(bottom) == 1 #",
"= b\"<html><head>something more</head></html>\" assert injector(html, needed) == b'''\\ <html><head>something more<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\"",
"assert len(bottom) == 1 def test_top_bottom_insert(): foo = Library('foo', '') x1 = Resource(foo,",
"x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert",
"import TopBottomInjector def test_bundle_resources_bottomsafe(): foo = Library('foo', '') a = Resource(foo, 'a.css') b",
"y1 = Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True)) top,",
"init_needed(resources=[x1]) injector = TopBottomInjector({}) html = b'<html><head profile=\"http://example.org\">something</head></html>' assert injector(html, needed) == b'''\\",
"= Resource(foo, 'b.css', bottom=True) needed = init_needed(resources=[a, b]) injector = TopBottomInjector({'bundle': True}) top,",
"72: .need() broken when <head> tag has attributes foo = Library('foo', '') x1",
"3 def test_html_top_bottom_force_bottom(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 =",
"src=\"/fanstatic/foo/c.js\"></script></body></html>''' def test_html_bottom_safe_used_with_minified(): foo = Library('foo', '') a = Resource(foo, 'a.js', minified='a-minified.js', bottom=True)",
"b'''\\ <html><head profile=\"http://example.org\">something<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>''' def test_html_insert(): foo = Library('foo', '') x1 =",
"'bottom': True}) top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) ==",
"assert top.resources[0] == x2 assert len(bottom) == 3 def test_html_top_bottom_force_bottom(): foo = Library('foo',",
"bottom = injector.group(needed) assert len(top) == 3 assert len(bottom) == 0 def test_html_insert_head_with_attributes():",
"= Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo, 'b.css') y1 =",
"3 assert len(bottom) == 0 def test_html_insert_head_with_attributes(): # ticket 72: .need() broken when",
"profile=\"http://example.org\">something</head></html>' assert injector(html, needed) == b'''\\ <html><head profile=\"http://example.org\">something<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>''' def test_html_insert(): foo",
"src=\"/fanstatic/foo/a.js\"></script></head></html>''' def test_html_insert(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 =",
"assert len(top) == 1 assert len(bottom) == 0 injector = TopBottomInjector({'bundle': False, 'bottom':",
"needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top) ==",
"'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector({})",
"Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo, 'b.css') y1 = Resource(foo,",
"TopBottomInjector(dict(bottom=True, minified=True)) with pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True, minified=True)) top, bottom = injector.group(needed) assert len(top) ==",
"top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) == 3 top,",
"= TopBottomInjector(dict(bottom=True, minified=True)) with pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True, minified=True)) top, bottom = injector.group(needed) assert len(top)",
"'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True))",
"from fanstatic import Library, Resource, init_needed, ConfigurationError from fanstatic.injector import TopBottomInjector def test_bundle_resources_bottomsafe():",
"more</head></html>\" assert injector(html, needed) == b'''\\ <html><head>something more<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /> <script",
"assert len(top) == 3 assert len(bottom) == 1 # The bottom resource is",
"test_html_top_bottom_force_bottom(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo, 'b.css')",
"type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>''' def test_html_insert(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2",
"= init_needed(resources=[x1]) injector = TopBottomInjector({}) html = b'<html><head profile=\"http://example.org\">something</head></html>' assert injector(html, needed) ==",
"resource is y2. assert bottom.resources[0] == y2 injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom",
"= Library('foo', '') a = Resource(foo, 'a.css') b = Resource(foo, 'b.css', bottom=True) needed",
"False, 'bottom': True}) top, bottom = injector.group(needed) assert len(top) == 1 assert len(bottom)",
"injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert len(top) == 1 assert",
"Resource(foo, 'a.css') b = Resource(foo, 'b.css', bottom=True) needed = init_needed(resources=[a, b]) injector =",
"def test_top_bottom_insert(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo,",
"b'''\\ <html><head>something more<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /> <script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></head></html>'''",
"0 injector = TopBottomInjector({'bundle': False, 'bottom': True}) top, bottom = injector.group(needed) assert len(top)",
"type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\" src=\"/fanstatic/foo/c.js\"></script></body></html>''' def test_html_bottom_safe_used_with_minified(): foo = Library('foo', '') a =",
"<reponame>minddistrict/fanstatic<gh_stars>1-10 import pytest from fanstatic import Library, Resource, init_needed, ConfigurationError from fanstatic.injector import",
"'a.js') x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) html =",
"with pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True, minified=True)) top, bottom = injector.group(needed) assert len(top) == 0 assert",
"= TopBottomInjector({}) html = b\"<html><head>something more</head></html>\" assert injector(html, needed) == b'''\\ <html><head>something more<link",
"[x1, y1] def test_html_top_bottom_set_bottom(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2",
"TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed) assert len(top) == 3 assert len(bottom) == 0",
"0 def test_html_insert_head_with_attributes(): # ticket 72: .need() broken when <head> tag has attributes",
"assert bottom.resources[0].relpath == 'a-minified.js' def test_html_bottom_safe(): foo = Library('foo', '') x1 = Resource(foo,",
"== 1 assert len(bottom) == 0 injector = TopBottomInjector({'bundle': False, 'bottom': True}) top,",
"== x2 assert len(bottom) == 3 def test_html_top_bottom_force_bottom(): foo = Library('foo', '') x1",
"'') x1 = Resource(foo, 'a.js') x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js',",
"b'''\\ <html><head>start of head<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /></head><body>rest of body<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script",
"== 4 assert len(bottom) == 0 injector = TopBottomInjector(dict(bottom=True)) top, bottom = injector.group(needed)",
"html = b'<html><head profile=\"http://example.org\">something</head></html>' assert injector(html, needed) == b'''\\ <html><head profile=\"http://example.org\">something<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>'''",
"depends=[x1, x2]) html = b\"<html><head>start of head</head><body>rest of body</body></html>\" needed = init_needed(resources=[y1]) injector",
"assert len(bottom) == 3 def test_html_top_bottom_force_bottom(): foo = Library('foo', '') x1 = Resource(foo,",
"assert len(top) == 1 assert top.resources[0] == x2 assert len(bottom) == 3 def",
"len(top) == 1 assert top.resources[0] == x2 assert len(bottom) == 3 def test_html_top_bottom_force_bottom():",
"'') a = Resource(foo, 'a.js', minified='a-minified.js', bottom=True) needed = init_needed(resources=[a]) injector = TopBottomInjector(dict(bottom=True,",
"assert len(bottom) == 1 # The bottom resource is y2. assert bottom.resources[0] ==",
"b = Resource(foo, 'b.css', bottom=True) needed = init_needed(resources=[a, b]) injector = TopBottomInjector({'bundle': True})",
"len(bottom) == 3 top, bottom = injector.group(needed) assert len(top) == 1 assert top.resources[0]",
"needed = init_needed(resources=[a]) injector = TopBottomInjector(dict(bottom=True, minified=True)) with pytest.raises(ConfigurationError): TopBottomInjector(dict(debug=True, minified=True)) top, bottom",
"Library('foo', '') a = Resource(foo, 'a.css') b = Resource(foo, 'b.css', bottom=True) needed =",
"y2]) injector = TopBottomInjector({}) top, bottom = injector.group(needed) assert len(top) == 4 assert",
"== 3 assert len(bottom) == 0 def test_html_insert_head_with_attributes(): # ticket 72: .need() broken",
"Resource, init_needed, ConfigurationError from fanstatic.injector import TopBottomInjector def test_bundle_resources_bottomsafe(): foo = Library('foo', '')",
"x2 = Resource(foo, 'b.css') y1 = Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1])",
"y2 injector = TopBottomInjector(dict(bottom=True, force_bottom=True)) top, bottom = injector.group(needed) assert len(top) == 1",
"== 1 assert bottom.resources[0].relpath == 'a-minified.js' def test_html_bottom_safe(): foo = Library('foo', '') x1",
"len(top) == 4 assert len(bottom) == 0 injector = TopBottomInjector(dict(bottom=True)) top, bottom =",
"= injector.group(needed) assert len(top) == 3 assert len(bottom) == 0 def test_html_insert_head_with_attributes(): #",
"needed = init_needed(resources=[y1, y2]) injector = TopBottomInjector({}) top, bottom = injector.group(needed) assert len(top)",
"injector = TopBottomInjector({'bundle': True}) top, bottom = injector.group(needed) assert len(top) == 1 assert",
"def test_html_top_bottom_force_bottom(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 = Resource(foo,",
"from fanstatic.injector import TopBottomInjector def test_bundle_resources_bottomsafe(): foo = Library('foo', '') a = Resource(foo,",
"== 0 injector = TopBottomInjector({'bundle': False, 'bottom': True}) top, bottom = injector.group(needed) assert",
"b'<html><head profile=\"http://example.org\">something</head></html>' assert injector(html, needed) == b'''\\ <html><head profile=\"http://example.org\">something<script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script></head></html>''' def test_html_insert():",
"1 def test_top_bottom_insert(): foo = Library('foo', '') x1 = Resource(foo, 'a.js') x2 =",
"bottom = injector.group(needed) assert len(top) == 1 assert len(bottom) == 3 top, bottom",
"minified=True)) top, bottom = injector.group(needed) assert len(top) == 0 assert len(bottom) == 1",
"y2 = Resource(foo, 'y2.js', bottom=True) needed = init_needed(resources=[y1, y2]) injector = TopBottomInjector({}) top,",
"= init_needed(resources=[a, b]) injector = TopBottomInjector({'bundle': True}) top, bottom = injector.group(needed) assert len(top)",
"= Resource(foo, 'c.js', depends=[x1, x2]) needed = init_needed(resources=[y1]) injector = TopBottomInjector(dict(bottom=True)) top, bottom",
"3 assert len(bottom) == 1 # The bottom resource is y2. assert bottom.resources[0]",
"bottom.resources[0].relpath == 'a-minified.js' def test_html_bottom_safe(): foo = Library('foo', '') x1 = Resource(foo, 'a.js')",
"x2]) y2 = Resource(foo, 'y2.js', bottom=True) needed = init_needed(resources=[y1, y2]) injector = TopBottomInjector({})",
"== b'''\\ <html><head>something more<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /> <script type=\"text/javascript\" src=\"/fanstatic/foo/a.js\"></script> <script type=\"text/javascript\"",
"assert len(top) == 1 assert len(bottom) == 1 def test_top_bottom_insert(): foo = Library('foo',",
"injector(html, needed) == b'''\\ <html><head>start of head<link rel=\"stylesheet\" type=\"text/css\" href=\"/fanstatic/foo/b.css\" /></head><body>rest of body<script",
"3 top, bottom = injector.group(needed) assert len(top) == 1 assert top.resources[0] == x2"
] |
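The options these tests exercise on TopBottomInjector (bottom, force_bottom, minified, debug, bundle) are normally supplied to fanstatic's WSGI wrapper rather than to the injector directly. A minimal sketch, assuming fanstatic's documented Fanstatic middleware accepts them as keyword arguments; app here is a stand-in application, not part of the test file above:

from fanstatic import Fanstatic

def app(environ, start_response):
    # Stand-in WSGI app returning a page for the injector to rewrite.
    start_response('200 OK', [('Content-Type', 'text/html')])
    return [b'<html><head></head><body></body></html>']

# Resources .need()-ed during a request are injected into the response,
# with script tags forced to the bottom, as in test_top_bottom_insert.
application = Fanstatic(app, bottom=True, force_bottom=True)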
Fragments of a second file: an Enzian power-sequencing model built on sequence_generation (Node, Input, Output, Constraint, Wire, PowerState, Stateful_Node, intersect, State_Space_Error, unite_dict, state_union, SET, empty_intersection), together with math, functools.partial and z3. The recoverable structure comprises regulator and monitor node classes (INA226, MAX20751, MAX15301, MAX8869, NCP, IR, ISL, ISPPAC, SI5395), supply and infrastructure classes (PSU, Main_PSU, PowerSupply, PowerConsumer, Bus, Clock, Oscillator, BMC), stateful load models (CPU2, CPU_3, ThunderX, ThunderX_EVAL3, FPGA, FPGA_EVAL3), and the enzian_nodes, enzian_nodes_EVAL3 and enzian_wires tables. These tables bind bus addresses to devices (the U20/U35 ISPPACs at 0x60/0x61, the U34 IR3581 with loop addresses 0x60/0x62, MAX20751s at 0x72/0x73, MAX15301s at 0x11/0x1B, INA226 monitors at 0x40, 0x41, 0x44 and 0x45) and route rails such as MGTAVCC, MGTAVTT, VCCINT, VCCAUX and the DDR13/DDR24 supplies between the PSUs, the regulators and the CPU and FPGA. Nodes express rail windows as lists of (min, max) intervals, e.g. (873, 927) or (2970, 3630), check them with intersect() and State_Space_Error in their update() methods, and emit configuration commands such as "init_device('%s', False)" and "wait_for_voltage(...)" from configure() and their monitor closures.
"\"SHDN\" : [{0}]} V_IN = Input([(0, 6000)], \"power\") SHDN = Input([{0, 1}], \"logical\")",
"[(873, 927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"), ({vcco : [(1746, 1854)], vcc_2v5_ddr13",
"5500)], \"V_EN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def __init__(self, name, bus_addr,",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], [ Constraint([{0, 1},",
"io, aux, vcco, vadj, vcc_2v5_ddr13, vcc_2v5_ddr24, vcc_ddr13, vcc_ddr24, vtt_ddr13, vtt_ddr24, vtt, mgtaux_l, mgtaux_r,",
"def update(self, states): try: intersect(states[self.V_OUT.name], [(600, 5250)]) self.current = states[self.V_OUT.name] self.is_default = True",
"{(\"U20\", \"VMON6\"), (\"cpu\", \"VDD_15\")}), (\"en_2v5_cpu13\", \"U20\", \"OUT9\", {(\"IC15\", \"V_EN\")}), #to NCP (\"2v5_cpu13\", \"IC15\",",
"MAX20751(Node): implicit_on = {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]} implicit_off = {\"VR_ON\": [{0}],",
"\"CLK\", {\"VDD\": [(0, 2599)]}))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Oscillator, self).__init__(name, bus_addr,",
"{(\"U20\", \"VMON10\"), (\"U31\", \"VBUS\"), (\"cpu\", \"VDD_DDR24\"), (\"U25\", \"VRI\")}), (\"vdd_core_en\", \"U20\", \"OUT6\", {(\"U34\", \"EN\")}),",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11_ATT\", name)) VMON12 = Input([(0, 5734)], \"monitor\", lambda",
": [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1425, 1575)], ddr13_2v5 :",
"_ in inputs: unite_dict(req, node.bus_req()) print(req) return req def construct_req_off(self, inputs): req =",
"[(self.threshold, 13200)]) except State_Space_Error: self.configured = False return try: intersect(states[self.VCC.name], [(2900, 3630)]) except",
"super(SI5395, self).__init__(name, bus_addr, SI5395) def update(self, states): try: intersect(states[self.VDD.name], [(2600, 3600)]) except State_Space_Error:",
"12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{1}]})), Constraint([(0, 0)],",
"i in range(0, 20): self.generate_output(i) super(ISPPAC, self).__init__(name, bus_addr, ISPPAC) self.configured = False def",
"\"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU_3, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU_3) class",
"Wire.cpu_clk_ok) B_FDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1},",
"def bus_req(self): return {} def bus_req_off(self): return {} def update(self, states): try: intersect(states[self.V_OUT.name],",
"930)], vdd15 : [(1450, 1550)], ddr13 : [(1425, 1575)], ddr24: [(1425, 1575)], ddr24_2v5",
"[1500, \"max15301_15_vdd_oct\"]), #1V5_VDD_OCT p.70 (\"IC11\", 0x12, MAX15301, [1800, \"max15301_vadj_1v8\"]), #VADJ_1V8 p.91 (\"IC13\", 0x15,",
"\"VDD_2V5_DDR24\")}), (\"vtt_ddrcpu24\", \"U25\", \"VREF\", {(\"U20\", \"VMON12\"), (\"cpu\", \"VTT_DDR24\")}), (\"pll_ref_clk\", \"U11\", \"CLK\", {(\"cpu\", \"PLL_REF_CLK\")}),",
"(\"U34\", 0x0, IR, [4500, \"ir3581\", \"ir3581_loop_vdd_core\", \"ir3581_loop_0v9_vdd_oct\", 0x60, 0x62]), #VDD_CORE, VDD_OCT p 77",
"\"VREF\", \"implicit_on\"), complex_constraints= [(lambda x1, x2: z3.Or(x1 * 2 == x2, x1 *",
"vadj: [(1746, 1845)]}, \"\"), ##### FOR EVAL 3: ################### ({vcco : [(1746, 1854)],",
"inputs: {}, lambda node, name, inputs: node.construct_dependency(name, node.construct_req_off(inputs))) ], \"bus\") def __init__(self, name,",
"ddr13 : [(1425, 1575)], ddr24: [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], #not imposed",
"VMON8 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8\", name)) VMON8_ATT = Input([(0,",
"\"VMON5\"), (\"cpu\", \"VDD_09\")}), (\"en_1v5_vdd_oct\", \"U20\", \"OUT8\", {(\"IC4\", \"EN\")}), (\"1v5_vdd_oct\", \"IC4\", \"V_OUT\", {(\"U20\", \"VMON6\"),",
"* 1.01)) super(MAX15053, self).__init__(name, bus_addr, MAX15053) def binary_multidimensional(decimal): binary = bin(decimal)[2:] #remove 0b",
"super(FPGA, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA) #EVAL 3 version of the FPGA, comments indicate",
"INA226) self.configured = False def ina_monitor(self, wire_name): def fun(value, states, node=self, wire=wire_name): if",
": [(1164, 1236)]}, \"\"), ({mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746, 1854)]}, \"\"),",
"VCC = Input([(0, 4500)], \"power\", lambda node, name: node.isppac_monitor(\"VCCA\", name)) def __init__(self, name,",
"(3300, 3300), (50, 50)]}, \"\"), ({vcc : [(873, 927)]}, \"\"), ({io : [(873,",
": [{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({en2 : [{1}]}, \"\"), ({vdd33:",
"= Output([(0, 1600)], isl_outputs(), \"power\") def __init__(self, name, bus_addr): self.is_default = False super(ISL,",
"p.70 (\"IC11\", 0x12, MAX15301, [1800, \"max15301_vadj_1v8\"]), #VADJ_1V8 p.91 (\"IC13\", 0x15, MAX15301, [900, \"max15301_vccintio_bram_fpga\"]),",
"1}], \"bus\") #loop 1 and loop 2 will have different addresses... VCC =",
"0x0, MAX15053, [2500]), #SYS_2V5_24 (\"IC15\", 0x0, MAX15053, [2500]), #2V5_CPU13 p 71 (\"IC16\", 0x0,",
"[900, \"max20751_mgtavcc_fpga\"]), #MGTACC_FPGA p 85 (\"U41\", 0x73, MAX20751, [1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA p 87 (\"U51\",",
"class ISL(Node): implicit_off = {\"VCC\" : [(0, 4300)], \"EN_PWR\" : [{0, 1}], \"EN_VTT\"",
"0)], ddr13_2v5 : [(0, 0)], vttddr24 : [(0, 0)], vttddr13 : [(0, 0)],",
"return except State_Space_Error: pass class NCP(Node): implicit_on = {\"VRI\" : [(868, 3600)], \"VCC\"",
"\"VCC\"), (\"U30\", \"VCC\"), (\"U43\", \"VCC\"), (\"U47\", \"VCC\")}), (\"5vsb_psup\", \"main_psu\", \"V5SB_PSU\", {(\"U20\", \"VCC_IN\"), (\"U35\",",
"after_set={\"EN_PWR\"}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"}), partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, before_complete=",
"multiplier, 0.00105 * list(value[0])[0] * multiplier, node.device, pinname )) return (True, \"\\n\".join(commands)) return",
"({vdd09 : [(870, 930)], vdd15 : [(1450, 1550)]}, \"\"), ({ddr24 : [(1425, 1575)],",
"State_Space_Error: self.configured = False def configure(self): if self.configured: return [] else: self.configured =",
"construct_req(self, inputs): req = {} for node, _ in inputs: unite_dict(req, node.bus_req()) print(req)",
"(\"en_2v5_cpu24\", \"U20\", \"OUT10\", {(\"IC16\", \"V_EN\")}), #to NCP (\"2v5_cpu24\", \"IC16\", \"V_OUT\", {(\"U20\", \"VMON8_ATT\"), (\"U25\",",
"\"V_EN\" : [{1}]} implicit_off = lambda _, threshold: {\"V_IN\" : [(0, (max(int(threshold *",
"% (self.device) ] class Clock(Node): CLK = Output([(0, 3300), (0, 60)], [([(0, 3300),",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON6\", name)) VMON7 = Input([(0, 5734)], \"monitor\", lambda node,",
"(\"IC16\", \"V_IN\"), (\"cpu\", \"VDD_IO33\"), (\"U34\", \"VCC\"), (\"U57\", \"VDD\"), (\"U11\", \"VDD\"), (\"U16\", \"VDD\"), (\"oscillator\",",
"3600)], \"VCC\" : [(2375, 5500)]} implicit_off = {\"VRI\" : [(0, 868)], \"VCC\" :",
"\"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX, self).__init__(name,",
"node.indep(\"CLK\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Clock, self).__init__(name, bus_addr, Clock) class PowerConsumer(Node):",
"lambda node, name: node.isppac_monitor(\"VCCINP\", name)) VCC = Input([(0, 4500)], \"power\", lambda node, name:",
"0x0, NCP, []), (\"U25\", 0x0, NCP, []), (\"U39\", 0x0, NCP, []), (\"U40\", 0x0,",
"states): try: intersect(states[self.VDD.name], [(2600, 3600)]) except State_Space_Error: self.configured = False def configure(self): if",
"list(value[0])[0], node.device )) return (True, \"\\n\".join(commands)) else: return (False, \"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s',",
"\"oscillator\", \"CLK\", {(\"U57\", \"CLK_IN\")}), (\"clk_main\", \"U57\", \"CLK\", {(\"U11\", \"CLK_IN\"), (\"U16\", \"CLK_IN\")}), (\"clock_flol\", \"bmc\",",
"\"U25\", \"VREF\", {(\"U20\", \"VMON12\"), (\"cpu\", \"VTT_DDR24\")}), (\"pll_ref_clk\", \"U11\", \"CLK\", {(\"cpu\", \"PLL_REF_CLK\")}), (\"bus\", \"power_bus\",",
"= lambda _, thresh: {\"V_IN\" : [(0, max(thresh + 499, 2699))], \"SHDN\" :",
"{\"VCC\" : [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN\" : [{0}]} implicit_off_2 =",
"0x45, INA226, [\"ina226_ddr_cpu_24\"]), #TODO: add real names of MAX15301s (\"IC12\", 0x11, MAX15301, [1800,",
"{(\"U35\", \"VMON2_ATT\"), (\"U20\", \"VMON2_ATT\"), (\"U26\", \"VCC\"), (\"U30\", \"VCC\"), (\"U43\", \"VCC\"), (\"U47\", \"VCC\")}), (\"5vsb_psup\",",
"\"C_PLL_DC_OK\", {(\"cpu\", \"PLL_DC_OK\")}), (\"en_vdd_ddrcpu13\", \"U20\", \"OUT11\", {(\"U26\", \"EN_PWR\")}), (\"en_vdd_ddrcpu24\", \"U20\", \"OUT12\", {(\"U30\", \"EN_PWR\")}),",
": [(max(thresh + 500, 2700), 5500)], \"SHDN\" : [{1}]} implicit_off = lambda _,",
"[(1450, 1550)]}, \"\"), #### REGULAR TRANSITION STEP ######### #({ddr24 : [(1425, 1575)], ddr24_2v5",
"(\"IC9\", \"V_IN\"), (\"U51\", \"VDD33\")}), (\"en_mgtavtt_fpga\", \"U35\", \"OUT14\", {(\"U41\", \"VR_ON\")}), (\"mgtavtt_fpga\", \"U41\", \"V_OUT\", {(\"fpga\",",
"{(\"fpga\", \"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\", \"U35\", \"OUT13\", {(\"IC13\", \"EN\")}), (\"vccintio_bram_fpga\", \"IC13\", \"V_OUT\", {(\"U35\", \"VMON10\"), (\"fpga\",",
"[(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ##################################### ({mgtavcc :",
"[ \"init_device('%s', False)\" % self.device, \"init_device('%s', False)\" % self.loop1, \"init_device('%s', False)\" % self.loop2",
"\"SHDN\")}), (\"mgtvccaux_l\", \"IC5\", \"V_OUT\", {(\"U35\", \"VMON8\"), (\"fpga\", \"MGTVCCAUX_L\")}), (\"mgtvccaux_r\", \"IC6\", \"V_OUT\", {(\"U35\", \"VMON9\"),",
"[(0, 0)]}, \"\"), ({vcc : [(0, 0)]}, \"\"), ], \"POWERED_DOWN\" : [] }),",
"(\"cpu\", \"VDD_DDR13\"), (\"U24\", \"VRI\")}), (\"vdd_ddrcpu24\", \"U30\", \"VOUT\", {(\"U20\", \"VMON10\"), (\"U31\", \"VBUS\"), (\"cpu\", \"VDD_DDR24\"),",
"VALUES FOR EVAL 3: ##### vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)],",
"3300), (0, 50)], \"clock\") VDD = Input([(0, 1210)], \"power\") VDD_09 = Input([(0, 945)],",
": [(700, 800)], io33 : [(3140, 3460)], }, { \"POWERED_DOWN\": [ ({clk: [(0,",
"5500)]}, partial(Constraint.explicit, name, {\"VCC\", \"VCC_IN\"}, set())), Constraint([{0}], {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 6000)]},",
"[(868, 3600)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_on\"), complex_constraints= [(lambda x1, x2:",
"3040)], {\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN\" : [{1}], \"BUS\":",
"[(1200, 1200)], vcc_ddr24 : [(1200, 1200)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550,",
"0), (3300, 3300), (50, 50)]}, partial(Constraint.implicit, \"CLK\", \"implicit_on\"), dependency_update= (Constraint.is_configured, [partial(Constraint.implicit, \"CLK\", \"implicit_on\"),",
"partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{0}]}))], \"power\") V12_PSU = Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\"",
"self.device = device self.default = default self.is_default = False self.current = [(default, default)]",
"3630)], \"VDDH\":[(8500, 14000)]} implicit_off = {\"VR_ON\": [{0}], \"VDD33\": [0, 2800], \"VDDH\": [(0, 8499)]}",
"\"power\") CLK_IN = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK = Output([(0,",
"\"VDD33\"), (\"IC5\", \"V_IN\"), (\"IC6\", \"V_IN\"), (\"IC7\", \"V_IN\"), (\"IC8\", \"V_IN\"), (\"IC9\", \"V_IN\"), (\"U51\", \"VDD33\")}),",
"\"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\", \"EN_2\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\"",
"unite_dict(req, node.bus_req()) print(req) return req def construct_req_off(self, inputs): req = {} for node,",
"0), (3300, 3300), (0, 50)], \"clock\") CLK = Output([(0, 0), (3300, 3300), (0,",
"[{1}], clk : [(0, 0), (3300, 3300), (50, 50)], vdd : [(940, 980)],",
"1}, {0, 1}], \"VCC\" : [(0, 5250)], \"EN_PWR\" : [{0}], \"EN_VTT\" : [(0,",
"\" stabilized\"), ({en1 : [{1}]}, \"\"), ({en2 : [{1}], vdd: [(2000, 2600)]}, \"\")",
"}, [\"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU2, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU2)",
"{\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")),",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], {},",
"[{1}], rst : [{1}], clk : [(0, 0), (3300, 3300), (50, 50)], vdd",
"after_set = {\"EN\"}), partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"EN\": [{0}],",
"vdd : [(0, 0)], vdd09 : [(0, 0)], vdd15 : [(0, 0)], ddr24",
"= {\"VR_ON\"}), partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\", \"VR_ON\"}, set())])), Constraint([(0, 0)], {\"VR_ON\": [{0}],",
"= math.ceil(1600 - i * 6.25) outputs.append( Constraint( [(voltage_min, voltage_max)], \\ {\"VID\" :",
"set())])), Constraint([(0, 0)], {\"VR_ON\": [{0}], \"VDD33\":[(0, 3630)], \"VDDH\":[(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_off)), ],",
"0x0, SI5395, [\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_wires",
"1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_FDV_1V8\", set(),",
"vdd15, ddr24, ddr24_2v5, ddr13, ddr13_2v5, vttddr24, vttddr13, io33 : { #pll_vdd, pll_ddr2, pll_ddr13,",
"vdd15 : [(1450, 1550)]}, \"\"), ({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400, 2600)],",
"\"EN\" : [{0}]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\")), ], \"power\", Wire.ir_set) VOUT_2 = lambda _,",
": [(940, 980)], vdd09 : [(870, 930)], vdd15 : [(1450, 1550)], ddr13 :",
"499, 2699))], \"SHDN\" : [{0}]} V_IN = Input([(0, 6000)], \"power\") SHDN = Input([{0,",
"print(req) return req def construct_dependency(self, name, req): return (SET.Implicit, [set(), set(), set(), set()],",
"_, thresh : {\"VCC\" : [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN_2\" :",
"= Input([(0, 40000)], \"monitor\", lambda node, name: node.ina_monitor(name)) def __init__(self, name, bus_addr, device):",
"self.current = [(self.default, self.default)] return except State_Space_Error: pass class NCP(Node): implicit_on = {\"VRI\"",
"self.current = [(self.default, self.default)] return except State_Space_Error: pass try: intersect(states[self.VDDH.name], [(8500, 14000)]) self.current",
"\"POWERED_DOWN\": [ ({vdd: [(2300, 2400)]}, \"wait until \" + vdd + \" stabilized\"),",
"2500)], \"VIN\" : [(0, thresh-1)], \"EN\" : [{0}]} implicit_off_2 = lambda _, thresh",
"\"init_device('%s', False)\" % self.loop2 ] class FPGA(Stateful_Node): CLK = Input([(0, 0), (3300, 3300),",
"[(0, 2374)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\"))], \"power\") def __init__(self, name, bus_addr): super(NCP, self).__init__(name, bus_addr,",
"return try: intersect(states[self.VCC.name], [(2900, 3630)]) except State_Space_Error: self.configured = False return def configure(self):",
"1}], \"logical\") EN2 = Input([{0, 1}], \"logical\") states = (lambda vdd33, vdd, en1,",
"1.01)) self.V_OUT = self.V_OUT(voltage, int(voltage * 1.01)) super(MAX15053, self).__init__(name, bus_addr, MAX15053) def binary_multidimensional(decimal):",
"\"bmc\", \"B_CLOCK_FLOL\", {(\"fpga\", \"CLK_OK\")}), (\"fpga_clk\", \"U16\", \"CLK\", {(\"fpga\", \"CLK\")}), (\"en_mgtvccaux_l\", \"U35\", \"OUT11\", {(\"IC5\",",
"(\"en_vccintio_bram_fpga\", \"U35\", \"OUT13\", {(\"IC13\", \"EN\")}), (\"vccintio_bram_fpga\", \"IC13\", \"V_OUT\", {(\"U35\", \"VMON10\"), (\"fpga\", \"VCCINT_IO\")}), (\"en_vdd_ddrfpga13\",",
"\"VCCO_VTT_DDR24\")}), (\"clk_sig\", \"oscillator\", \"CLK\", {(\"U57\", \"CLK_IN\")}), (\"clk_main\", \"U57\", \"CLK\", {(\"U11\", \"CLK_IN\"), (\"U16\", \"CLK_IN\")}),",
"CLK_IN = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK = Output([(0, 0),",
"__init__(self, name, bus_addr, threshold, device, loop1, loop2, l1_addr, l2_addr): self.configured = False self.is_default",
"BMC, []), (\"U24\", 0x0, NCP, []), (\"U25\", 0x0, NCP, []), (\"U39\", 0x0, NCP,",
"clk, vdd, vdd09, vdd15, ddr24, ddr24_2v5, ddr13, ddr13_2v5, vttddr24, vttddr13, io33 : {",
"0), (3300, 3300), (50, 50)], vcc : [(873, 927)], io : [(873, 927)],",
"lambda node, name: node.isppac_monitor(\"VMON4_ATT\", name)) VMON5 = Input([(0, 5734)], \"monitor\", lambda node, name:",
"[{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit,",
"PowerState({ ok : [{1}], rst : [{1}], clk : [(0, 0), (3300, 3300),",
"0x0, MAX15053, [1800]), #SYS_1V8, p 89 (\"IC8\", 0x0, MAX15053, [2500]), #SYS_2V5_13 (\"IC9\", 0x0,",
"\"V_OUT\", {(\"U35\", \"VMON11_ATT\"), (\"fpga\", \"VCCAUX\")}), (\"en_util33\", \"U35\", \"OUT6\", {(\"IC10\", \"EN\")}), (\"util33\", \"IC10\", \"V_OUT\",",
": [] } ), \"POWERED_ON\" : PowerState({ ok : [{1}], rst : [{1}],",
"12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT0\"))], \"power\") OUT1 = Output([(0, 12000)],",
"aux : [(1746, 1854)], vcco : [(1746, 1854)], vadj : [(1746, 1854)], vcc_2v5_ddr13",
"1200)], ddr13: [(1200, 1200)], ################################# ddr24_2v5 : [(2400, 2600)], #not imposed by the",
"name)) VMON8 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8\", name)) VMON8_ATT =",
"def __init__(self, name, bus_addr): super(ThunderX, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX) #EVAL 3 version of",
"[ Constraint([(3300, 3300)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]},",
"in fpga boot sequ; filled in like VCCO_VCC_DDR voltages MGTVCCAUX_L = Input([(0, 1900)],",
"states[self.V_OUT.name] self.is_default = True return except State_Space_Error: self.is_default = False try: intersect(states[self.V_PWR.name], [(0,",
"name, bus_addr): super(NCP, self).__init__(name, bus_addr, NCP) class MAX8869(Node): implicit_on = lambda _, thresh:",
"= Input([(0, 6000)], \"power\") VRI = Input([(0, 6000)], \"power\") #reference input VREF =",
"= {\"VR_ON\": [{0}], \"VDD33\": [0, 2800], \"VDDH\": [(0, 8499)]} VDD33 = Input([(0, 4000)],",
"CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK_OK = Input([{0, 1}],",
"node.isppac_monitor(\"VMON12\", name)) VCC_IN = Input([(0, 6000)], \"power\", lambda node, name: node.isppac_monitor(\"VCCINP\", name)) VCC",
"[{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"SHDN\" : [{0}]},",
"#SYS_2V5_24 (\"IC15\", 0x0, MAX15053, [2500]), #2V5_CPU13 p 71 (\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24",
": [(0, 0)], vdd15 : [(0, 0)], ddr24 : [(0, 0)], ddr24_2v5 :",
"0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)], \"EN_2\" : [{0}]}, partial(Constraint.implicit,",
"= Input([(0, 23000)], \"power\") #slave input power VR_ON = Input([{0, 1}], \"logical\") V_OUT",
"\"VCC\"), (\"U57\", \"VDD\"), (\"U11\", \"VDD\"), (\"U16\", \"VDD\"), (\"oscillator\", \"VDD\")}), #(\"12v_psup\", \"main_psu\", \"V12_PSU\", {}),",
": [{0}], rst : [{0}], clk : [(0, 0), (3300, 3300), (0, 0)],",
"= device self.configured = False super(SI5395, self).__init__(name, bus_addr, SI5395) def update(self, states): try:",
"[{0}], rst : [{0}], clk : [(0, 0), (3300, 3300), (0, 0)], vdd",
"self.threshold-1)], self.VCC.name : [(0, 2500)]} def update(self, states): try: intersect(states[self.VOUT.name], [(500, 3040)]) self.is_default",
"\"EN_VTT\"), (\"U47\", \"EN_VTT\"), (\"U51\", \"VDDH\"), (\"IC13\", \"V_PWR\")}), (\"en_vcc1v8_fpga\", \"U35\", \"OUT15\", {(\"IC12\", \"EN\")}), (\"vcc1v8_fpga\",",
"(\"U39\", \"VRI\")}), #vcco (\"vdd_ddrfpga24\", \"U47\", \"VOUT\", {(\"U44\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR24\"), (\"U40\", \"VRI\")}), (\"b_cdv_1v8\",",
"{ \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd",
"\"bus\") def __init__(self, name, bus_addr): super(Bus, self).__init__(name, bus_addr, Bus) def construct_req(self, inputs): req",
"Constraint([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"{(\"U35\", \"VMON10\"), (\"fpga\", \"VCCINT_IO\")}), (\"en_vdd_ddrfpga13\", \"U35\", \"OUT18\", {(\"U43\", \"EN_PWR\")}), (\"en_vdd_ddrfpga24\", \"U35\", \"OUT19\", {(\"U47\",",
"[]}) }, [\"VDD33\", \"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU_3, self).__init__(name, bus_addr,",
"[], lambda node: node.indep(\"OUT1\"))], \"power\") OUT2 = Output([(0, 12000)], [([(0, 12000)], {}, [],",
"ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700, 800)], vttddr13 : [(700, 800)]}, \"%s",
"{0, 1}, {0, 1}], \"VCC\" : [(0, 5250)], \"EN_PWR\" : [{0}], \"EN_VTT\" :",
"0), (3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\")), Constraint([(0, 0),",
"False)\" % self.device, \"init_device('%s', False)\" % self.loop1, \"init_device('%s', False)\" % self.loop2 ] class",
"Output([(0, 5250)], [Constraint([], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_on), state_update =",
": [(0, 0)], ddr13_2v5 : [(0, 0)], vttddr24 : [(0, 0)], vttddr13 :",
"= True return [ \"init_device('%s', False)\" % (self.device) ] class Clock(Node): CLK =",
"Clock) class PowerConsumer(Node): node_string = \"<V_IN> V_IN\" V_IN = Input([(0, 12000)], \"power\") def",
"en1 : [{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({vdd: [(2300, 2400)]}, \"wait",
"def __init__(self, name, bus_addr): super(Main_PSU, self).__init__(name, bus_addr, Main_PSU) class PowerSupply(Node): OUT0 = Output([(0,",
"\"OUT12\", {(\"IC6\", \"SHDN\")}), (\"mgtvccaux_l\", \"IC5\", \"V_OUT\", {(\"U35\", \"VMON8\"), (\"fpga\", \"MGTVCCAUX_L\")}), (\"mgtvccaux_r\", \"IC6\", \"V_OUT\",",
"vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], vtt_ddr13",
"[(1746, 1845)]}, \"\"), ({mgtavcc : [(873, 927)]}, \"\"), ({vtt : [(1164, 1236)]}, \"\"),",
"thresh: Output([(0, thresh)], [Constraint([(default, default)], {\"V_IN\" : [(max(thresh + 500, 2700), 5500)], \"SHDN\"",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4\", name)) VMON4_ATT = Input([(0, 5734)],",
"name, inputs: node.construct_dependency(name, node.construct_req_off(inputs))) ], \"bus\") def __init__(self, name, bus_addr): super(Bus, self).__init__(name, bus_addr,",
"(\"bmc_vcc_3v3\", \"main_psu\", \"BMC_VCC_3V3\", {(\"U20\", \"VCC\"), (\"U35\", \"VCC\")}), #add 5vsb if added bmc (\"12v_cpu0_psup\",",
"\"B_CDV_1V8\", {(\"U26\", \"VID\"), (\"U30\", \"VID\")}), (\"b_fdv_1v8\", \"bmc\", \"B_FDV_1V8\", {(\"U43\", \"VID\"), (\"U47\", \"VID\")}), (\"c_reset_n\",",
"(\"cpu\", \"VTT_DDR13\")}), (\"en_2v5_cpu24\", \"U20\", \"OUT10\", {(\"IC16\", \"V_EN\")}), #to NCP (\"2v5_cpu24\", \"IC16\", \"V_OUT\", {(\"U20\",",
"(\"U31\", \"VS\"), (\"IC15\", \"V_IN\"), (\"IC16\", \"V_IN\"), (\"cpu\", \"VDD_IO33\"), (\"U34\", \"VCC\"), (\"U57\", \"VDD\"), (\"U11\",",
"int(voltage * 1.01)) super(MAX15053, self).__init__(name, bus_addr, MAX15053) def binary_multidimensional(decimal): binary = bin(decimal)[2:] #remove",
"\"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.implicit, \"CLK\", \"implicit_on\"), dependency_update= (Constraint.is_configured, [partial(Constraint.implicit,",
"self.device = device self.configured = False super(SI5395, self).__init__(name, bus_addr, SI5395) def update(self, states):",
"(\"vdd_oct_en_l2\", \"U20\", \"OUT7\", {(\"U34\", \"EN_2\")}), (\"0v9_vdd_oct\", \"U34\", \"VOUT_2\", {(\"U20\", \"VMON5\"), (\"cpu\", \"VDD_09\")}), (\"en_1v5_vdd_oct\",",
"MAX20751, [1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA p 87 (\"U51\", 0x70, MAX20751, [900, \"max20751_vccint_fpga\"]), #VCCINT_FPGA p 97",
"\"VDDH\"), (\"IC10\", \"V_PWR\"), (\"IC11\", \"V_PWR\"), (\"IC12\", \"V_PWR\"), (\"U43\", \"EN_VTT\"), (\"U47\", \"EN_VTT\"), (\"U51\", \"VDDH\"),",
"found in fpga boot sequ; filled in like VCCO_VCC_DDR voltages MGTVCCAUX_L = Input([(0,",
"({clk: [(0, 0), (3300, 3300), (50, 50)], io33 : [(3140, 3460)]}, \"wait for",
"bus_addr, ISL) def update(self, states): try: intersect(states[self.VOUT.name], [(500, 1600)]) self.is_default = True except",
": [{1}]} implicit_off = lambda _, threshold: {\"V_IN\" : [(0, (max(int(threshold * 1.06)",
"= False self.current = [(default, default)] self.V_OUT = MAX20751.V_OUT(default) super(MAX20751, self).__init__(name, bus_addr, MAX20751)",
": [(4750, 5250)], \"EN_PWR\" : [{1}], \"EN_VTT\" : [(870, 14000)]}, {}, \\ dependency_update=",
"io : [(0, 0)], aux : [(0, 0)], vcco : [(0, 0)], vadj",
"Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]},",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8\", name)) VMON8_ATT = Input([(0, 5734)],",
"\"VDD\")}), #(\"12v_psup\", \"main_psu\", \"V12_PSU\", {}), #add bmc (\"5v_psup\", \"main_psu\", \"V5_PSU\", {(\"U35\", \"VMON2_ATT\"), (\"U20\",",
"Input([(0, 3400)], \"power\") VCCO_VTT_DDR13 = Input([(0, 2000)], \"power\") #replace VREF VCCO_VTT_DDR24 = Input([(0,",
"({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd : [(2300, 2600)],",
"threshold: {\"V_IN\" : [(0, (max(int(threshold * 1.06) - 1, 2699)))], \"V_EN\" : [{0}]}",
"= Input([(0, 2000)], \"power\") #this is sys_1v8.... VCCINT = Input([(0, 1000)], \"power\") MGTAVCC",
"\"U26\", \"VOUT\", {(\"U20\", \"VMON9\"), (\"U27\", \"VBUS\"), (\"cpu\", \"VDD_DDR13\"), (\"U24\", \"VRI\")}), (\"vdd_ddrcpu24\", \"U30\", \"VOUT\",",
"#vcc_ddr24 : [(1140, 3400)], ##### VALUES FOR EVAL 3: ##### vcc_ddr13 : [(1200,",
"[Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_RESET_N\", set(), set()))], \"logical\", Wire.gpio_set) C_PLL_DC_OK = Output([{0, 1}],",
"self.device, \"init_device('%s', False)\" % self.loop1, \"init_device('%s', False)\" % self.loop2 ] class FPGA(Stateful_Node): CLK",
"default)], {\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\",",
"io33 : [(0, 0)] }, { \"POWERED_ON\" : [ ({ok : [{0}]}, \"\")",
"False self.current = [(default, default)] self.V_OUT = MAX20751.V_OUT(default) super(MAX20751, self).__init__(name, bus_addr, MAX20751) def",
"except State_Space_Error: pass try: intersect(states[self.VDDH.name], [(8500, 14000)]) self.current = [(self.default, self.default)] return except",
"\"VDDH\":[(8500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\",",
"threshold: Output([(0, default)], [Constraint([(default, default)], {\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\"",
"[ \"init_device('%s', False)\" % (self.device) ] #EVAL 3 version of the Enzian nodes,",
"added bmc (\"12v_cpu0_psup\", \"psu_cpu0\", \"OUT\", {(\"U20\", \"VMON1_ATT\"), (\"IC4\", \"V_PWR\"), (\"U26\", \"EN_VTT\"), (\"U30\", \"EN_VTT\"),",
"PowerSupply(Node): OUT0 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT0\"))], \"power\")",
"[{0}], \"EN_VTT\" : [(0, 14000)]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\"))) return outputs class ISL(Node): implicit_off",
"[1800]), #SYS_1V8, p 89 (\"IC8\", 0x0, MAX15053, [2500]), #SYS_2V5_13 (\"IC9\", 0x0, MAX15053, [2500]),",
"\"main_psu\", \"V5_PSU\", {(\"U35\", \"VMON2_ATT\"), (\"U20\", \"VMON2_ATT\"), (\"U26\", \"VCC\"), (\"U30\", \"VCC\"), (\"U43\", \"VCC\"), (\"U47\",",
"0), (3300, 3300), (0, 0)], vdd : [(0, 0)], vdd09 : [(0, 0)],",
"VREF VCCO_VTT_DDR24 = Input([(0, 2000)], \"power\") VCCO_VCC_DDR13 = Input([(0, 3400)], \"power\") VADJ_1V8 =",
"3 version of the Enzian nodes, only changes classes of ThunderX and FPGA",
"1}], [ Constraint([{1}], lambda node, inputs: node.construct_req(inputs), lambda node, name, inputs: node.construct_dependency(name, node.construct_req(inputs))),",
": [(2400, 2600)], ddr13 : [(1200, 1200)], ddr13_2v5 : [(2400, 2600)], vttddr24 :",
": [] }) }, [\"PLL_DC_OK\", \"CHIP_RESET_L\", \"PLL_REF_CLK\", \"VDD\", \"VDD_09\", \"VDD_15\", \"VDD_DDR24\", \"VDD_2V5_DDR24\", \"VDD_DDR13\",",
"3300)], \"power\") VTT_DDR24 = Input([(0, 1400)], \"power\") VTT_DDR13 = Input([(0, 1400)], \"power\") VDD_IO33",
"node=self, wire_name=wire_name, pinname=pinname, multiplier=multiplier): commands = node.configure() if list(value[0])[0] == 0: commands.append(\"wait_for_voltage('%s', v_min=0,",
"{\"VCC\" : [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN_2\" : [{0}]} device =",
"except State_Space_Error: self.configured = False return try: intersect(states[self.VCC.name], [(2900, 3630)]) except State_Space_Error: self.configured",
"\"VDD_DDR24\"), (\"U25\", \"VRI\")}), (\"vdd_core_en\", \"U20\", \"OUT6\", {(\"U34\", \"EN\")}), (\"vdd_core\", \"U34\", \"VOUT\", {(\"U20\", \"VMON4\"),",
"SI5395, [\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_wires =",
"def ina_monitor(self, wire_name): def fun(value, states, node=self, wire=wire_name): if states[node.VS.name][0][0] > 2700 and",
"bus_addr, PowerConsumer) class CPU2(Stateful_Node): VDD = Input([(0, 2600)], \"power\") EN1 = Input([{0, 1}],",
"[(570, 630)], io33 : [(3140, 3460)], }, { \"POWERED_DOWN\": [ ({clk: [(0, 0),",
"\"V_EN\" : [{0}]} V_IN = Input([(0, 6000)], \"power\") V_EN = Input([{0, 1}], \"logical\")",
"vttddr13, io33 : { #pll_vdd, pll_ddr2, pll_ddr13, sys_pll_ddr : { \"POWERED_DOWN\" : PowerState({",
"(Constraint.is_configured, [partial(Constraint.implicit, \"CLK\", \"implicit_on\"), partial(Constraint.explicit, \"CLK\", {}, set())])), Constraint([(0, 0), (3300, 3300), (0,",
"[\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_nodes = [",
"(\"IC5\", \"V_IN\"), (\"IC6\", \"V_IN\"), (\"IC7\", \"V_IN\"), (\"IC8\", \"V_IN\"), (\"IC9\", \"V_IN\"), (\"U51\", \"VDD33\")}), (\"en_mgtavtt_fpga\",",
"Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN\"",
"= lambda _, threshold: {\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" :",
"inputs: node.construct_dependency(name, node.construct_req_off(inputs))) ], \"bus\") def __init__(self, name, bus_addr): super(Bus, self).__init__(name, bus_addr, Bus)",
"\" + vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"), ({vdd: [(2000, 2200)]},",
"[\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_wires = [ (\"b_psup_on\", \"bmc\", \"B_PSUP_ON\", {(\"psu_cpu0\",",
"{(\"fpga\", \"VCCO_VTT_DDR13\")}), (\"en_sys_2v5_24\", \"U35\", \"OUT8\", { (\"IC9\", \"V_EN\")}), (\"sys_2v5_24\", \"IC9\", \"V_OUT\", {(\"U35\", \"VMON5_ATT\"),",
": [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ({mgtavcc : [(873, 927)]}, \"\"), ({vtt",
"\"V_PWR\": [(5500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_on), state_update = Constraint.default_state), Constraint([(600, 5250)], {\"EN\": [{1}],",
"Input([{0, 1}], \"bus\") #loop 1 and loop 2 will have different addresses... VCC",
"else: commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='%s')\" % ( wire_name, 0.00095 * list(value[0])[0] *",
"{(\"U20\", \"VCC_IN\"), (\"U35\", \"VCC_IN\")}), (\"bmc_vcc_3v3\", \"main_psu\", \"BMC_VCC_3V3\", {(\"U20\", \"VCC\"), (\"U35\", \"VCC\")}), #add 5vsb",
"[{0}]}, { \"POWERED_DOWN\": [ ({en2 : [{1}]}, \"\"), ({vdd33: [(3000, 4000)], vdd: [(2000,",
"12000)], \"power\") VID = Input([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"p 85 (\"U41\", 0x73, MAX20751, [1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA p 87 (\"U51\", 0x70, MAX20751, [900,",
"vcc_2v5_ddr24 : [(2400, 2600)], ##### REGULAR VALUES: ####### #vcc_ddr13 : [(1140, 3400)], #vcc_ddr24",
"}) }, [\"PLL_DC_OK\", \"CHIP_RESET_L\", \"PLL_REF_CLK\", \"VDD\", \"VDD_09\", \"VDD_15\", \"VDD_DDR24\", \"VDD_2V5_DDR24\", \"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\",",
"% self.device, \"init_device('%s', False)\" % self.loop1, \"init_device('%s', False)\" % self.loop2 ] class FPGA(Stateful_Node):",
"try: intersect(states[self.VCC.name], [(2900, 3630)]) except State_Space_Error: self.configured = False return def configure(self): if",
"set(), set()))], \"logical\", Wire.gpio_set) C_RESET_N = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_RESET_N\",",
"980)], vdd09 : [(870, 930)], vdd15 : [(1450, 1550)], ####### REGULAR VALUES #########",
"(\"U41\", \"VDD33\"), (\"IC5\", \"V_IN\"), (\"IC6\", \"V_IN\"), (\"IC7\", \"V_IN\"), (\"IC8\", \"V_IN\"), (\"IC9\", \"V_IN\"), (\"U51\",",
"\"OUT19\", {(\"U47\", \"EN_PWR\")}), (\"vdd_ddrfpga13\", \"U43\", \"VOUT\", {(\"U48\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR13\"), (\"U39\", \"VRI\")}), #vcco",
"0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300),",
": [(0, 0)], vadj : [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 :",
"[(3000, 4000)], vdd : [(2000, 2500)], en1 : [{1}], en2 : [{0}]}, {",
"partial(Constraint.implicit, \"CLK\", {\"VDD\": [(2600, 3600)]})), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(0,",
"VDD = Input([(0, 2500)], \"power\") EN1 = Input([{0, 1}], \"logical\") EN2 = Input([{0,",
"MAX15301.implicit_on), state_update = Constraint.default_state), Constraint([(600, 5250)], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)], \"BUS\" :",
"50)], {\"VDD\": [(2600, 3600)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(2600, 3600)]})), Constraint([(0, 0), (3300, 3300),",
"\\ dependency_update= (Constraint.is_default, \\ [partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, after_set={\"EN_PWR\"}, before_complete=",
"* list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return (True, \"\\n\".join(commands)) else: return (False,",
"functools import partial import z3 class INA226(Node): BUS = Input([{0, 1}], \"bus\") VS",
": [(0, 0)], ok : [{0}] }, { \"POWERED_ON\" : [ ({mgtaux_l :",
"\"power\") #actually, the CPU names its ddr bank voltage DDR02 (not 24), #but",
"io33 : [(3140, 3460)]}, \"wait for %s to stabilize\" %(io33)), ({vdd : [(940,",
"EN1 = Input([{0, 1}], \"logical\") EN2 = Input([{0, 1}], \"logical\") states = (lambda",
"name)) VMON5 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5\", name)) VMON5_ATT =",
"(\"IC12\", \"V_PWR\"), (\"U43\", \"EN_VTT\"), (\"U47\", \"EN_VTT\"), (\"U51\", \"VDDH\"), (\"IC13\", \"V_PWR\")}), (\"en_vcc1v8_fpga\", \"U35\", \"OUT15\",",
"(\"U27\", \"VBUS\"), (\"cpu\", \"VDD_DDR13\"), (\"U24\", \"VRI\")}), (\"vdd_ddrcpu24\", \"U30\", \"VOUT\", {(\"U20\", \"VMON10\"), (\"U31\", \"VBUS\"),",
"Input([(0, 2000)], \"power\") #replace VREF VCCO_VTT_DDR24 = Input([(0, 2000)], \"power\") VCCO_VCC_DDR13 = Input([(0,",
"{\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)], \"EN\" : [{0}]}, partial(Constraint.implicit, \"VOUT\",",
"* 1.06), 2700), 5500)], \"V_EN\" : [{1}]} implicit_off = lambda _, threshold: {\"V_IN\"",
"super(BMC, self).__init__(name, bus_addr, BMC) self.configured = False def configure(self): if self.configured: return []",
"ok : [{0}], rst : [{0}], clk : [(0, 0), (3300, 3300), (0,",
"\"VCCO_VCC_DDR13\"), (\"U39\", \"VRI\")}), #vcco (\"vdd_ddrfpga24\", \"U47\", \"VOUT\", {(\"U44\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR24\"), (\"U40\", \"VRI\")}),",
": [(700, 800)]}, \"%s should have stabilized by now\" %vdd), #### FOR EVAL",
"REGULAR TRANSITION STEP ###### #({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24",
"{\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\")), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"CLK_IN\":",
"\"VRI\")}), (\"b_cdv_1v8\", \"bmc\", \"B_CDV_1V8\", {(\"U26\", \"VID\"), (\"U30\", \"VID\")}), (\"b_fdv_1v8\", \"bmc\", \"B_FDV_1V8\", {(\"U43\", \"VID\"),",
"{\"VDD33\", \"VDDH\", \"BUS\", \"VR_ON\"}, set())])), Constraint([(0, 0)], {\"VR_ON\": [{0}], \"VDD33\":[(0, 3630)], \"VDDH\":[(0, 14000)]},",
"* list(value[0])[0] * multiplier, node.device, pinname )) return (True, \"\\n\".join(commands)) return fun def",
"[\"ina226_ddr_fpga_24\"]), (\"U48\", 0x41, INA226, [\"ina226_ddr_fpga_13\"]), (\"U27\", 0x44, INA226, [\"ina226_ddr_cpu_13\"]), (\"U31\", 0x45, INA226, [\"ina226_ddr_cpu_24\"]),",
"== x2 + 1), ([(\"VREF\", 0), (\"VRI\", 0)]))]), Constraint([(0, 0)], {\"VRI\" : [(0,",
": [(0, 0)], vdd09 : [(0, 0)], vdd15 : [(0, 0)], ddr24 :",
"Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT0\"))], \"power\") OUT1 = Output([(0,",
"\"V5_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\": [{0}]}))], \"power\")",
"{(\"cpu\", \"PLL_DC_OK\")}), (\"en_vdd_ddrcpu13\", \"U20\", \"OUT11\", {(\"U26\", \"EN_PWR\")}), (\"en_vdd_ddrcpu24\", \"U20\", \"OUT12\", {(\"U30\", \"EN_PWR\")}), (\"vdd_ddrcpu13\",",
"{\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default,",
"l2_addr self.implicit_off = self.implicit_off(threshold) self.implicit_off_2 = self.implicit_off_2(threshold) self.VOUT = self.VOUT(threshold) self.VOUT_2 = self.VOUT_2(threshold)",
"ddr24: [(1200, 1200)], ddr13: [(1200, 1200)], ################################# ddr24_2v5 : [(2400, 2600)], #not imposed",
"partial(Constraint.implicit, \"OUT\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{0}]}))], \"power\")",
"#vcco (\"vdd_ddrfpga24\", \"U47\", \"VOUT\", {(\"U44\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR24\"), (\"U40\", \"VRI\")}), (\"b_cdv_1v8\", \"bmc\", \"B_CDV_1V8\",",
"__init__(self, name, bus_addr): super(FPGA, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA) #EVAL 3 version of the",
"partial(Constraint.explicit, \"C_RESET_N\", set(), set()))], \"logical\", Wire.gpio_set) C_PLL_DC_OK = Output([{0, 1}], [Constraint([{0, 1}], {},",
"Input([(0, 1400)], \"power\") VTT_DDR13 = Input([(0, 1400)], \"power\") VDD_IO33 = Input([(0, 3600)], \"power\")",
"vttddr24 : [(570, 630)], vttddr13 : [(570, 630)]}, \"%s should have stabilized by",
"__init__(self, name, bus_addr, voltage): self.implicit_on = self.implicit_on(int(voltage * 1.01)) self.implicit_off = self.implicit_off(int(voltage *",
"node, name: node.isppac_monitor(\"VMON7_ATT\", name)) VMON8 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8\",",
"partial(Constraint.implicit, \"V12_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\": [{0}]}))],",
"VCCAUX = Input([(0, 2000)], \"power\") states = (lambda clk, ok, vcc, io, aux,",
"schematics, so I know which supplies to connect.. :') VDD_DDR24 = Input([(0, 1650)],",
"have written pll_mul and sys_pll_mul beforehand\"), ({rst : [{1}]}, \"\") ], \"POWERED_ON\" :",
"{\"VCC\": [(0, 4500)], \"VCC_IN\": [(0, 2000)]}, partial(Constraint.implicit, name, \"implicit_off\"))], \"logical\", Wire.pin_set) setattr(self, name,",
"\"POWERED_DOWN\", ThunderX_EVAL3) class Bus(Node): BUS = Output([{0, 1}], [ Constraint([{1}], lambda node, inputs:",
"\"C_RESET_N\", set(), set()))], \"logical\", Wire.gpio_set) C_PLL_DC_OK = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit,",
"PowerState({vdd : [(0, 0)], en1 : [{0}], en2 : [{0}]}, { \"POWERED_ON\": [",
"VCC_IN = Input([(0, 6000)], \"power\", lambda node, name: node.isppac_monitor(\"VCCINP\", name)) VCC = Input([(0,",
"node: node.indep(\"OUT0\"))], \"power\") OUT1 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node:",
"(\"U30\", \"EN_VTT\"), (\"U34\", \"VIN\")}), (\"12v_cpu1_psup\", \"psu_cpu1\", \"OUT\", {(\"U35\", \"VMON1_ATT\"), (\"U37\", \"VDDH\"), (\"U41\", \"VDDH\"),",
"#ddr13 : [(1425, 1575)], #ddr24: [(1425, 1575)], ####### FOR EVAL 3 ########### ddr24:",
"False)\" ] class CPU_3(Stateful_Node): VDD33 = Input([(0, 4000)], \"power\") VDD = Input([(0, 2500)],",
"\"psu_cpu0\", \"OUT\", {(\"U20\", \"VMON1_ATT\"), (\"IC4\", \"V_PWR\"), (\"U26\", \"EN_VTT\"), (\"U30\", \"EN_VTT\"), (\"U34\", \"VIN\")}), (\"12v_cpu1_psup\",",
"NCP, []), (\"U40\", 0x0, NCP, []), (\"U57\", 0x0, SI5395, [\"clk_main\"]), (\"U11\", 0x0, SI5395,",
"20): self.generate_output(i) super(ISPPAC, self).__init__(name, bus_addr, ISPPAC) self.configured = False def generate_output(self, number): name",
"\"V_OUT\", {(\"U35\", \"VMON4_ATT\"), (\"U39\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR13\")}), #add NCP nodes (\"vtt_ddrfpga13\", \"U39\", \"VREF\",",
"class MAX15301(Node): implicit_off = {\"EN\": [{0}], \"V_PWR\": [(0, 4400)]} implicit_on = {\"EN\": [{1}],",
"set()))], \"logical\", Wire.gpio_set) C_PLL_DC_OK = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_PLL_DC_OK\", set(),",
"vcc : [(873, 927)], io : [(873, 927)], aux : [(1746, 1854)], vcco",
"name: node.isppac_monitor(\"VMON8\", name)) VMON8_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8_ATT\", name))",
"name)) VMON10 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON10\", name)) VMON11 =",
"\"VIN\" : [(0, 13200)], \"EN\" : [{0}]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\")), ], \"power\", Wire.ir_set)",
"outputs.append(Constraint([(0, 0)], {\"VID\": [{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1},",
"2600)], vcc_ddr13 : [(1140, 3400)], vcc_ddr24 : [(1140, 3400)], vtt_ddr13 : [(550, 1700)],",
"ThunderX) #EVAL 3 version of the THUNDERX: Comments indicate changes class ThunderX_EVAL3(Stateful_Node): PLL_DC_OK",
"{\"EN_2\"}), partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\", \"EN_2\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0,",
"\"VOUT\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\",",
"except State_Space_Error: self.is_default = False try: intersect(states[self.V_PWR.name], [(0, 4400)]) self.current = [(self.default, self.default)]",
"\"VCCO_2V5_DDR24\", \"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr):",
": [{0}]} V_IN = Input([(0, 6000)], \"power\") V_EN = Input([{0, 1}], \"logical\") V_OUT",
"connect.. :') VDD_DDR24 = Input([(0, 1650)], \"power\") VDD_2V5_DDR24 = Input([(0, 3300)], \"power\") VTT_DDR24",
"= Input([(0, 6000)], \"power\") EN_PWR = Input([{0, 1}], \"logical\") EN_VTT = Input([(0, 12000)],",
": [(2400, 2600)], vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], vtt_ddr13 :",
": [(700, 800)]}, \"%s should have stabilized by now\" %vdd), ({ok : [{1}]},",
"\"power\") def __init__(self, name, bus_addr): super(NCP, self).__init__(name, bus_addr, NCP) class MAX8869(Node): implicit_on =",
": [(0, 868)], \"VCC\" : [(0, 2374)]} VCC = Input([(0, 6000)], \"power\") VRI",
"outputs class ISL(Node): implicit_off = {\"VCC\" : [(0, 4300)], \"EN_PWR\" : [{0, 1}],",
": [{0}] }, { \"POWERED_ON\" : [ ({mgtaux_l : [(0, 0)], mgtaux_r :",
"[]), (\"cpu\", 0x0, ThunderX_EVAL3, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP, []),",
"#to NCP (\"2v5_cpu13\", \"IC15\", \"V_OUT\", {(\"U20\", \"VMON7_ATT\"), (\"U24\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR13\")}), (\"vtt_ddrcpu13\", \"U24\",",
"Input([(0, 6000)], \"power\") VBUS = Input([(0, 40000)], \"monitor\", lambda node, name: node.ina_monitor(name)) def",
"####### FOR EVAL 3 ########### ddr24: [(1200, 1200)], ddr13: [(1200, 1200)], ################################# ddr24_2v5",
"#cpu ISPPAC (\"U35\", 0x61, ISPPAC, [\"pac_fpga\"]), #fpga ISPPAC (\"U44\", 0x40, INA226, [\"ina226_ddr_fpga_24\"]), (\"U48\",",
"[]), #VDD_DDRCPU13 p 73 (\"U30\", 0x0, ISL, []), #VDD_DDRCPU24 p 75 (\"U37\", 0x72,",
"0x0, NCP, []), (\"U40\", 0x0, NCP, []), (\"U57\", 0x0, SI5395, [\"clk_main\"]), (\"U11\", 0x0,",
"\"CLK\", {\"VDD\", \"CLK_IN\"}, set())), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(2600, 3600)],",
"[(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN\" : [{0}]} implicit_off_2 = lambda _,",
"* 2 == x2, x1 * 2 == x2 + 1), ([(\"VREF\", 0),",
"% (self.device) ] class MAX15301(Node): implicit_off = {\"EN\": [{0}], \"V_PWR\": [(0, 4400)]} implicit_on",
"classes of ThunderX and FPGA to EVAL 3 versions enzian_nodes_EVAL3 = [ (\"power_bus\",",
"\"OUT12\", {(\"U30\", \"EN_PWR\")}), (\"vdd_ddrcpu13\", \"U26\", \"VOUT\", {(\"U20\", \"VMON9\"), (\"U27\", \"VBUS\"), (\"cpu\", \"VDD_DDR13\"), (\"U24\",",
"\"EN\"}, set())])), Constraint([(0, 0)], {\"EN\": [{0}], \"V_PWR\": [(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_off)) ],",
"except State_Space_Error: self.is_default = False class IR(Node): implicit_off = lambda _, thresh :",
"lambda _, thresh: {\"V_IN\" : [(max(thresh + 500, 2700), 5500)], \"SHDN\" : [{1}]}",
"multiplier, node.device, pinname )) return (True, \"\\n\".join(commands)) return fun def configure(self): if self.configured:",
": [(0, 0)], mgtaux_r : [(0, 0)]}, \"\"), ({vtt : [(0, 0)]}, \"\"),",
"State_Space_Error: pass try: intersect(states[self.VDDH.name], [(8500, 14000)]) self.current = [(self.default, self.default)] return except State_Space_Error:",
"VMON11 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11\", name)) VMON11_ATT = Input([(0,",
"else: self.configured = True return [ \"init_device('%s', False)\" % (self.device) ] class MAX15301(Node):",
"lambda node: node.indep(\"CLK\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Clock, self).__init__(name, bus_addr, Clock)",
"( wire, 0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return fun def",
"#VDD_DDRFPGA13 p 93 (\"U47\", 0x0, ISL, []), #DD_DDRFPGA24 p 95 (\"IC5\", 0x0, MAX8869,",
"\"power\") OUT1 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT1\"))], \"power\")",
"2800], \"VDDH\": [(0, 8499)]} VDD33 = Input([(0, 4000)], \"power\") BUS = Input([{0, 1}],",
"[(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1140, 3400)], vcc_ddr24 : [(1140,",
"3300), (0, 50)], \"clock\") CLK_OK = Input([{0, 1}], \"logical\") VCCO_2V5_DDR24 = Input([(0, 3400)],",
"[{0}], \"V_PWR\": [(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_off)) ], \"power\", Wire.voltage_set) def __init__(self, name,",
"\"\"), ({en2 : [{1}], vdd: [(2000, 2600)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD\",",
"[] }), \"POWERED_ON\" : PowerState({ clk: [(0, 0), (3300, 3300), (50, 50)], vcc",
"\"power\") VDD_2V5_DDR13 = Input([(0, 3300)], \"power\") #actually, the CPU names its ddr bank",
"[(2375, 5500)]} implicit_off = {\"VRI\" : [(0, 868)], \"VCC\" : [(0, 2374)]} VCC",
"self.configured = True return [ \"init_device('%s', False)\" % (self.device) ] class Clock(Node): CLK",
"lambda node, name: node.isppac_monitor(\"VMON8\", name)) VMON8_ATT = Input([(0, 5734)], \"monitor\", lambda node, name:",
": [(0, 0)], vadj: [(0, 0)]}, \"\"), ({aux : [(0, 0)]}, \"\"), ({io",
"PowerState({ clk: [(0, 0), (3300, 3300), (0, 0)], vcc : [(0, 0)], io",
"0), (3300, 3300), (50, 50)], vdd : [(940, 980)], vdd09 : [(870, 930)],",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5\", name)) VMON5_ATT = Input([(0, 5734)],",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON7_ATT\", name)) VMON8 = Input([(0, 5734)], \"monitor\", lambda node,",
"Input([(0, 1000)], \"power\") MGTAVCC = Input([(0, 1000)], \"power\") MGTAVTT = Input([(0, 1300)], \"power\")",
"1}], \"bus\") EN = Input([{0, 1}], \"logical\") V_PWR = Input([(0, 14000)], \"power\") V_OUT",
"0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_wires = [ (\"b_psup_on\", \"bmc\",",
"Constraint([{1}], lambda node, inputs: node.construct_req(inputs), lambda node, name, inputs: node.construct_dependency(name, node.construct_req(inputs))), Constraint([{0}], lambda",
"\"init_device('%s', False)\" % (self.device) ] class MAX15301(Node): implicit_off = {\"EN\": [{0}], \"V_PWR\": [(0,",
"= lambda _, thresh : {\"VCC\" : [(0, 2500)], \"VIN\" : [(0, thresh-1)],",
"set(), set()))], \"logical\", Wire.cpu_clk_ok) B_FDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1}, {0,",
"(\"U25\", \"VRI\")}), (\"vdd_core_en\", \"U20\", \"OUT6\", {(\"U34\", \"EN\")}), (\"vdd_core\", \"U34\", \"VOUT\", {(\"U20\", \"VMON4\"), (\"cpu\",",
"self.VOUT_2 = self.VOUT_2(threshold) super(IR, self).__init__(name, bus_addr, IR) def bus_req(self): return {self.VIN.name : [(self.threshold,",
"_, thresh : Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\" : [(2900, 3630)], \"VIN\" :",
"Input([(0, 3400)], \"power\") VCCO_VCC_DDR24 = Input([(0, 3400)], \"power\") VCCO_VTT_DDR13 = Input([(0, 2000)], \"power\")",
"thresh : {\"VCC\" : [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN\" : [{0}]}",
"\"VDDH\", \"BUS\"}, {\"VR_ON\"}, after_set = {\"VR_ON\"}), partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\", \"VR_ON\"}, set())])),",
"[(0, 0), (3300, 3300), (50, 50)]}, \"\"), ({vcc : [(873, 927)]}, \"\"), ({io",
"0x0, Oscillator, []), ] enzian_nodes = [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0,",
"\"V_OUT\", MAX20751.implicit_off)), ], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr, default, device): self.device =",
"CPU_3) class PSU(Node): EN = Input([{0, 1}], \"logical\") OUT = Output([(0, 12000)], [",
"(0, 0)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.implicit,",
"aux, vcco, vadj, vcc_2v5_ddr13, vcc_2v5_ddr24, vcc_ddr13, vcc_ddr24, vtt_ddr13, vtt_ddr24, vtt, mgtaux_l, mgtaux_r, mgtavcc:",
"\"power\") VCCO_2V5_DDR13 = Input([(0, 3400)], \"power\") VCCO_VCC_DDR24 = Input([(0, 3400)], \"power\") VCCO_VTT_DDR13 =",
"\"logical\", Wire.cpu_clk_ok) B_FDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"(\"en_2v5_cpu13\", \"U20\", \"OUT9\", {(\"IC15\", \"V_EN\")}), #to NCP (\"2v5_cpu13\", \"IC15\", \"V_OUT\", {(\"U20\", \"VMON7_ATT\"), (\"U24\",",
"now\" %vdd), #### FOR EVAL 3 ###################### ({ddr24 : [(1200, 1200)], ddr24_2v5 :",
"Input([(0, 3600)], \"power\") states = (lambda ok, rst, clk, vdd, vdd09, vdd15, ddr24,",
"= req: {name : set(filter(lambda x: not empty_intersection(x, req, states), req.keys()))}) class MAX20751(Node):",
"[([(0, 12000)], {}, [], lambda node: node.indep(\"OUT2\"))], \"power\") def __init__(self, name, bus_addr): super(PowerSupply,",
"({ok: [{1}]}, \"\") ], \"POWERED_ON\" : []} )}, [\"CLK\", \"CLK_OK\", \"VCCINT\", \"VCCINT_IO\", \"VCCAUX\",",
"(\"cpu\", 0x0, ThunderX, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP, []), (\"U25\",",
"\"CLK\", {(\"U57\", \"CLK_IN\")}), (\"clk_main\", \"U57\", \"CLK\", {(\"U11\", \"CLK_IN\"), (\"U16\", \"CLK_IN\")}), (\"clock_flol\", \"bmc\", \"B_CLOCK_FLOL\",",
"1200)], vcc_ddr24 : [(1200, 1200)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)],",
": [(2400, 2600)], vttddr24 : [(700, 800)], vttddr13 : [(700, 800)]}, \"%s should",
"{}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\"}, {\"VR_ON\"}, after_set = {\"VR_ON\"}),",
"vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], ############################# vtt_ddr13 : [(550, 1700)],",
"24), #but I adjusted it to match the schematics, so I know which",
"2000)], \"power\") #this is sys_1v8.... VCCINT = Input([(0, 1000)], \"power\") MGTAVCC = Input([(0,",
"50)], io33 : [(3140, 3460)]}, \"wait for %s to stabilize\" %(io33)), ({vdd :",
"14000)]} BUS = Input([{0, 1}], \"bus\") EN = Input([{0, 1}], \"logical\") V_PWR =",
"\"\"), ({vcc : [(873, 927)]}, \"\"), ({io : [(873, 927)]}, \"\"), ({aux :",
"0)], vtt_ddr24 : [(0, 0)], vtt : [(0, 0)], mgtaux_l : [(0, 0)],",
"\"VIN\" : [(thresh, 13200)], \"EN_2\" : [{1}], \"BUS\" : [{1}]}, {}, dependency_update =",
"#\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX_EVAL3) class Bus(Node):",
"\"IC9\", \"V_OUT\", {(\"U35\", \"VMON5_ATT\"), (\"U40\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR24\")}), #add NCP nodes (\"vtt_ddrfpga24\", \"U40\",",
"{\"VR_ON\": [{0}], \"VDD33\":[(0, 3630)], \"VDDH\":[(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_off)), ], \"power\", Wire.voltage_set) def",
"2000)], \"power\") VCCO_VCC_DDR13 = Input([(0, 3400)], \"power\") VADJ_1V8 = Input([(0, 2000)], \"power\") #not",
"__init__(self, name, bus_addr): super(BMC, self).__init__(name, bus_addr, BMC) self.configured = False def configure(self): if",
"= Output([(0, 5000)], [ Constraint([(5000, 5000)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\" : [{1}]})),",
"self.loop1 = loop1 self.loop2 = loop2 self.l1_addr = l1_addr self.l2_addr = l2_addr self.implicit_off",
"1.01)) super(MAX8869, self).__init__(name, bus_addr, MAX8869) class MAX15053(Node): implicit_on = lambda _, threshold: {\"V_IN\"",
"MAX20751, [900, \"max20751_vccint_fpga\"]), #VCCINT_FPGA p 97 (\"U43\", 0x0, ISL, []), #VDD_DDRFPGA13 p 93",
"= [(self.default, self.default)] return except State_Space_Error: pass class NCP(Node): implicit_on = {\"VRI\" :",
"name)) VMON4 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4\", name)) VMON4_ATT =",
"4300)], \"EN_PWR\" : [{0, 1}], \"EN_VTT\" : [(0, 830)]} VCC = Input([(0, 6000)],",
"set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)], \"EN\" :",
"Output([(0, 1520)], [Constraint([], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_on), state_update=",
"6000)], \"power\") EN_PWR = Input([{0, 1}], \"logical\") EN_VTT = Input([(0, 12000)], \"power\") VID",
"the CPU names its ddr bank voltage DDR02 (not 24), #but I adjusted",
"+ 1), ([(\"VREF\", 0), (\"VRI\", 0)]))]), Constraint([(0, 0)], {\"VRI\" : [(0, 868)], \"VCC\"",
"\"VMON11_ATT\"), (\"fpga\", \"VCCAUX\")}), (\"en_util33\", \"U35\", \"OUT6\", {(\"IC10\", \"EN\")}), (\"util33\", \"IC10\", \"V_OUT\", {(\"U35\", \"VMON3_ATT\"),",
"13200)], \"EN\" : [{0}]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\")), ], \"power\", Wire.ir_set) VOUT_2 = lambda",
": [(873, 927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"), ({vcco : [(1746, 1854)],",
"return {self.VIN.name : [(0, self.threshold-1)], self.VCC.name : [(0, 2500)]} def update(self, states): try:",
"def bus_req_off(self): return {} def update(self, states): try: intersect(states[self.V_OUT.name], [(500, 1520)]) self.current =",
"#add NCP nodes (\"vtt_ddrfpga13\", \"U39\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR13\")}), (\"en_sys_2v5_24\", \"U35\", \"OUT8\", { (\"IC9\",",
"[(0, 0)], vcc_ddr13 : [(0, 0)], vcc_ddr24 : [(0, 0)], vtt_ddr13 : [(0,",
"{\"VR_ON\"}), partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\", \"VR_ON\"}, set())])), Constraint([(0, 0)], {\"VR_ON\": [{0}], \"VDD33\":[(0,",
"97 (\"U43\", 0x0, ISL, []), #VDD_DDRFPGA13 p 93 (\"U47\", 0x0, ISL, []), #DD_DDRFPGA24",
"\"power\") V5_PSU = Output([(0, 5000)], [ Constraint([(5000, 5000)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\"",
"(\"U48\", \"VS\"), (\"U37\", \"VDD33\"), (\"U41\", \"VDD33\"), (\"IC5\", \"V_IN\"), (\"IC6\", \"V_IN\"), (\"IC7\", \"V_IN\"), (\"IC8\",",
"\"VCCO_VCC_DDR24\"), (\"U40\", \"VRI\")}), (\"b_cdv_1v8\", \"bmc\", \"B_CDV_1V8\", {(\"U26\", \"VID\"), (\"U30\", \"VID\")}), (\"b_fdv_1v8\", \"bmc\", \"B_FDV_1V8\",",
"INA226, [\"ina226_ddr_cpu_24\"]), #TODO: add real names of MAX15301s (\"IC12\", 0x11, MAX15301, [1800, \"max15301_vcc1v8_fpga\"]),",
"{\"EN\": [{0}]}))], \"power\") V5SB_PSU = Output([(5000, 5000)], [Constraint([(5000, 5000)], {}, partial(Constraint.implicit, \"V5SB_PSU\", {}))],",
"(\"cpu\", \"VTT_DDR24\")}), (\"pll_ref_clk\", \"U11\", \"CLK\", {(\"cpu\", \"PLL_REF_CLK\")}), (\"bus\", \"power_bus\", \"BUS\", {(\"IC10\", \"BUS\"), (\"IC12\",",
"(lambda ok, rst, clk, vdd, vdd09, vdd15, ddr24, ddr24_2v5, ddr13, ddr13_2v5, vttddr24, vttddr13,",
"pass class Oscillator(Node): VDD = Input([(0, 3600)], \"power\") CLK = Output([(0, 0), (3300,",
": Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh, 13200)],",
"\"\"), ({mgtavcc : [(873, 927)]}, \"\"), ({vtt : [(1164, 1236)]}, \"\"), ({mgtaux_l :",
"4400)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass class NCP(Node): implicit_on =",
"\"VCCINT_IO\", \"VCCAUX\", \"VCCO_1V8\", \"VADJ_1V8\", \"VCCO_2V5_DDR13\", \"VCCO_2V5_DDR24\", \"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\",",
"= {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]} implicit_off = {\"VR_ON\": [{0}], \"VDD33\": [0,",
"EN2 = Input([{0, 1}], \"logical\") states = (lambda vdd33, vdd, en1, en2: {",
"50)]}, partial(Constraint.explicit, \"CLK\", {\"VDD\", \"CLK_IN\"}, set())), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\":",
"name, bus_addr, voltage): self.implicit_on = self.implicit_on(int(voltage * 1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01))",
"Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN_2\"",
"\"\"), ({vdd09 : [(870, 930)], vdd15 : [(1450, 1550)]}, \"\"), ({ddr24 : [(1425,",
"vtt : [(1164, 1236)], mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746, 1854)], mgtavcc",
"SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_nodes = [ (\"power_bus\", 0x0, Bus,",
"[(0, 0)], vttddr13 : [(0, 0)], io33 : [(0, 0)] }, { \"POWERED_ON\"",
"ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570, 630)], vttddr13 : [(570, 630)], io33",
"\"main_psu\", \"BMC_VCC_3V3\", {(\"U20\", \"VCC\"), (\"U35\", \"VCC\")}), #add 5vsb if added bmc (\"12v_cpu0_psup\", \"psu_cpu0\",",
"\"U43\", \"VOUT\", {(\"U48\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR13\"), (\"U39\", \"VRI\")}), #vcco (\"vdd_ddrfpga24\", \"U47\", \"VOUT\", {(\"U44\",",
"partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"EN\": [{0}], \"V_PWR\": [(0, 14000)]},",
"0)], mgtaux_r : [(0, 0)], mgtavcc : [(0, 0)], ok : [{0}] },",
"supplies to connect.. :') VDD_DDR24 = Input([(0, 1650)], \"power\") VDD_2V5_DDR24 = Input([(0, 3300)],",
"\"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, after_set={\"EN_PWR\"}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"}), partial(Constraint.implicit, \"VOUT\",",
"\"init_device('%s', False)\" % self.loop1, \"init_device('%s', False)\" % self.loop2 ] class FPGA(Stateful_Node): CLK =",
"(\"fpga\", \"VCCINT_IO\")}), (\"en_vdd_ddrfpga13\", \"U35\", \"OUT18\", {(\"U43\", \"EN_PWR\")}), (\"en_vdd_ddrfpga24\", \"U35\", \"OUT19\", {(\"U47\", \"EN_PWR\")}), (\"vdd_ddrfpga13\",",
": [(1140, 3400)], ##### VALUES FOR EVAL 3: ##### vcc_ddr13 : [(1200, 1200)],",
"range(8 - len(binary))) for i in binary: multidim.append({int(i)}) return multidim def isl_outputs(): outputs",
"1.06), 2700), 5500)], \"V_EN\" : [{1}]} implicit_off = lambda _, threshold: {\"V_IN\" :",
"name: node.isppac_monitor(\"VMON5\", name)) VMON5_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5_ATT\", name))",
": [(1450, 1550)]}, \"\"), #### REGULAR TRANSITION STEP ######### #({ddr24 : [(1425, 1575)],",
"isppac_monitor(self, pinname, wire_name, multiplier = 1): def fun(value, _, node=self, wire_name=wire_name, pinname=pinname, multiplier=multiplier):",
"[], lambda node: node.indep(\"CLK\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Clock, self).__init__(name, bus_addr,",
"self.configured = False def configure(self): if self.configured: return [] else: self.configured = True",
"lambda node, name: node.isppac_monitor(\"VMON7\", name)) VMON7_ATT = Input([(0, 5734)], \"monitor\", lambda node, name:",
"partial(Constraint.implicit, \"BMC_VCC_3V3\", {}))], \"power\") V5_PSU = Output([(0, 5000)], [ Constraint([(5000, 5000)], {\"EN\": [{1}]},",
"(\"IC15\", 0x0, MAX15053, [2500]), #2V5_CPU13 p 71 (\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24 p",
"vadj : [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 : [(0, 0)], vcc_ddr13",
"3630)], \"VIN\" : [(thresh, 13200)], \"EN\" : [{1}], \"BUS\": [{1}]}, {}, dependency_update =",
"\"CLK\", \"implicit_off\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr, device): self.device = device self.configured",
"set(), set()))], \"logical\", Wire.fpga_clk_ok) B_PSUP_ON = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_PSUP_ON\",",
"lambda node, name: node.isppac_monitor(\"VMON2_ATT\", name)) VMON3_ATT = Input([(0, 5734)], \"monitor\", lambda node, name:",
"\"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA, self).__init__(name, bus_addr, \"POWERED_DOWN\",",
"try: intersect(states[self.VIN.name], [(self.threshold, 13200)]) except State_Space_Error: self.configured = False return try: intersect(states[self.VCC.name], [(2900,",
"({vcc : [(0, 0)]}, \"\"), ], \"POWERED_DOWN\" : [] }), \"POWERED_ON\" : PowerState({",
"only changes classes of ThunderX and FPGA to EVAL 3 versions enzian_nodes_EVAL3 =",
": [(940, 980)]}, \"\"), ({vdd09 : [(870, 930)], vdd15 : [(1450, 1550)]}, \"\"),",
"\"power\") VBUS = Input([(0, 40000)], \"monitor\", lambda node, name: node.ina_monitor(name)) def __init__(self, name,",
"500, 2700), 5500)], \"SHDN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" :",
": [(0, 0), (3300, 3300), (50, 50)], vdd : [(940, 980)], vdd09 :",
"_, default, threshold: Output([(0, default)], [Constraint([(default, default)], {\"V_IN\" : [(max(int(threshold * 1.06), 2700),",
"{\"EN\": [{0}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{0}]}))], \"power\") V12_PSU = Output([(0, 12000)], [ Constraint([(12000,",
"name: node.isppac_monitor(\"VMON7_ATT\", name)) VMON8 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8\", name))",
"(3300, 3300), (0, 0)], {\"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]}, partial(Constraint.implicit, \"CLK\",",
"lambda _, thresh : {\"VCC\" : [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN_2\"",
"14000)]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\"))) return outputs class ISL(Node): implicit_off = {\"VCC\" : [(0,",
"\"VMON4_ATT\"), (\"U39\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR13\")}), #add NCP nodes (\"vtt_ddrfpga13\", \"U39\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR13\")}),",
"(50, 50)], {\"VDD\": [(2600, 3600)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(2600, 3600)]})), Constraint([(0, 0), (3300,",
"(\"mgtavcc_fpga\", \"U37\", \"V_OUT\", {(\"U35\", \"VMON7\"), (\"fpga\", \"MGTAVCC\")}), (\"en_vccint_fpga\", \"U35\", \"OUT9\", {(\"U51\", \"VR_ON\")}), (\"vccint_fpga\",",
"set())), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0),",
"14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_off)) ], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr, default, device):",
"CPU2(Stateful_Node): VDD = Input([(0, 2600)], \"power\") EN1 = Input([{0, 1}], \"logical\") EN2 =",
"= Input([(0, 1210)], \"power\") VDD_09 = Input([(0, 945)], \"power\") VDD_15 = Input([(0, 1650)],",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8_ATT\", name)) VMON9 = Input([(0, 5734)],",
"False try: intersect(states[self.VDD33.name], [(0, 2800)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass",
"self.default)] return except State_Space_Error: pass class Oscillator(Node): VDD = Input([(0, 3600)], \"power\") CLK",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON6\", name)) VMON7 = Input([(0, 5734)], \"monitor\",",
"1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], \"logical\") VOUT = Output([(0,",
"partial(Constraint.implicit, \"V5SB_PSU\", {}))], \"power\") BMC_VCC_3V3 = Output([(3300, 3300)], [Constraint([(3300, 3300)], {}, partial(Constraint.implicit, \"BMC_VCC_3V3\",",
"\"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\", \"U35\", \"OUT17\", {(\"IC11\", \"EN\")}), (\"vadj_1v8_fpga\", \"IC11\", \"V_OUT\", {(\"fpga\", \"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\", \"U35\",",
": [(1425, 1575)], #ddr24: [(1425, 1575)], ####### FOR EVAL 3 ########### ddr24: [(1200,",
"lambda default : Output([(0, 5250)], [Constraint([], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]}, partial(Constraint.implicit, \"V_OUT\",",
"REGULAR VALUES: ####### #vcc_ddr13 : [(1140, 3400)], #vcc_ddr24 : [(1140, 3400)], ##### VALUES",
"Input([(0, 3300)], \"power\") #actually, the CPU names its ddr bank voltage DDR02 (not",
"+ \" stabilized\"), ({en1 : [{1}]}, \"\"), ({vdd: [(2000, 2200)]}, \"\") ], \"POWERED_ON\":",
"\"clock\") VDD = Input([(0, 1210)], \"power\") VDD_09 = Input([(0, 945)], \"power\") VDD_15 =",
"\"power\") VCCAUX = Input([(0, 2000)], \"power\") states = (lambda clk, ok, vcc, io,",
"SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_wires = [ (\"b_psup_on\", \"bmc\", \"B_PSUP_ON\",",
"([(\"VREF\", 0), (\"VRI\", 0)]))]), Constraint([(0, 0)], {\"VRI\" : [(0, 868)], \"VCC\" : [(2375,",
"[0, 2800], \"VDDH\": [(0, 8499)]} VDD33 = Input([(0, 4000)], \"power\") BUS = Input([{0,",
": [(0, 0)], ddr24 : [(0, 0)], ddr24_2v5 : [(0, 0)], ddr13 :",
"ISPPAC, [\"pac_cpu\"]), #cpu ISPPAC (\"U35\", 0x61, ISPPAC, [\"pac_fpga\"]), #fpga ISPPAC (\"U44\", 0x40, INA226,",
"[{1}], \"EN_VTT\" : [(870, 14000)]}, {}, \\ dependency_update= (Constraint.is_default, \\ [partial(Constraint.implicit, \"VOUT\", {\"VID\"",
"VS = Input([(0, 6000)], \"power\") VBUS = Input([(0, 40000)], \"monitor\", lambda node, name:",
"\"EN_2\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)], \"EN_2\"",
"\"\"), ({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1425, 1575)],",
"for i in range(0, 20): self.generate_output(i) super(ISPPAC, self).__init__(name, bus_addr, ISPPAC) self.configured = False",
"(\"U31\", \"VBUS\"), (\"cpu\", \"VDD_DDR24\"), (\"U25\", \"VRI\")}), (\"vdd_core_en\", \"U20\", \"OUT6\", {(\"U34\", \"EN\")}), (\"vdd_core\", \"U34\",",
"(\"12v_cpu0_psup\", \"psu_cpu0\", \"OUT\", {(\"U20\", \"VMON1_ATT\"), (\"IC4\", \"V_PWR\"), (\"U26\", \"EN_VTT\"), (\"U30\", \"EN_VTT\"), (\"U34\", \"VIN\")}),",
"\"\"), ##### REGULAR TRANSITION STEP ###### #({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400,",
"= (Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\"}, {\"VR_ON\"}, after_set = {\"VR_ON\"}), partial(Constraint.explicit, \"V_OUT\",",
"(\"clk_main\", \"U57\", \"CLK\", {(\"U11\", \"CLK_IN\"), (\"U16\", \"CLK_IN\")}), (\"clock_flol\", \"bmc\", \"B_CLOCK_FLOL\", {(\"fpga\", \"CLK_OK\")}), (\"fpga_clk\",",
"node.isppac_monitor(\"VMON5\", name)) VMON5_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5_ATT\", name)) VMON6",
"], \"POWERED_ON\": []}) }, [\"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU2, self).__init__(name,",
"[(570, 630)], vttddr13 : [(570, 630)], io33 : [(3140, 3460)], }, { \"POWERED_DOWN\":",
"[{0}]}))], \"power\") V5SB_PSU = Output([(5000, 5000)], [Constraint([(5000, 5000)], {}, partial(Constraint.implicit, \"V5SB_PSU\", {}))], \"power\")",
": [(870, 930)], vdd15 : [(1450, 1550)]}, \"\"), #### REGULAR TRANSITION STEP #########",
"= [(default, default)] self.V_OUT = MAX20751.V_OUT(default) super(MAX20751, self).__init__(name, bus_addr, MAX20751) def bus_req(self): return",
"[{0}]}))], \"power\") V12_PSU = Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit,",
"3600)]})), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\":",
"commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='%s')\" % ( wire_name, 0.00095 * list(value[0])[0] * multiplier,",
"[3300, \"max15301_util_3v3\"]), #UTIL_3V3 p.90 (\"IC4\", 0x10, MAX15301, [1500, \"max15301_15_vdd_oct\"]), #1V5_VDD_OCT p.70 (\"IC11\", 0x12,",
"(\"IC9\", \"V_EN\")}), (\"sys_2v5_24\", \"IC9\", \"V_OUT\", {(\"U35\", \"VMON5_ATT\"), (\"U40\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR24\")}), #add NCP",
"\"B_CLOCK_FLOL\", {(\"fpga\", \"CLK_OK\")}), (\"fpga_clk\", \"U16\", \"CLK\", {(\"fpga\", \"CLK\")}), (\"en_mgtvccaux_l\", \"U35\", \"OUT11\", {(\"IC5\", \"SHDN\")}),",
"__init__(self, name, bus_addr): super(NCP, self).__init__(name, bus_addr, NCP) class MAX8869(Node): implicit_on = lambda _,",
"{0, 1}], [ Constraint([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1},",
"device='%s', monitor='%s')\" % ( wire_name, node.device, pinname )) else: commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s',",
"name: node.isppac_monitor(\"VMON9\", name)) VMON10 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON10\", name))",
"4000)], \"power\") BUS = Input([{0, 1}], \"bus\") VDDH = Input([(0, 23000)], \"power\") #slave",
"return (True, \"\\n\".join(commands)) return fun def configure(self): if self.configured: return [] else: self.configured",
"return except State_Space_Error: pass try: intersect(states[self.VDDH.name], [(8500, 14000)]) self.current = [(self.default, self.default)] return",
"[(600, 5250)]) self.current = states[self.V_OUT.name] self.is_default = True return except State_Space_Error: self.is_default =",
"[(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN_2\" : [{1}], \"BUS\" : [{1}]}, {},",
"PowerState({ ok : [{0}], rst : [{0}], clk : [(0, 0), (3300, 3300),",
"= Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") VDD = Input([(0, 1210)], \"power\")",
"should have stabilized by now\" %vdd), ({ok : [{1}]}, \"must have written pll_mul",
"class IR(Node): implicit_off = lambda _, thresh : {\"VCC\" : [(0, 2500)], \"VIN\"",
"stabilize\" %(io33)), ({vdd : [(940, 980)]}, \"\"), ({vdd09 : [(870, 930)], vdd15 :",
"super(Oscillator, self).__init__(name, bus_addr, Oscillator) class SI5395(Node): implicit_on = {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0,",
"PowerState({ clk: [(0, 0), (3300, 3300), (50, 50)], vcc : [(873, 927)], io",
"[(1746, 1854)]}, \"\"), ##### REGULAR TRANSITION STEP ###### #({vcco : [(1746, 1854)], vcc_2v5_ddr13",
"1}, {0, 1}, {0, 1}], \"logical\") VOUT = Output([(0, 1600)], isl_outputs(), \"power\") def",
"self).__init__(name, bus_addr, IR) def bus_req(self): return {self.VIN.name : [(self.threshold, 13200)], self.VCC.name : [(2900,",
"+ vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"), ({en2 : [{1}], vdd:",
"\"implicit_off\")), Constraint([{0}], {\"VCC\": [(0, 4500)], \"VCC_IN\": [(0, 2000)]}, partial(Constraint.implicit, name, \"implicit_off\"))], \"logical\", Wire.pin_set)",
"(\"vdd_core_en\", \"U20\", \"OUT6\", {(\"U34\", \"EN\")}), (\"vdd_core\", \"U34\", \"VOUT\", {(\"U20\", \"VMON4\"), (\"cpu\", \"VDD\")}), (\"vdd_oct_en_l2\",",
"%(io33)), ({vdd : [(940, 980)]}, \"\"), ({vdd09 : [(870, 930)], vdd15 : [(1450,",
"\"V33_PSU\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{0}]}))], \"power\") V12_PSU",
"node, name: node.isppac_monitor(\"VMON2_ATT\", name)) VMON3_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON3_ATT\",",
"\"EN_2\" : [{1}], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT_2\", {\"VCC\",",
"ddr13: [(1200, 1200)], ################################# ddr24_2v5 : [(2400, 2600)], #not imposed by the cpu,",
"Input([(0, 1900)], \"power\") MGTVCCAUX_R = Input([(0, 1900)], \"power\") VCCO_1V8 = Input([(0, 2000)], \"power\")",
"thresh)], [Constraint([(default, default)], {\"V_IN\" : [(max(thresh + 500, 2700), 5500)], \"SHDN\" : [{1}]},",
"set(), set()))], \"logical\", Wire.vid_set) def __init__(self, name, bus_addr): super(BMC, self).__init__(name, bus_addr, BMC) self.configured",
"def __init__(self, name, bus_addr): super(FPGA_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA_EVAL3) class ThunderX(Stateful_Node): PLL_DC_OK =",
"\"V_EN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def __init__(self, name, bus_addr, voltage):",
"(50, 50)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.explicit,",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_CDV_1V8\",",
"= Input([(0, 13900)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON1_ATT\", name, 0.4125)) VMON2_ATT = Input([(0,",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON6\", name)) VMON7 = Input([(0, 5734)],",
"MAX15053, [2500]), #2V5_CPU13 p 71 (\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\",",
"#2V5_CPU13 p 71 (\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA_EVAL3,",
"\"OUT7\", {(\"U34\", \"EN_2\")}), (\"0v9_vdd_oct\", \"U34\", \"VOUT_2\", {(\"U20\", \"VMON5\"), (\"cpu\", \"VDD_09\")}), (\"en_1v5_vdd_oct\", \"U20\", \"OUT8\",",
"of the Enzian nodes, only changes classes of ThunderX and FPGA to EVAL",
"\"power\", Wire.ir_set) def __init__(self, name, bus_addr, threshold, device, loop1, loop2, l1_addr, l2_addr): self.configured",
"en1, en2: { \"POWERED_DOWN\" : PowerState({vdd33: [(0, 0)], vdd : [(0, 0)], en1",
"(\"IC15\", \"V_IN\"), (\"IC16\", \"V_IN\"), (\"cpu\", \"VDD_IO33\"), (\"U34\", \"VCC\"), (\"U57\", \"VDD\"), (\"U11\", \"VDD\"), (\"U16\",",
"PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0, 1}], \"logical\") PLL_REF_CLK = Input([(0,",
"\"CLK_IN\")}), (\"clock_flol\", \"bmc\", \"B_CLOCK_FLOL\", {(\"fpga\", \"CLK_OK\")}), (\"fpga_clk\", \"U16\", \"CLK\", {(\"fpga\", \"CLK\")}), (\"en_mgtvccaux_l\", \"U35\",",
"Input([(0, 2000)], \"power\") #not found in fpga boot sequ; filled in like VCCO_VCC_DDR",
"FPGA, comments indicate changes class FPGA_EVAL3(Stateful_Node): CLK = Input([(0, 0), (3300, 3300), (0,",
"{ \"POWERED_DOWN\" : PowerState({ ok : [{0}], rst : [{0}], clk : [(0,",
"(\"c_reset_n\", \"bmc\", \"C_RESET_N\", {(\"cpu\", \"CHIP_RESET_L\")}), (\"pll_dc_ok\", \"bmc\", \"C_PLL_DC_OK\", {(\"cpu\", \"PLL_DC_OK\")}), (\"en_vdd_ddrcpu13\", \"U20\", \"OUT11\",",
"\"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN_2\"}, after_set = {\"EN_2\"}), partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\",",
"[(0, 5500)], \"SHDN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def __init__(self, name,",
"0x60, 0x62]), #VDD_CORE, VDD_OCT p 77 (\"U26\", 0x0, ISL, []), #VDD_DDRCPU13 p 73",
"Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"SHDN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ],",
"\"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\"}, {\"VR_ON\"}, after_set = {\"VR_ON\"}), partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\",",
"[{1}]}, {}, dependency_update=(Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"V_OUT\",",
"1650)], \"power\") VDD_DDR13 = Input([(0, 1650)], \"power\") VDD_2V5_DDR13 = Input([(0, 3300)], \"power\") #actually,",
"{\"EN\" : [{1}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit,",
"ThunderX and FPGA to EVAL 3 versions enzian_nodes_EVAL3 = [ (\"power_bus\", 0x0, Bus,",
"intersect(states[self.VDD.name], [(2600, 3600)]) except State_Space_Error: self.configured = False def configure(self): if self.configured: return",
"[1800, \"max15301_vcc1v8_fpga\"]), #VCCIV8_FPGA p 92 (\"IC10\", 0x1B, MAX15301, [3300, \"max15301_util_3v3\"]), #UTIL_3V3 p.90 (\"IC4\",",
"self.is_default = False class IR(Node): implicit_off = lambda _, thresh : {\"VCC\" :",
"= Input([{0, 1}], \"logical\") EN2 = Input([{0, 1}], \"logical\") states = (lambda vdd,",
": [(0, 4300)], \"EN_PWR\" : [{0, 1}], \"EN_VTT\" : [(0, 830)]} VCC =",
": [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], ##### REGULAR VALUES: ####### #vcc_ddr13 :",
"vdd09 : [(0, 0)], vdd15 : [(0, 0)], ddr24 : [(0, 0)], ddr24_2v5",
"req = req: {name : set(filter(lambda x: not empty_intersection(x, req, states), req.keys()))}) class",
"vttddr24 : [(700, 800)], vttddr13 : [(700, 800)]}, \"%s should have stabilized by",
"\"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\",",
"], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd33: [(3000, 4000)], vdd : [(2000, 2500)], en1",
"2000)]}, partial(Constraint.implicit, name, \"implicit_off\"))], \"logical\", Wire.pin_set) setattr(self, name, output) def isppac_monitor(self, pinname, wire_name,",
"fpga? additional vcco thingy? (\"en_sys_2v5_13\", \"U35\", \"OUT7\", { (\"IC8\", \"V_EN\")}), (\"sys_2v5_13\", \"IC8\", \"V_OUT\",",
"voltage_min = math.floor(1600 - i * 6.25) voltage_max = math.ceil(1600 - i *",
"\"CLK_OK\", \"VCCINT\", \"VCCINT_IO\", \"VCCAUX\", \"VCCO_1V8\", \"VADJ_1V8\", \"VCCO_2V5_DDR13\", \"VCCO_2V5_DDR24\", \"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\",",
"1845)]}, \"\"), ##### FOR EVAL 3: ################### ({vcco : [(1746, 1854)], vcc_2v5_ddr13 :",
"rst, clk, vdd, vdd09, vdd15, ddr24, ddr24_2v5, ddr13, ddr13_2v5, vttddr24, vttddr13, io33 :",
"[(1425, 1575)], ####### FOR EVAL 3 ########### ddr24: [(1200, 1200)], ddr13: [(1200, 1200)],",
"{0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_CDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) def __init__(self,",
": [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"SHDN\" :",
"3300), (50, 50)], vdd : [(940, 980)], vdd09 : [(870, 930)], vdd15 :",
"Oscillator, []), ] enzian_nodes = [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0, PSU,",
": [(0, max(thresh + 499, 2699))], \"SHDN\" : [{0}]} V_IN = Input([(0, 6000)],",
"{(\"U20\", \"VMON4\"), (\"cpu\", \"VDD\")}), (\"vdd_oct_en_l2\", \"U20\", \"OUT7\", {(\"U34\", \"EN_2\")}), (\"0v9_vdd_oct\", \"U34\", \"VOUT_2\", {(\"U20\",",
"req = {} for node, _ in inputs: unite_dict(req, node.bus_req()) print(req) return req",
"\"V_EN\")}), #to NCP (\"2v5_cpu24\", \"IC16\", \"V_OUT\", {(\"U20\", \"VMON8_ATT\"), (\"U25\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR24\")}), (\"vtt_ddrcpu24\",",
"\"POWERED_ON\" : PowerState({ ok : [{1}], rst : [{1}], clk : [(0, 0),",
": [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 : [(0, 0)], vcc_ddr13 :",
"dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit,",
"en2: { \"POWERED_DOWN\" : PowerState({vdd : [(0, 0)], en1 : [{0}], en2 :",
"vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"), ({en2 : [{1}], vdd: [(2000,",
"[(0, 0)], vtt : [(0, 0)], mgtaux_l : [(0, 0)], mgtaux_r : [(0,",
"= Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK = Output([(0, 0), (3300,",
"vdd09 : [(870, 930)], vdd15 : [(1450, 1550)], ####### REGULAR VALUES ######### #ddr13",
": [(873, 927)]}, \"\"), ({io : [(873, 927)]}, \"\"), ({aux : [(1746, 1854)]},",
"Output([(0, 3300), (0, 60)], [([(0, 3300), (0, 60)], {}, [], lambda node: node.indep(\"CLK\"))],",
"1}], \"logical\") OUT = Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit,",
"i * 6.25) voltage_max = math.ceil(1600 - i * 6.25) outputs.append( Constraint( [(voltage_min,",
"[ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd : [(2300,",
"Input([(0, 1300)], \"power\") VCCINT_IO = Input([(0, 1000)], \"power\") VCCAUX = Input([(0, 2000)], \"power\")",
"SHDN = Input([{0, 1}], \"logical\") V_OUT = lambda _, default, thresh: Output([(0, thresh)],",
"(\"mgtvccaux_r\", \"IC6\", \"V_OUT\", {(\"U35\", \"VMON9\"), (\"fpga\", \"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\", \"U35\", \"OUT17\", {(\"IC11\", \"EN\")}), (\"vadj_1v8_fpga\",",
"(3300, 3300), (0, 50)], [ Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\": [(2600,",
"v_min=%.3f, v_max=%.3f, device='%s', monitor='%s')\" % ( wire_name, 0.00095 * list(value[0])[0] * multiplier, 0.00105",
"\"V_IN\"), (\"IC6\", \"V_IN\"), (\"IC7\", \"V_IN\"), (\"IC8\", \"V_IN\"), (\"IC9\", \"V_IN\"), (\"U51\", \"VDD33\")}), (\"en_mgtavtt_fpga\", \"U35\",",
"node.isppac_monitor(\"VMON11\", name)) VMON11_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11_ATT\", name)) VMON12",
"\"EN_VTT\" : [(870, 14000)]}, {}, \\ dependency_update= (Constraint.is_default, \\ [partial(Constraint.implicit, \"VOUT\", {\"VID\" :",
"\"VDD_IO33\"), (\"U34\", \"VCC\"), (\"U57\", \"VDD\"), (\"U11\", \"VDD\"), (\"U16\", \"VDD\"), (\"oscillator\", \"VDD\")}), #(\"12v_psup\", \"main_psu\",",
"3 versions enzian_nodes_EVAL3 = [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0, PSU, []),",
"\"U20\", \"OUT11\", {(\"U26\", \"EN_PWR\")}), (\"en_vdd_ddrcpu24\", \"U20\", \"OUT12\", {(\"U30\", \"EN_PWR\")}), (\"vdd_ddrcpu13\", \"U26\", \"VOUT\", {(\"U20\",",
"[(0, 5250)], \"EN_PWR\" : [{0}], \"EN_VTT\" : [(0, 14000)]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\"))) return",
"[]), ] enzian_nodes = [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0, PSU, []),",
"[{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd : [(2300, 2600)], en1 :",
"\"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA) #EVAL 3 version",
"STEP ###### #({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400,",
"self).__init__(name, bus_addr, MAX15053) def binary_multidimensional(decimal): binary = bin(decimal)[2:] #remove 0b prefix multidim =",
"vdd09, vdd15, ddr24, ddr24_2v5, ddr13, ddr13_2v5, vttddr24, vttddr13, io33 : { #pll_vdd, pll_ddr2,",
"3300), (0, 50)], [ Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)]},",
"[(2000, 2600)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name,",
"self.configured = False self.is_default = False self.threshold = threshold self.device = device self.loop1",
"DDR02 (not 24), #but I adjusted it to match the schematics, so I",
"class Oscillator(Node): VDD = Input([(0, 3600)], \"power\") CLK = Output([(0, 0), (3300, 3300),",
"name: node.isppac_monitor(\"VMON6\", name)) VMON7 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7\", name))",
"0x61, ISPPAC, [\"pac_fpga\"]), #fpga ISPPAC (\"U44\", 0x40, INA226, [\"ina226_ddr_fpga_24\"]), (\"U48\", 0x41, INA226, [\"ina226_ddr_fpga_13\"]),",
"6000)], \"power\") #reference input VREF = Output([(0, 6000)], [Constraint([(435, 1800)], {\"VRI\" : [(868,",
"False class IR(Node): implicit_off = lambda _, thresh : {\"VCC\" : [(0, 2500)],",
"({vtt : [(1164, 1236)]}, \"\"), ({mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746, 1854)]},",
"% self.loop2 ] class FPGA(Stateful_Node): CLK = Input([(0, 0), (3300, 3300), (0, 50)],",
"dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\"}, {\"VR_ON\"}, after_set = {\"VR_ON\"}), partial(Constraint.explicit,",
"{0, 1}], {}, partial(Constraint.explicit, \"B_FDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) B_CDV_1V8 = Output([{0, 1},",
"vcc_2v5_ddr24, vcc_ddr13, vcc_ddr24, vtt_ddr13, vtt_ddr24, vtt, mgtaux_l, mgtaux_r, mgtavcc: { \"POWERED_DOWN\" : PowerState({",
"(3300, 3300), (50, 50)], vcc : [(873, 927)], io : [(873, 927)], aux",
"3400)], \"power\") VCCO_VTT_DDR13 = Input([(0, 2000)], \"power\") #replace VREF VCCO_VTT_DDR24 = Input([(0, 2000)],",
"VMON5_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5_ATT\", name)) VMON6 = Input([(0,",
"(\"en_mgtvccaux_r\", \"U35\", \"OUT12\", {(\"IC6\", \"SHDN\")}), (\"mgtvccaux_l\", \"IC5\", \"V_OUT\", {(\"U35\", \"VMON8\"), (\"fpga\", \"MGTVCCAUX_L\")}), (\"mgtvccaux_r\",",
": [(940, 980)], vdd09 : [(870, 930)], vdd15 : [(1450, 1550)], ####### REGULAR",
"3 version of the THUNDERX: Comments indicate changes class ThunderX_EVAL3(Stateful_Node): PLL_DC_OK = Input([{0,",
"[2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA, []), (\"cpu\", 0x0, ThunderX, []), (\"bmc\",",
"[(self.default, self.default)] return except State_Space_Error: pass class NCP(Node): implicit_on = {\"VRI\" : [(868,",
"3040)]) self.is_default = True except State_Space_Error: self.is_default = False try: intersect(states[self.VIN.name], [(self.threshold, 13200)])",
"\"\"), ({io : [(873, 927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"), ({vcco :",
"\"V_OUT\", {(\"U35\", \"VMON12\"), (\"fpga\", \"VCCO_1V8\")}), #where to connect at fpga? additional vcco thingy?",
"+ 500, 2700), 5500)], \"SHDN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\"",
"which supplies to connect.. :') VDD_DDR24 = Input([(0, 1650)], \"power\") VDD_2V5_DDR24 = Input([(0,",
")) return (True, \"\\n\".join(commands)) return fun def configure(self): if self.configured: return [] else:",
"State_Space_Error: self.configured = False return try: intersect(states[self.VCC.name], [(2900, 3630)]) except State_Space_Error: self.configured =",
"\"VMON11\"), (\"cpu\", \"VTT_DDR13\")}), (\"en_2v5_cpu24\", \"U20\", \"OUT10\", {(\"IC16\", \"V_EN\")}), #to NCP (\"2v5_cpu24\", \"IC16\", \"V_OUT\",",
"Input([(0, 6000)], \"power\") EN_PWR = Input([{0, 1}], \"logical\") EN_VTT = Input([(0, 12000)], \"power\")",
"\"VMON1_ATT\"), (\"IC4\", \"V_PWR\"), (\"U26\", \"EN_VTT\"), (\"U30\", \"EN_VTT\"), (\"U34\", \"VIN\")}), (\"12v_cpu1_psup\", \"psu_cpu1\", \"OUT\", {(\"U35\",",
"[{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"V_EN\" : [{0}]},",
"0)], vcc : [(0, 0)], io : [(0, 0)], aux : [(0, 0)],",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON10\", name)) VMON11 = Input([(0, 5734)], \"monitor\", lambda node,",
"2800)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass try: intersect(states[self.VDDH.name], [(8500, 14000)])",
"(\"U11\", \"VDD\"), (\"U16\", \"VDD\"), (\"oscillator\", \"VDD\")}), #(\"12v_psup\", \"main_psu\", \"V12_PSU\", {}), #add bmc (\"5v_psup\",",
"voltage DDR02 (not 24), #but I adjusted it to match the schematics, so",
"monitor='VOLTAGE')\" % ( wire, 0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return",
"try: intersect(states[self.VDDH.name], [(8500, 14000)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass class",
"\"V_OUT\", {(\"U35\", \"VMON8\"), (\"fpga\", \"MGTVCCAUX_L\")}), (\"mgtvccaux_r\", \"IC6\", \"V_OUT\", {(\"U35\", \"VMON9\"), (\"fpga\", \"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\",",
"\"EN\")}), (\"vdd_core\", \"U34\", \"VOUT\", {(\"U20\", \"VMON4\"), (\"cpu\", \"VDD\")}), (\"vdd_oct_en_l2\", \"U20\", \"OUT7\", {(\"U34\", \"EN_2\")}),",
"def __init__(self, name, bus_addr, voltage): self.implicit_on = self.implicit_on(int(voltage * 1.01)) self.implicit_off = self.implicit_off(int(voltage",
"\"CLK\", {\"VDD\": [(2600, 3600)]})), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]},",
"\"VMON7_ATT\"), (\"U24\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR13\")}), (\"vtt_ddrcpu13\", \"U24\", \"VREF\", {(\"U20\", \"VMON11\"), (\"cpu\", \"VTT_DDR13\")}), (\"en_2v5_cpu24\",",
"\"ir3581\", \"ir3581_loop_vdd_core\", \"ir3581_loop_0v9_vdd_oct\", 0x60, 0x62]), #VDD_CORE, VDD_OCT p 77 (\"U26\", 0x0, ISL, []),",
"BUS = Input([{0, 1}], \"bus\") #loop 1 and loop 2 will have different",
"V_OUT = lambda default : Output([(0, 5250)], [Constraint([], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]},",
"[(0, 0)]}, \"\"), ], \"POWERED_DOWN\" : [] }), \"POWERED_ON\" : PowerState({ clk: [(0,",
"3: ##### vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], ############################# vtt_ddr13 :",
": [{0}]}, { \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\"",
"\"\"), ({vdd09 : [(870, 930)], vdd15 : [(1450, 1550)]}, \"\"), #### REGULAR TRANSITION",
": [(0, 0)], en1 : [{0}], en2 : [{0}]}, { \"POWERED_ON\": [ ({en1",
"B_CDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"\"VS\"), (\"U37\", \"VDD33\"), (\"U41\", \"VDD33\"), (\"IC5\", \"V_IN\"), (\"IC6\", \"V_IN\"), (\"IC7\", \"V_IN\"), (\"IC8\", \"V_IN\"),",
"name)) VMON3_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON3_ATT\", name)) VMON4 =",
": [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN_2\" : [{0}]} device = \"ir3581\"",
"def __init__(self, name, bus_addr): super(PowerConsumer, self).__init__(name, bus_addr, PowerConsumer) class CPU2(Stateful_Node): VDD = Input([(0,",
": [(0, 2374)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\"))], \"power\") def __init__(self, name, bus_addr): super(NCP, self).__init__(name,",
"0)]}, \"\"), ({mgtavcc : [(0, 0)]}, \"\"), ({vcco: [(0, 0)], vcc_2v5_ddr13 : [(0,",
"[(2000, 2500)], en1 : [{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({en2 :",
"enzian_nodes = [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0, PSU, []), (\"psu_cpu1\", 0x0,",
"[(0, 0)]}, \"\"), ({aux : [(0, 0)]}, \"\"), ({io : [(0, 0)]}, \"\"),",
"{ (\"IC8\", \"V_EN\")}), (\"sys_2v5_13\", \"IC8\", \"V_OUT\", {(\"U35\", \"VMON4_ATT\"), (\"U39\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR13\")}), #add",
"0)]))]), Constraint([(0, 0)], {\"VRI\" : [(0, 868)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\",",
"SI5395) def update(self, states): try: intersect(states[self.VDD.name], [(2600, 3600)]) except State_Space_Error: self.configured = False",
": [(0, 0)], vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0, 0)], vtt :",
"class ThunderX(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0, 1}], \"logical\") PLL_REF_CLK",
"({en1 : [{1}]}, \"\"), ({vdd: [(2000, 2200)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD33\",",
"\"power\") EN1 = Input([{0, 1}], \"logical\") EN2 = Input([{0, 1}], \"logical\") states =",
"Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300,",
"ISPPAC) self.configured = False def generate_output(self, number): name = \"OUT\" + str(number) output",
"[(940, 980)], vdd09 : [(870, 930)], vdd15 : [(1450, 1550)], ddr13 : [(1425,",
"3400)], \"power\") VCCO_2V5_DDR13 = Input([(0, 3400)], \"power\") VCCO_VCC_DDR24 = Input([(0, 3400)], \"power\") VCCO_VTT_DDR13",
"2000)], \"power\") #not found in fpga boot sequ; filled in like VCCO_VCC_DDR voltages",
"= Input([{0, 1}], \"bus\") EN = Input([{0, 1}], \"logical\") V_PWR = Input([(0, 14000)],",
"\"U37\", \"V_OUT\", {(\"U35\", \"VMON7\"), (\"fpga\", \"MGTAVCC\")}), (\"en_vccint_fpga\", \"U35\", \"OUT9\", {(\"U51\", \"VR_ON\")}), (\"vccint_fpga\", \"U51\",",
"\"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA, self).__init__(name, bus_addr,",
"self.configured = True return [ \"init_device('isl6334d_ddr_v', False)\" ] class CPU_3(Stateful_Node): VDD33 = Input([(0,",
"= node.configure() commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095 * list(value[0])[0],",
"\"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU2, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU2) class BMC(Node):",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON8\", name)) VMON8_ATT = Input([(0, 5734)], \"monitor\", lambda node,",
"\"bmc\", \"B_PSUP_ON\", {(\"psu_cpu0\", \"EN\"), (\"psu_cpu1\", \"EN\"), (\"main_psu\", \"EN\")}), (\"3v3_psup\", \"main_psu\", \"V33_PSU\", {(\"U20\", \"VMON3_ATT\"),",
"3300), (50, 50)]}, \"\"), ({vcc : [(873, 927)]}, \"\"), ({io : [(873, 927)]},",
"= (lambda ok, rst, clk, vdd, vdd09, vdd15, ddr24, ddr24_2v5, ddr13, ddr13_2v5, vttddr24,",
"1550)]}, \"\"), #### REGULAR TRANSITION STEP ######### #({ddr24 : [(1425, 1575)], ddr24_2v5 :",
"0)], {\"V_IN\" : [(0, 5500)], \"V_EN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\")",
"self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU2) class BMC(Node): B_CLOCK_FLOL = Output([{0, 1}], [Constraint([{0, 1}], {},",
"\"power\", lambda node, name: node.isppac_monitor(\"VCCINP\", name)) VCC = Input([(0, 4500)], \"power\", lambda node,",
"default, device): self.device = device self.default = default self.is_default = False self.current =",
"state_union, SET, empty_intersection import math from functools import partial import z3 class INA226(Node):",
"False)\" % self.loop2 ] class FPGA(Stateful_Node): CLK = Input([(0, 0), (3300, 3300), (0,",
"[(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.implicit, \"CLK\", \"implicit_on\"), dependency_update=",
"[ Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0),",
"{(\"U35\", \"VMON6\"), (\"fpga\", \"VCCINT\")}), (\"en_sys_1v8\", \"U35\", \"OUT16\", {(\"IC7\", \"V_EN\")}), (\"sys_1v8\", \"IC7\", \"V_OUT\", {(\"U35\",",
"\"U20\", \"OUT12\", {(\"U30\", \"EN_PWR\")}), (\"vdd_ddrcpu13\", \"U26\", \"VOUT\", {(\"U20\", \"VMON9\"), (\"U27\", \"VBUS\"), (\"cpu\", \"VDD_DDR13\"),",
"V_PWR = Input([(0, 14000)], \"power\") V_OUT = lambda default : Output([(0, 5250)], [Constraint([],",
"\"C_PLL_DC_OK\", set(), set()))], \"logical\", Wire.cpu_clk_ok) B_FDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1},",
"CPU2) class BMC(Node): B_CLOCK_FLOL = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_CLOCK_FLOL\", set(),",
"\"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA_EVAL3) class ThunderX(Stateful_Node): PLL_DC_OK",
"\"POWERED_ON\": []}) }, [\"VDD33\", \"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU_3, self).__init__(name,",
": [(1425, 1575)], ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700, 800)], vttddr13 :",
"= Input([{0, 1}], \"logical\") OUT = Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" :",
": [(550, 1700)], vtt : [(1164, 1236)], mgtaux_l : [(1746, 1854)], mgtaux_r :",
"(3300, 3300), (0, 50)], \"clock\") CLK_OK = Input([{0, 1}], \"logical\") VCCO_2V5_DDR24 = Input([(0,",
"= Input([{0, 1}], \"logical\") EN2 = Input([{0, 1}], \"logical\") states = (lambda vdd33,",
"{(\"IC4\", \"EN\")}), (\"1v5_vdd_oct\", \"IC4\", \"V_OUT\", {(\"U20\", \"VMON6\"), (\"cpu\", \"VDD_15\")}), (\"en_2v5_cpu13\", \"U20\", \"OUT9\", {(\"IC15\",",
"[{0}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(Main_PSU, self).__init__(name, bus_addr,",
"(\"psu_cpu1\", 0x0, PSU, []), (\"main_psu\", 0x0, Main_PSU, []), (\"U20\", 0x60, ISPPAC, [\"pac_cpu\"]), #cpu",
"\"VID\")}), (\"b_fdv_1v8\", \"bmc\", \"B_FDV_1V8\", {(\"U43\", \"VID\"), (\"U47\", \"VID\")}), (\"c_reset_n\", \"bmc\", \"C_RESET_N\", {(\"cpu\", \"CHIP_RESET_L\")}),",
"\"\"), ({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)],",
"(\"psu_cpu1\", \"EN\"), (\"main_psu\", \"EN\")}), (\"3v3_psup\", \"main_psu\", \"V33_PSU\", {(\"U20\", \"VMON3_ATT\"), (\"U27\", \"VS\"), (\"U31\", \"VS\"),",
"5500)]} implicit_off = {\"VRI\" : [(0, 868)], \"VCC\" : [(0, 2374)]} VCC =",
"partial(Constraint.implicit, \"VREF\", \"implicit_off\"))], \"power\") def __init__(self, name, bus_addr): super(NCP, self).__init__(name, bus_addr, NCP) class",
"v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0],",
"ThunderX_EVAL3(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0, 1}], \"logical\") PLL_REF_CLK =",
"\"VDDH\", \"BUS\", \"VR_ON\"}, set())])), Constraint([(0, 0)], {\"VR_ON\": [{0}], \"VDD33\":[(0, 3630)], \"VDDH\":[(0, 14000)]}, partial(Constraint.implicit,",
"\"\\n\".join(commands)) else: return (False, \"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095",
"0)], en1 : [{0}], en2 : [{1}]}, { \"POWERED_ON\": [ ({en1 : [{0}]},",
"stabilized\"), ({en1 : [{1}]}, \"\"), ({vdd: [(2000, 2200)]}, \"\") ], \"POWERED_ON\": []}) },",
"= l2_addr self.implicit_off = self.implicit_off(threshold) self.implicit_off_2 = self.implicit_off_2(threshold) self.VOUT = self.VOUT(threshold) self.VOUT_2 =",
"1575)], #ddr24: [(1425, 1575)], ####### FOR EVAL 3 ########### ddr24: [(1200, 1200)], ddr13:",
"[{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name,",
"self.is_default = True return except State_Space_Error: self.is_default = False try: intersect(states[self.V_PWR.name], [(0, 4400)])",
"vtt_ddr24 : [(0, 0)], vtt : [(0, 0)], mgtaux_l : [(0, 0)], mgtaux_r",
"0x15, MAX15301, [900, \"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA p 99 (\"U34\", 0x0, IR, [4500, \"ir3581\", \"ir3581_loop_vdd_core\",",
"io33 : { #pll_vdd, pll_ddr2, pll_ddr13, sys_pll_ddr : { \"POWERED_DOWN\" : PowerState({ ok",
"(\"U40\", \"VRI\")}), (\"b_cdv_1v8\", \"bmc\", \"B_CDV_1V8\", {(\"U26\", \"VID\"), (\"U30\", \"VID\")}), (\"b_fdv_1v8\", \"bmc\", \"B_FDV_1V8\", {(\"U43\",",
"vadj : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13",
"super(FPGA_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA_EVAL3) class ThunderX(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L",
"__init__(self, name, bus_addr): super(CPU2, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU2) class BMC(Node): B_CLOCK_FLOL = Output([{0,",
"1900)], \"power\") VCCO_1V8 = Input([(0, 2000)], \"power\") #this is sys_1v8.... VCCINT = Input([(0,",
"[{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{0}]}))], \"power\") V12_PSU = Output([(0,",
": PowerState({ clk: [(0, 0), (3300, 3300), (50, 50)], vcc : [(873, 927)],",
"def construct_dependency(self, name, req): return (SET.Implicit, [set(), set(), set(), set()], lambda states, req",
"self.default)] return except State_Space_Error: pass try: intersect(states[self.VDDH.name], [(8500, 14000)]) self.current = [(self.default, self.default)]",
"node.isppac_monitor(\"VMON5_ATT\", name)) VMON6 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON6\", name)) VMON7",
"23000)], \"power\") #slave input power VR_ON = Input([{0, 1}], \"logical\") V_OUT = lambda",
"name)) VCC_IN = Input([(0, 6000)], \"power\", lambda node, name: node.isppac_monitor(\"VCCINP\", name)) VCC =",
": [(550, 1700)], vtt_ddr24 : [(550, 1700)], vtt : [(1164, 1236)], mgtaux_l :",
"\"OUT8\", {(\"IC4\", \"EN\")}), (\"1v5_vdd_oct\", \"IC4\", \"V_OUT\", {(\"U20\", \"VMON6\"), (\"cpu\", \"VDD_15\")}), (\"en_2v5_cpu13\", \"U20\", \"OUT9\",",
"mgtaux_r : [(1746, 1854)], mgtavcc : [(873, 927)], ok : [{1}] }, {",
": [(2400, 2600)], ddr13 : [(1425, 1575)], ddr13_2v5 : [(2400, 2600)], vttddr24 :",
"CPU names its ddr bank voltage DDR02 (not 24), #but I adjusted it",
"[\"PLL_DC_OK\", \"CHIP_RESET_L\", \"PLL_REF_CLK\", \"VDD\", \"VDD_09\", \"VDD_15\", \"VDD_DDR24\", \"VDD_2V5_DDR24\", \"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"])",
": PowerState({vdd33: [(0, 0)], vdd : [(0, 0)], en1 : [{0}], en2 :",
"[]}), \"POWERED_ON\" : PowerState({vdd33: [(3000, 4000)], vdd : [(2000, 2500)], en1 : [{1}],",
"= Input([{0, 1}], \"bus\") VS = Input([(0, 6000)], \"power\") VBUS = Input([(0, 40000)],",
"= Input([(0, 4000)], \"power\") BUS = Input([{0, 1}], \"bus\") VDDH = Input([(0, 23000)],",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON3_ATT\", name)) VMON4 = Input([(0, 5734)], \"monitor\", lambda",
"#VCCIV8_FPGA p 92 (\"IC10\", 0x1B, MAX15301, [3300, \"max15301_util_3v3\"]), #UTIL_3V3 p.90 (\"IC4\", 0x10, MAX15301,",
"13200)], \"power\") VOUT = lambda _, thresh : Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\"",
"0x0, ThunderX_EVAL3, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP, []), (\"U25\", 0x0,",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4_ATT\", name)) VMON5 = Input([(0, 5734)],",
"1}, {0, 1}], [ Constraint([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"math.ceil(1600 - i * 6.25) outputs.append( Constraint( [(voltage_min, voltage_max)], \\ {\"VID\" : binary_multidimensional(i",
"\"VIN\", \"BUS\"}, {\"EN_2\"}, after_set = {\"EN_2\"}), partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\", \"EN_2\"}, set())])),",
": [(1746, 1854)]}, \"\"), ({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24",
"1}], \"logical\") V_OUT = lambda _, default, thresh: Output([(0, thresh)], [Constraint([(default, default)], {\"V_IN\"",
"[(2600, 3600)]})), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\",",
"p.90 (\"IC4\", 0x10, MAX15301, [1500, \"max15301_15_vdd_oct\"]), #1V5_VDD_OCT p.70 (\"IC11\", 0x12, MAX15301, [1800, \"max15301_vadj_1v8\"]),",
"[ ({mgtaux_l : [(0, 0)], mgtaux_r : [(0, 0)]}, \"\"), ({vtt : [(0,",
"ISL, []), #DD_DDRFPGA24 p 95 (\"IC5\", 0x0, MAX8869, [1800]), #MGTVCCAUX_L, p 88 (\"IC6\",",
"\"V_OUT\", {(\"U35\", \"VMON7\"), (\"fpga\", \"MGTAVCC\")}), (\"en_vccint_fpga\", \"U35\", \"OUT9\", {(\"U51\", \"VR_ON\")}), (\"vccint_fpga\", \"U51\", \"V_OUT\",",
"5250)], [Constraint([], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_on), state_update = Constraint.default_state),",
"\"implicit_off\")), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"CLK_IN\": [(0, 0), (3300, 3300), (0,",
"VMON2_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON2_ATT\", name)) VMON3_ATT = Input([(0,",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5\", name)) VMON5_ATT = Input([(0, 5734)], \"monitor\", lambda",
"({mgtavcc : [(873, 927)]}, \"\"), ({vtt : [(1164, 1236)]}, \"\"), ({mgtaux_l : [(1746,",
"= self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(int(voltage), int(voltage * 1.01)) super(MAX8869, self).__init__(name, bus_addr,",
"partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)],",
"\"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]} implicit_off = {\"VR_ON\": [{0}], \"VDD33\": [0, 2800], \"VDDH\": [(0,",
"1520)], [Constraint([], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_on), state_update= Constraint.default_state),",
"\"VOUT\", \"implicit_off\"))) return outputs class ISL(Node): implicit_off = {\"VCC\" : [(0, 4300)], \"EN_PWR\"",
": PowerState({ clk: [(0, 0), (3300, 3300), (0, 0)], vcc : [(0, 0)],",
": [(0, 13200)], \"EN_2\" : [{0}]}, partial(Constraint.implicit, \"VOUT_2\", \"implicit_off_2\")), ], \"power\", Wire.ir_set) def",
"1550)]}, \"\"), ({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1425,",
"pass try: intersect(states[self.VDDH.name], [(8500, 14000)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass",
"[(0, 2800)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass try: intersect(states[self.VDDH.name], [(8500,",
"node: node.indep(\"CLK\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Clock, self).__init__(name, bus_addr, Clock) class",
"(\"U43\", \"VCC\"), (\"U47\", \"VCC\")}), (\"5vsb_psup\", \"main_psu\", \"V5SB_PSU\", {(\"U20\", \"VCC_IN\"), (\"U35\", \"VCC_IN\")}), (\"bmc_vcc_3v3\", \"main_psu\",",
"{\"V_IN\" : [(0, max(thresh + 499, 2699))], \"SHDN\" : [{0}]} V_IN = Input([(0,",
"(\"5v_psup\", \"main_psu\", \"V5_PSU\", {(\"U35\", \"VMON2_ATT\"), (\"U20\", \"VMON2_ATT\"), (\"U26\", \"VCC\"), (\"U30\", \"VCC\"), (\"U43\", \"VCC\"),",
"{\"EN\"}), partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"EN\": [{0}], \"V_PWR\": [(0,",
"2 == x2, x1 * 2 == x2 + 1), ([(\"VREF\", 0), (\"VRI\",",
"range(0, 177): voltage_min = math.floor(1600 - i * 6.25) voltage_max = math.ceil(1600 -",
"#### REGULAR TRANSITION STEP ######### #({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400, 2600)],",
"vadj: [(1746, 1845)]}, \"\"), ##################################### ({mgtavcc : [(873, 927)]}, \"\"), ({vtt : [(1164,",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], [",
"{\"VCC\", \"VCC_IN\"}, set())), Constraint([{0}], {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 6000)]}, partial(Constraint.implicit, name, \"implicit_off\")),",
"thingy? (\"en_sys_2v5_13\", \"U35\", \"OUT7\", { (\"IC8\", \"V_EN\")}), (\"sys_2v5_13\", \"IC8\", \"V_OUT\", {(\"U35\", \"VMON4_ATT\"), (\"U39\",",
"self.V_OUT(int(voltage), int(voltage * 1.01)) super(MAX8869, self).__init__(name, bus_addr, MAX8869) class MAX15053(Node): implicit_on = lambda",
"0)], {\"VID\": [{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"\"EN_VTT\", \"EN_PWR\"}), partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"})])))",
"implicit_off = {\"VR_ON\": [{0}], \"VDD33\": [0, 2800], \"VDDH\": [(0, 8499)]} VDD33 = Input([(0,",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON2_ATT\", name)) VMON3_ATT = Input([(0, 5734)], \"monitor\",",
"self.is_default = False super(ISL, self).__init__(name, bus_addr, ISL) def update(self, states): try: intersect(states[self.VOUT.name], [(500,",
": [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ##### FOR EVAL 3: ################### ({vcco",
"VCCO_VCC_DDR voltages MGTVCCAUX_L = Input([(0, 1900)], \"power\") MGTVCCAUX_R = Input([(0, 1900)], \"power\") VCCO_1V8",
"({en1 : [{1}]}, \"\"), ({en2 : [{1}], vdd: [(2000, 2600)]}, \"\") ], \"POWERED_ON\":",
"partial(Constraint.implicit, \"VOUT\", \"implicit_off\")), ], \"power\", Wire.ir_set) VOUT_2 = lambda _, thresh : Output([(0,",
"\"V_IN\"), (\"IC8\", \"V_IN\"), (\"IC9\", \"V_IN\"), (\"U51\", \"VDD33\")}), (\"en_mgtavtt_fpga\", \"U35\", \"OUT14\", {(\"U41\", \"VR_ON\")}), (\"mgtavtt_fpga\",",
"IR, [4500, \"ir3581\", \"ir3581_loop_vdd_core\", \"ir3581_loop_0v9_vdd_oct\", 0x60, 0x62]), #VDD_CORE, VDD_OCT p 77 (\"U26\", 0x0,",
"self.is_default = False self.current = [(default, default)] self.V_OUT = MAX20751.V_OUT(default) super(MAX20751, self).__init__(name, bus_addr,",
"vcc_ddr13, vcc_ddr24, vtt_ddr13, vtt_ddr24, vtt, mgtaux_l, mgtaux_r, mgtavcc: { \"POWERED_DOWN\" : PowerState({ clk:",
"isl_outputs(): outputs = [] for i in range(0, 177): voltage_min = math.floor(1600 -",
"multidim.append({int(i)}) return multidim def isl_outputs(): outputs = [] for i in range(0, 177):",
"self.threshold = threshold self.device = device self.loop1 = loop1 self.loop2 = loop2 self.l1_addr",
"[{0}]} V_IN = Input([(0, 6000)], \"power\") SHDN = Input([{0, 1}], \"logical\") V_OUT =",
"[(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 : [(0, 0)], vcc_ddr13 : [(0,",
"[(lambda x1, x2: z3.Or(x1 * 2 == x2, x1 * 2 == x2",
"EVAL 3: ################### ({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 :",
": [{0}], clk : [(0, 0), (3300, 3300), (0, 0)], vdd : [(0,",
"[(0, 0)], io33 : [(0, 0)] }, { \"POWERED_ON\" : [ ({ok :",
"[(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.implicit, \"CLK\", \"implicit_on\"), dependency_update= (Constraint.is_configured, [partial(Constraint.implicit, \"CLK\",",
"node.isppac_monitor(\"VMON3_ATT\", name)) VMON4 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4\", name)) VMON4_ATT",
"[(0, 0)], vadj: [(0, 0)]}, \"\"), ({aux : [(0, 0)]}, \"\"), ({io :",
"[(max(thresh + 500, 2700), 5500)], \"SHDN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)],",
": {\"VCC\" : [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN_2\" : [{0}]} device",
": [(1140, 3400)], vcc_ddr24 : [(1140, 3400)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 :",
"\"implicit_off\"))], \"logical\", Wire.pin_set) setattr(self, name, output) def isppac_monitor(self, pinname, wire_name, multiplier = 1):",
"to EVAL 3 versions enzian_nodes_EVAL3 = [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0,",
"= Input([(0, 2000)], \"power\") #replace VREF VCCO_VTT_DDR24 = Input([(0, 2000)], \"power\") VCCO_VCC_DDR13 =",
"MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA, []), (\"cpu\", 0x0, ThunderX, []),",
"\"POWERED_ON\" : [ ({ok : [{0}]}, \"\") ], \"POWERED_DOWN\" : [] } ),",
"= state_union(state, req[wire]) print(req) return req def construct_dependency(self, name, req): return (SET.Implicit, [set(),",
"p 92 (\"IC10\", 0x1B, MAX15301, [3300, \"max15301_util_3v3\"]), #UTIL_3V3 p.90 (\"IC4\", 0x10, MAX15301, [1500,",
"\"POWERED_DOWN\": [ ({clk: [(0, 0), (3300, 3300), (50, 50)], io33 : [(3140, 3460)]},",
"super(Bus, self).__init__(name, bus_addr, Bus) def construct_req(self, inputs): req = {} for node, _",
"\"V5SB_PSU\", {(\"U20\", \"VCC_IN\"), (\"U35\", \"VCC_IN\")}), (\"bmc_vcc_3v3\", \"main_psu\", \"BMC_VCC_3V3\", {(\"U20\", \"VCC\"), (\"U35\", \"VCC\")}), #add",
"[(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200,",
"* 6.25) outputs.append( Constraint( [(voltage_min, voltage_max)], \\ {\"VID\" : binary_multidimensional(i + 2), \"VCC\"",
"VDDH = Input([(0, 23000)], \"power\") #slave input power VR_ON = Input([{0, 1}], \"logical\")",
"io33 : [(3140, 3460)], }, { \"POWERED_DOWN\": [ ({clk: [(0, 0), (3300, 3300),",
"14000)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass class Oscillator(Node): VDD =",
"(3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(0, 2599)]}))], \"clock\",",
"try: intersect(states[self.VDD.name], [(2600, 3600)]) except State_Space_Error: self.configured = False def configure(self): if self.configured:",
"node, name: node.isppac_monitor(\"VMON11\", name)) VMON11_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11_ATT\",",
"= Input([(0, 3600)], \"power\") CLK_IN = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\")",
"[(0, 0)], vcc_ddr24 : [(0, 0)], vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0,",
"Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\":",
"for node, _ in inputs: unite_dict(req, node.bus_req()) print(req) return req def construct_req_off(self, inputs):",
"MAX8869(Node): implicit_on = lambda _, thresh: {\"V_IN\" : [(max(thresh + 500, 2700), 5500)],",
"{(\"U34\", \"EN_2\")}), (\"0v9_vdd_oct\", \"U34\", \"VOUT_2\", {(\"U20\", \"VMON5\"), (\"cpu\", \"VDD_09\")}), (\"en_1v5_vdd_oct\", \"U20\", \"OUT8\", {(\"IC4\",",
"VR_ON = Input([{0, 1}], \"logical\") V_OUT = lambda default : Output([(0, 1520)], [Constraint([],",
": PowerState({vdd33: [(3000, 4000)], vdd : [(2000, 2500)], en1 : [{1}], en2 :",
"[\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_nodes = [ (\"power_bus\", 0x0, Bus, []),",
"(\"IC10\", 0x1B, MAX15301, [3300, \"max15301_util_3v3\"]), #UTIL_3V3 p.90 (\"IC4\", 0x10, MAX15301, [1500, \"max15301_15_vdd_oct\"]), #1V5_VDD_OCT",
"\"VMON1_ATT\"), (\"U37\", \"VDDH\"), (\"U41\", \"VDDH\"), (\"IC10\", \"V_PWR\"), (\"IC11\", \"V_PWR\"), (\"IC12\", \"V_PWR\"), (\"U43\", \"EN_VTT\"),",
"set()))], \"logical\", Wire.vid_set) B_CDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1}, {0, 1},",
"MAX15053, [2500]), #SYS_2V5_24 (\"IC15\", 0x0, MAX15053, [2500]), #2V5_CPU13 p 71 (\"IC16\", 0x0, MAX15053,",
"(\"vdd_ddrfpga13\", \"U43\", \"VOUT\", {(\"U48\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR13\"), (\"U39\", \"VRI\")}), #vcco (\"vdd_ddrfpga24\", \"U47\", \"VOUT\",",
"(\"IC5\", 0x0, MAX8869, [1800]), #MGTVCCAUX_L, p 88 (\"IC6\", 0x0, MAX8869, [1800]), #MGTVCCAUX_R, p",
"states[self.V_OUT.name] self.is_default = True return except State_Space_Error: self.is_default = False try: intersect(states[self.VDD33.name], [(0,",
"[{1}]}, \"\"), ({vdd33: [(3000, 4000)], vdd: [(2000, 2500)]}, \"wait until \" + vdd",
"\"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd33: [(3000, 4000)], vdd : [(2000, 2500)],",
"[(0, 868)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\")), Constraint([(0, 0)], { \"VRI\"",
"\"U57\", \"CLK\", {(\"U11\", \"CLK_IN\"), (\"U16\", \"CLK_IN\")}), (\"clock_flol\", \"bmc\", \"B_CLOCK_FLOL\", {(\"fpga\", \"CLK_OK\")}), (\"fpga_clk\", \"U16\",",
"False self.threshold = threshold self.device = device self.loop1 = loop1 self.loop2 = loop2",
"name: node.isppac_monitor(\"VMON12\", name)) VCC_IN = Input([(0, 6000)], \"power\", lambda node, name: node.isppac_monitor(\"VCCINP\", name))",
"= Input([{0, 1}], \"bus\") VDDH = Input([(0, 23000)], \"power\") #slave input power VR_ON",
"0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return (True, \"\\n\".join(commands)) else: return",
"3300), (0, 0)], {\"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\"))],",
"\"power\") VDD = Input([(0, 2500)], \"power\") EN1 = Input([{0, 1}], \"logical\") EN2 =",
"[ (\"b_psup_on\", \"bmc\", \"B_PSUP_ON\", {(\"psu_cpu0\", \"EN\"), (\"psu_cpu1\", \"EN\"), (\"main_psu\", \"EN\")}), (\"3v3_psup\", \"main_psu\", \"V33_PSU\",",
"\"wait for %s to stabilize\" %(io33)), ({vdd : [(940, 980)]}, \"\"), ({vdd09 :",
"* 1.01)) super(MAX8869, self).__init__(name, bus_addr, MAX8869) class MAX15053(Node): implicit_on = lambda _, threshold:",
"\"SHDN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"SHDN\"",
"0.00095 * list(value[0])[0] * multiplier, 0.00105 * list(value[0])[0] * multiplier, node.device, pinname ))",
"0x0, MAX15053, [2500]), #SYS_2V5_13 (\"IC9\", 0x0, MAX15053, [2500]), #SYS_2V5_24 (\"IC15\", 0x0, MAX15053, [2500]),",
"SI5395, [\"clk_main\"]), (\"U11\", 0x0, SI5395, [\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator,",
"ThunderX, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP, []), (\"U25\", 0x0, NCP,",
"{ #pll_vdd, pll_ddr2, pll_ddr13, sys_pll_ddr : { \"POWERED_DOWN\" : PowerState({ ok : [{0}],",
"(\"U37\", \"VDDH\"), (\"U41\", \"VDDH\"), (\"IC10\", \"V_PWR\"), (\"IC11\", \"V_PWR\"), (\"IC12\", \"V_PWR\"), (\"U43\", \"EN_VTT\"), (\"U47\",",
"630)], io33 : [(3140, 3460)], }, { \"POWERED_DOWN\": [ ({clk: [(0, 0), (3300,",
"self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(int(voltage), int(voltage * 1.01)) super(MAX8869, self).__init__(name,",
"version of the FPGA, comments indicate changes class FPGA_EVAL3(Stateful_Node): CLK = Input([(0, 0),",
"\"VDD_DDR24\", \"VDD_2V5_DDR24\", \"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr):",
"def __init__(self, name, bus_addr, threshold, device, loop1, loop2, l1_addr, l2_addr): self.configured = False",
"\"VDDH\"), (\"IC13\", \"V_PWR\")}), (\"en_vcc1v8_fpga\", \"U35\", \"OUT15\", {(\"IC12\", \"EN\")}), (\"vcc1v8_fpga\", \"IC12\", \"V_OUT\", {(\"U35\", \"VMON11_ATT\"),",
": [(2400, 2600)], vcc_ddr13 : [(1140, 3400)], vcc_ddr24 : [(1140, 3400)], vtt_ddr13 :",
"3600)]) except State_Space_Error: self.configured = False def configure(self): if self.configured: return [] else:",
"def configure(self): if self.configured: return [] else: self.configured = True return [ \"init_device('isl6334d_ddr_v',",
"{\"EN\": [{1}], \"V_PWR\": [(5500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update=(Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\",",
"(\"fpga\", \"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\", \"U35\", \"OUT17\", {(\"IC11\", \"EN\")}), (\"vadj_1v8_fpga\", \"IC11\", \"V_OUT\", {(\"fpga\", \"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\",",
"intersect(states[self.VDD33.name], [(0, 2800)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass try: intersect(states[self.VDDH.name],",
"node.device, pinname )) else: commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='%s')\" % ( wire_name, 0.00095",
"\"VMON8\"), (\"fpga\", \"MGTVCCAUX_L\")}), (\"mgtvccaux_r\", \"IC6\", \"V_OUT\", {(\"U35\", \"VMON9\"), (\"fpga\", \"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\", \"U35\", \"OUT17\",",
"name, output) def isppac_monitor(self, pinname, wire_name, multiplier = 1): def fun(value, _, node=self,",
"[{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd33: [(3000, 4000)], vdd : [(2000,",
"\"U35\", \"OUT16\", {(\"IC7\", \"V_EN\")}), (\"sys_1v8\", \"IC7\", \"V_OUT\", {(\"U35\", \"VMON12\"), (\"fpga\", \"VCCO_1V8\")}), #where to",
"lambda node, name: node.isppac_monitor(\"VMON11\", name)) VMON11_ATT = Input([(0, 5734)], \"monitor\", lambda node, name:",
"node.isppac_monitor(\"VMON8_ATT\", name)) VMON9 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON9\", name)) VMON10",
"lambda _, thresh : Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\" : [(2900, 3630)], \"VIN\"",
"lambda node, name: node.isppac_monitor(\"VMON1_ATT\", name, 0.4125)) VMON2_ATT = Input([(0, 5734)], \"monitor\", lambda node,",
"\"VCC\"), (\"U47\", \"VCC\")}), (\"5vsb_psup\", \"main_psu\", \"V5SB_PSU\", {(\"U20\", \"VCC_IN\"), (\"U35\", \"VCC_IN\")}), (\"bmc_vcc_3v3\", \"main_psu\", \"BMC_VCC_3V3\",",
"= Input([(0, 2600)], \"power\") EN1 = Input([{0, 1}], \"logical\") EN2 = Input([{0, 1}],",
"implicit_off = {\"VDD\": [(0, 2599)], \"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]} VDD",
"#### FOR EVAL 3 ###################### ({ddr24 : [(1200, 1200)], ddr24_2v5 : [(2400, 2600)],",
"node.isppac_monitor(\"VMON1_ATT\", name, 0.4125)) VMON2_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON2_ATT\", name))",
"88 (\"IC7\", 0x0, MAX15053, [1800]), #SYS_1V8, p 89 (\"IC8\", 0x0, MAX15053, [2500]), #SYS_2V5_13",
"False try: intersect(states[self.VIN.name], [(self.threshold, 13200)]) except State_Space_Error: self.configured = False return try: intersect(states[self.VCC.name],",
"__init__(self, name, bus_addr, device): self.device = device self.configured = False super(SI5395, self).__init__(name, bus_addr,",
"C_RESET_N = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_RESET_N\", set(), set()))], \"logical\", Wire.gpio_set)",
"loop 2 will have different addresses... VCC = Input([(0, 4000)], \"power\") EN =",
"x1 * 2 == x2 + 1), ([(\"VREF\", 0), (\"VRI\", 0)]))]), Constraint([(0, 0)],",
"self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(voltage, int(voltage * 1.01)) super(MAX15053, self).__init__(name, bus_addr, MAX15053)",
"1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], [ Constraint([{0,",
"\"bus\") VDDH = Input([(0, 23000)], \"power\") #slave input power VR_ON = Input([{0, 1}],",
"[(0, 0), (3300, 3300), (0, 0)]} VDD = Input([(0, 3600)], \"power\") CLK_IN =",
": [(0, (max(int(threshold * 1.06) - 1, 2699)))], \"V_EN\" : [{0}]} V_IN =",
"2600)], en1 : [{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({vdd: [(2300, 2400)]},",
"[(0, thresh-1)], \"EN_2\" : [{0}]} device = \"ir3581\" bus = \"power\" BUS =",
"V_OUT = lambda _, default, threshold: Output([(0, default)], [Constraint([(default, default)], {\"V_IN\" : [(max(int(threshold",
"2600)], vttddr24 : [(700, 800)], vttddr13 : [(700, 800)], io33 : [(3140, 3460)],",
"(\"U26\", 0x0, ISL, []), #VDD_DDRCPU13 p 73 (\"U30\", 0x0, ISL, []), #VDD_DDRCPU24 p",
"MAX15301, [900, \"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA p 99 (\"U34\", 0x0, IR, [4500, \"ir3581\", \"ir3581_loop_vdd_core\", \"ir3581_loop_0v9_vdd_oct\",",
"C_PLL_DC_OK = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_PLL_DC_OK\", set(), set()))], \"logical\", Wire.cpu_clk_ok)",
"\"VDDH\":[(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_off)), ], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr, default,",
"Wire.clock_config) def __init__(self, name, bus_addr): super(Clock, self).__init__(name, bus_addr, Clock) class PowerConsumer(Node): node_string =",
"BMC_VCC_3V3 = Output([(3300, 3300)], [Constraint([(3300, 3300)], {}, partial(Constraint.implicit, \"BMC_VCC_3V3\", {}))], \"power\") V5_PSU =",
"[{0}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{0}]}))], \"power\") V12_PSU = Output([(0, 12000)], [ Constraint([(12000, 12000)],",
"2599)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(0, 2599)]}))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Oscillator,",
"0x0, ISL, []), #VDD_DDRFPGA13 p 93 (\"U47\", 0x0, ISL, []), #DD_DDRFPGA24 p 95",
"(\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP, []), (\"U25\", 0x0, NCP, []), (\"U39\",",
"wire in req: req[wire] = state else: req[wire] = state_union(state, req[wire]) print(req) return",
"EVAL 3 ########### ddr24: [(1200, 1200)], ddr13: [(1200, 1200)], ################################# ddr24_2v5 : [(2400,",
"version of the THUNDERX: Comments indicate changes class ThunderX_EVAL3(Stateful_Node): PLL_DC_OK = Input([{0, 1}],",
"[(1746, 1854)], vcco : [(1746, 1854)], vadj : [(1746, 1854)], vcc_2v5_ddr13 : [(2400,",
"self.V_OUT = MAX15301.V_OUT(default) super(MAX15301, self).__init__(name, bus_addr, MAX15301) def bus_req(self): return {self.V_PWR.name: [(5500, 14000)]}",
"= device self.default = default self.is_default = False self.current = [(default, default)] self.V_OUT",
"lambda _, default, thresh: Output([(0, thresh)], [Constraint([(default, default)], {\"V_IN\" : [(max(thresh + 500,",
"bus_req_off(self): return {self.VIN.name : [(0, self.threshold-1)], self.VCC.name : [(0, 2500)]} def update(self, states):",
"rst : [{1}], clk : [(0, 0), (3300, 3300), (50, 50)], vdd :",
"= {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]} implicit_off =",
"{\"VCC\": [(2800, 3960)], \"VCC_IN\": [(2250, 5500)]}, partial(Constraint.explicit, name, {\"VCC\", \"VCC_IN\"}, set())), Constraint([{0}], {\"VCC\":",
"(3300, 3300), (50, 50)]}, partial(Constraint.explicit, \"CLK\", {\"VDD\", \"CLK_IN\"}, set())), Constraint([(0, 0), (3300, 3300),",
"(\"util33\", \"IC10\", \"V_OUT\", {(\"U35\", \"VMON3_ATT\"), (\"U44\", \"VS\"), (\"U48\", \"VS\"), (\"U37\", \"VDD33\"), (\"U41\", \"VDD33\"),",
": [(0, thresh-1)], \"EN\" : [{0}]} implicit_off_2 = lambda _, thresh : {\"VCC\"",
"bus_addr, BMC) self.configured = False def configure(self): if self.configured: return [] else: self.configured",
"\"CLK\", {(\"U11\", \"CLK_IN\"), (\"U16\", \"CLK_IN\")}), (\"clock_flol\", \"bmc\", \"B_CLOCK_FLOL\", {(\"fpga\", \"CLK_OK\")}), (\"fpga_clk\", \"U16\", \"CLK\",",
"intersect(states[self.VIN.name], [(self.threshold, 13200)]) except State_Space_Error: self.configured = False return try: intersect(states[self.VCC.name], [(2900, 3630)])",
"\"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd : [(2300, 2600)], en1 : [{1}], en2 :",
"en1 : [{0}], en2 : [{1}]}, { \"POWERED_ON\": [ ({en1 : [{0}]}, \"\")",
"87 (\"U51\", 0x70, MAX20751, [900, \"max20751_vccint_fpga\"]), #VCCINT_FPGA p 97 (\"U43\", 0x0, ISL, []),",
"vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ({mgtavcc : [(873, 927)]}, \"\"),",
"= [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0, PSU, []), (\"psu_cpu1\", 0x0, PSU,",
"try: intersect(states[self.VOUT.name], [(500, 1600)]) self.is_default = True except State_Space_Error: self.is_default = False class",
"[(2400, 2600)], vttddr24 : [(700, 800)], vttddr13 : [(700, 800)]}, \"%s should have",
"3300), (50, 50)]}, partial(Constraint.implicit, \"CLK\", \"implicit_on\"), dependency_update= (Constraint.is_configured, [partial(Constraint.implicit, \"CLK\", \"implicit_on\"), partial(Constraint.explicit, \"CLK\",",
"VTT_DDR13 = Input([(0, 1400)], \"power\") VDD_IO33 = Input([(0, 3600)], \"power\") states = (lambda",
"partial(Constraint.implicit, name, \"implicit_off\"))], \"logical\", Wire.pin_set) setattr(self, name, output) def isppac_monitor(self, pinname, wire_name, multiplier",
"l1_addr self.l2_addr = l2_addr self.implicit_off = self.implicit_off(threshold) self.implicit_off_2 = self.implicit_off_2(threshold) self.VOUT = self.VOUT(threshold)",
"\"POWERED_ON\" : []} )}, [\"CLK\", \"CLK_OK\", \"VCCINT\", \"VCCINT_IO\", \"VCCAUX\", \"VCCO_1V8\", \"VADJ_1V8\", \"VCCO_2V5_DDR13\", \"VCCO_2V5_DDR24\",",
"\"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA_EVAL3, self).__init__(name, bus_addr,",
"\"VDDH\": [(0, 8499)]} VDD33 = Input([(0, 4000)], \"power\") BUS = Input([{0, 1}], \"bus\")",
"14000)], \"power\") V_OUT = lambda default : Output([(0, 5250)], [Constraint([], {\"EN\": [{1}], \"V_PWR\":",
"partial(Constraint.explicit, \"B_FDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) B_CDV_1V8 = Output([{0, 1}, {0, 1}, {0,",
"({en2 : [{1}]}, \"\"), ({vdd33: [(3000, 4000)], vdd: [(2000, 2500)]}, \"wait until \"",
"self.implicit_on(int(voltage * 1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(voltage, int(voltage *",
"[(500, 1600)]) self.is_default = True except State_Space_Error: self.is_default = False class IR(Node): implicit_off",
"\"V_OUT\", MAX15301.implicit_on), state_update = Constraint.default_state), Constraint([(600, 5250)], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)], \"BUS\"",
"VCC = Input([(0, 6000)], \"power\") VRI = Input([(0, 6000)], \"power\") #reference input VREF",
"names of MAX15301s (\"IC12\", 0x11, MAX15301, [1800, \"max15301_vcc1v8_fpga\"]), #VCCIV8_FPGA p 92 (\"IC10\", 0x1B,",
"\"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA p 99 (\"U34\", 0x0, IR, [4500, \"ir3581\", \"ir3581_loop_vdd_core\", \"ir3581_loop_0v9_vdd_oct\", 0x60, 0x62]),",
"implicit_off = lambda _, thresh : {\"VCC\" : [(0, 2500)], \"VIN\" : [(0,",
"[(550, 1700)], vtt : [(1164, 1236)], mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746,",
"\"VDD_15\")}), (\"en_2v5_cpu13\", \"U20\", \"OUT9\", {(\"IC15\", \"V_EN\")}), #to NCP (\"2v5_cpu13\", \"IC15\", \"V_OUT\", {(\"U20\", \"VMON7_ATT\"),",
"def __init__(self, name, bus_addr): self.is_default = False super(ISL, self).__init__(name, bus_addr, ISL) def update(self,",
"VMON7 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7\", name)) VMON7_ATT = Input([(0,",
"\"VID\"), (\"U47\", \"VID\")}), (\"c_reset_n\", \"bmc\", \"C_RESET_N\", {(\"cpu\", \"CHIP_RESET_L\")}), (\"pll_dc_ok\", \"bmc\", \"C_PLL_DC_OK\", {(\"cpu\", \"PLL_DC_OK\")}),",
"[(5500, 14000)]} BUS = Input([{0, 1}], \"bus\") EN = Input([{0, 1}], \"logical\") V_PWR",
"\"VREF\", {(\"U20\", \"VMON12\"), (\"cpu\", \"VTT_DDR24\")}), (\"pll_ref_clk\", \"U11\", \"CLK\", {(\"cpu\", \"PLL_REF_CLK\")}), (\"bus\", \"power_bus\", \"BUS\",",
"setattr(self, name, output) def isppac_monitor(self, pinname, wire_name, multiplier = 1): def fun(value, _,",
"(3300, 3300), (50, 50)], vdd : [(940, 980)], vdd09 : [(870, 930)], vdd15",
"[(thresh, 13200)], \"EN_2\" : [{1}], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit,",
"\"logical\") EN_VTT = Input([(0, 12000)], \"power\") VID = Input([{0, 1}, {0, 1}, {0,",
"[(1425, 1575)], ddr24_2v5 : [(2400, 2600)], #not imposed by the cpu, but the",
"\"V_PWR\")}), (\"en_vcc1v8_fpga\", \"U35\", \"OUT15\", {(\"IC12\", \"EN\")}), (\"vcc1v8_fpga\", \"IC12\", \"V_OUT\", {(\"U35\", \"VMON11_ATT\"), (\"fpga\", \"VCCAUX\")}),",
"\"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX, self).__init__(name, bus_addr, \"POWERED_DOWN\",",
"[{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({vdd: [(2300, 2400)]}, \"wait until \"",
"name: node.isppac_monitor(\"VCCA\", name)) def __init__(self, name, bus_addr, device): self.device = device for i",
"\"U20\", \"OUT7\", {(\"U34\", \"EN_2\")}), (\"0v9_vdd_oct\", \"U34\", \"VOUT_2\", {(\"U20\", \"VMON5\"), (\"cpu\", \"VDD_09\")}), (\"en_1v5_vdd_oct\", \"U20\",",
"3400)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vtt : [(1164, 1236)],",
"\"init_device('%s', False)\" % (self.device) ] #EVAL 3 version of the Enzian nodes, only",
"#MGTACC_FPGA p 85 (\"U41\", 0x73, MAX20751, [1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA p 87 (\"U51\", 0x70, MAX20751,",
"(3300, 3300), (50, 50)]} implicit_off = {\"VDD\": [(0, 2599)], \"CLK_IN\": [(0, 0), (3300,",
"def __init__(self, name, bus_addr, device): self.device = device for i in range(0, 20):",
"voltage_max)], \\ {\"VID\" : binary_multidimensional(i + 2), \"VCC\" : [(4750, 5250)], \"EN_PWR\" :",
"(lambda vdd33, vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd33: [(0, 0)], vdd :",
"{ \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd33:",
"% ( wire_name, node.device, pinname )) else: commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='%s')\" %",
"= False super(ISL, self).__init__(name, bus_addr, ISL) def update(self, states): try: intersect(states[self.VOUT.name], [(500, 1600)])",
"\"\"), ({mgtavcc : [(0, 0)]}, \"\"), ({vcco: [(0, 0)], vcc_2v5_ddr13 : [(0, 0)],",
"Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)], \"EN\" : [{0}]},",
"\"logical\", Wire.pin_set) setattr(self, name, output) def isppac_monitor(self, pinname, wire_name, multiplier = 1): def",
"PowerState({vdd33: [(3000, 4000)], vdd : [(2000, 2500)], en1 : [{1}], en2 : [{0}]},",
"\"\"), ##################################### ({mgtavcc : [(873, 927)]}, \"\"), ({vtt : [(1164, 1236)]}, \"\"), ({mgtaux_l",
"= Input([{0, 1}], \"logical\") PLL_REF_CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\")",
"PowerConsumer) class CPU2(Stateful_Node): VDD = Input([(0, 2600)], \"power\") EN1 = Input([{0, 1}], \"logical\")",
"[{0}]} V_IN = Input([(0, 6000)], \"power\") V_EN = Input([{0, 1}], \"logical\") V_OUT =",
"Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\": [{0}]}))], \"power\") V5SB_PSU = Output([(5000, 5000)],",
")) return fun def configure(self): if self.configured: return [] else: self.configured = True",
"{\"V_IN\" : [(0, 5500)], \"SHDN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def",
"monitor='%s')\" % ( wire_name, node.device, pinname )) else: commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='%s')\"",
"1000)], \"power\") MGTAVTT = Input([(0, 1300)], \"power\") VCCINT_IO = Input([(0, 1000)], \"power\") VCCAUX",
"from functools import partial import z3 class INA226(Node): BUS = Input([{0, 1}], \"bus\")",
": [(870, 930)], vdd15 : [(1450, 1550)], ddr13 : [(1425, 1575)], ddr24: [(1425,",
"{} def update(self, states): try: intersect(states[self.V_OUT.name], [(500, 1520)]) self.current = states[self.V_OUT.name] self.is_default =",
"req[wire] = state_union(state, req[wire]) print(req) return req def construct_dependency(self, name, req): return (SET.Implicit,",
"(3300, 3300), (0, 0)]} VDD = Input([(0, 3600)], \"power\") CLK_IN = Input([(0, 0),",
"[]} )}, [\"CLK\", \"CLK_OK\", \"VCCINT\", \"VCCINT_IO\", \"VCCAUX\", \"VCCO_1V8\", \"VADJ_1V8\", \"VCCO_2V5_DDR13\", \"VCCO_2V5_DDR24\", \"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\",",
"1}], {}, partial(Constraint.explicit, \"C_PLL_DC_OK\", set(), set()))], \"logical\", Wire.cpu_clk_ok) B_FDV_1V8 = Output([{0, 1}, {0,",
"and sys_pll_mul beforehand\"), ({rst : [{1}]}, \"\") ], \"POWERED_ON\" : [] }) },",
"(\"IC13\", 0x15, MAX15301, [900, \"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA p 99 (\"U34\", 0x0, IR, [4500, \"ir3581\",",
"[ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd33: [(3000, 4000)],",
"\"POWERED_DOWN\": [ ({en2 : [{1}]}, \"\"), ({vdd33: [(3000, 4000)], vdd: [(2000, 2500)]}, \"wait",
"\"PLL_DC_OK\")}), (\"en_vdd_ddrcpu13\", \"U20\", \"OUT11\", {(\"U26\", \"EN_PWR\")}), (\"en_vdd_ddrcpu24\", \"U20\", \"OUT12\", {(\"U30\", \"EN_PWR\")}), (\"vdd_ddrcpu13\", \"U26\",",
"en2 : [{0}]}, { \"POWERED_DOWN\": [ ({en2 : [{1}]}, \"\"), ({vdd33: [(3000, 4000)],",
"3960)], \"VCC_IN\": [(2250, 5500)]}, partial(Constraint.explicit, name, {\"VCC\", \"VCC_IN\"}, set())), Constraint([{0}], {\"VCC\": [(0, 2600)],",
"else: self.configured = True return [ \"init_device('%s', False)\" % (self.device) ] #EVAL 3",
"13200)], \"EN_2\" : [{1}], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT_2\",",
"thresh: {\"V_IN\" : [(0, max(thresh + 499, 2699))], \"SHDN\" : [{0}]} V_IN =",
"(\"U47\", \"VID\")}), (\"c_reset_n\", \"bmc\", \"C_RESET_N\", {(\"cpu\", \"CHIP_RESET_L\")}), (\"pll_dc_ok\", \"bmc\", \"C_PLL_DC_OK\", {(\"cpu\", \"PLL_DC_OK\")}), (\"en_vdd_ddrcpu13\",",
"bank voltage DDR02 (not 24), #but I adjusted it to match the schematics,",
"1}, {0, 1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_FDV_1V8\", set(), set()))], \"logical\",",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7_ATT\", name)) VMON8 = Input([(0, 5734)], \"monitor\", lambda",
"3630)], \"VIN\" : [(thresh, 13200)], \"EN_2\" : [{1}], \"BUS\" : [{1}]}, {}, dependency_update",
"= False self.is_default = False self.threshold = threshold self.device = device self.loop1 =",
"[{1}], \"V_PWR\": [(5500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update=(Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\"},",
"list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return (True, \"\\n\".join(commands)) else: return (False, \"wait_for_voltage('%s',",
"Input([(0, 3400)], \"power\") VADJ_1V8 = Input([(0, 2000)], \"power\") #not found in fpga boot",
"self.configured = True return [ \"init_device('%s', False)\" % (self.device) ] #EVAL 3 version",
"75 (\"U37\", 0x72, MAX20751, [900, \"max20751_mgtavcc_fpga\"]), #MGTACC_FPGA p 85 (\"U41\", 0x73, MAX20751, [1200,\"max20751_mgtavtt_fpga\"]),",
"{\"VRI\" : [(0, 868)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\")), Constraint([(0, 0)],",
"vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ({mgtavcc",
": [(0, 0)], vtt : [(0, 0)], mgtaux_l : [(0, 0)], mgtaux_r :",
"node.construct_req(inputs), lambda node, name, inputs: node.construct_dependency(name, node.construct_req(inputs))), Constraint([{0}], lambda node, inputs: {}, lambda",
"1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_RESET_N\", set(), set()))], \"logical\", Wire.gpio_set) C_PLL_DC_OK = Output([{0,",
"(\"U25\", 0x0, NCP, []), (\"U39\", 0x0, NCP, []), (\"U40\", 0x0, NCP, []), (\"U57\",",
"(\"cpu\", \"VDD_09\")}), (\"en_1v5_vdd_oct\", \"U20\", \"OUT8\", {(\"IC4\", \"EN\")}), (\"1v5_vdd_oct\", \"IC4\", \"V_OUT\", {(\"U20\", \"VMON6\"), (\"cpu\",",
"({vdd: [(2000, 2200)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD33\", \"VDD\", \"EN1\", \"EN2\"]) def",
"2500)], \"power\") EN1 = Input([{0, 1}], \"logical\") EN2 = Input([{0, 1}], \"logical\") states",
"(\"en_sys_1v8\", \"U35\", \"OUT16\", {(\"IC7\", \"V_EN\")}), (\"sys_1v8\", \"IC7\", \"V_OUT\", {(\"U35\", \"VMON12\"), (\"fpga\", \"VCCO_1V8\")}), #where",
"= list({0} for i in range(8 - len(binary))) for i in binary: multidim.append({int(i)})",
"1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1200, 1200)],",
"\"OUT13\", {(\"IC13\", \"EN\")}), (\"vccintio_bram_fpga\", \"IC13\", \"V_OUT\", {(\"U35\", \"VMON10\"), (\"fpga\", \"VCCINT_IO\")}), (\"en_vdd_ddrfpga13\", \"U35\", \"OUT18\",",
"[(2000, 2200)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD33\", \"VDD\", \"EN1\", \"EN2\"]) def __init__(self,",
"({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13",
"bus = \"power\" BUS = Input([{0, 1}], \"bus\") #loop 1 and loop 2",
"connected DIMM SPD needs 2.5 V ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700,",
"device, loop1, loop2, l1_addr, l2_addr): self.configured = False self.is_default = False self.threshold =",
"* 2 == x2 + 1), ([(\"VREF\", 0), (\"VRI\", 0)]))]), Constraint([(0, 0)], {\"VRI\"",
"bus_addr): super(ThunderX, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX) #EVAL 3 version of the THUNDERX: Comments",
"\"\"), ({ok: [{1}]}, \"\") ], \"POWERED_ON\" : []} )}, [\"CLK\", \"CLK_OK\", \"VCCINT\", \"VCCINT_IO\",",
"945)], \"power\") VDD_15 = Input([(0, 1650)], \"power\") VDD_DDR13 = Input([(0, 1650)], \"power\") VDD_2V5_DDR13",
"vttddr24 : [(570, 630)], vttddr13 : [(570, 630)], io33 : [(3140, 3460)], },",
"\"VMON9\"), (\"fpga\", \"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\", \"U35\", \"OUT17\", {(\"IC11\", \"EN\")}), (\"vadj_1v8_fpga\", \"IC11\", \"V_OUT\", {(\"fpga\", \"VADJ_1V8\")}),",
"\"wait until \" + vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"), ({vdd:",
"\"IC8\", \"V_OUT\", {(\"U35\", \"VMON4_ATT\"), (\"U39\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR13\")}), #add NCP nodes (\"vtt_ddrfpga13\", \"U39\",",
"= Input([{0, 1}], \"logical\") VIN = Input([(0, 13200)], \"power\") VOUT = lambda _,",
"\"VDD33\"), (\"U41\", \"VDD33\"), (\"IC5\", \"V_IN\"), (\"IC6\", \"V_IN\"), (\"IC7\", \"V_IN\"), (\"IC8\", \"V_IN\"), (\"IC9\", \"V_IN\"),",
"VDD_OCT p 77 (\"U26\", 0x0, ISL, []), #VDD_DDRCPU13 p 73 (\"U30\", 0x0, ISL,",
"Constraint([(0, 0)], {\"EN\": [{0}], \"V_PWR\": [(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_off)) ], \"power\", Wire.voltage_set)",
"def bus_req(self): return {self.V_PWR.name: [(5500, 14000)]} def bus_req_off(self): return {self.V_PWR.name: [(0, 4400)]} def",
"71 (\"fpga\", 0x0, FPGA, []), (\"cpu\", 0x0, ThunderX, []), (\"bmc\", 0x0, BMC, []),",
"{(\"IC5\", \"SHDN\")}), (\"en_mgtvccaux_r\", \"U35\", \"OUT12\", {(\"IC6\", \"SHDN\")}), (\"mgtvccaux_l\", \"IC5\", \"V_OUT\", {(\"U35\", \"VMON8\"), (\"fpga\",",
"\"VIN\" : [(0, thresh-1)], \"EN\" : [{0}]} implicit_off_2 = lambda _, thresh :",
"[{1}]} implicit_off = lambda _, threshold: {\"V_IN\" : [(0, (max(int(threshold * 1.06) -",
"(0, 60)], {}, [], lambda node: node.indep(\"CLK\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr):",
"node_string = \"<V_IN> V_IN\" V_IN = Input([(0, 12000)], \"power\") def __init__(self, name, bus_addr):",
"\"implicit_on\"), partial(Constraint.explicit, \"CLK\", {}, set())])), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(0,",
"#MGTAVTT_FPGA p 87 (\"U51\", 0x70, MAX20751, [900, \"max20751_vccint_fpga\"]), #VCCINT_FPGA p 97 (\"U43\", 0x0,",
"{(\"U11\", \"CLK_IN\"), (\"U16\", \"CLK_IN\")}), (\"clock_flol\", \"bmc\", \"B_CLOCK_FLOL\", {(\"fpga\", \"CLK_OK\")}), (\"fpga_clk\", \"U16\", \"CLK\", {(\"fpga\",",
"\"power\") #slave input power VR_ON = Input([{0, 1}], \"logical\") V_OUT = lambda default",
"its ddr bank voltage DDR02 (not 24), #but I adjusted it to match",
"0x0, MAX8869, [1800]), #MGTVCCAUX_R, p 88 (\"IC7\", 0x0, MAX15053, [1800]), #SYS_1V8, p 89",
"60)], [([(0, 3300), (0, 60)], {}, [], lambda node: node.indep(\"CLK\"))], \"clock\", Wire.clock_config) def",
"1}], {}, partial(Constraint.explicit, \"B_PSUP_ON\", set(), set()))], \"logical\", Wire.gpio_set) C_RESET_N = Output([{0, 1}], [Constraint([{0,",
"VMON1_ATT = Input([(0, 13900)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON1_ATT\", name, 0.4125)) VMON2_ATT =",
"({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd33: [(3000, 4000)], vdd",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON12\", name)) VCC_IN = Input([(0, 6000)], \"power\", lambda",
"NCP, []), (\"U39\", 0x0, NCP, []), (\"U40\", 0x0, NCP, []), (\"U57\", 0x0, SI5395,",
"bus_addr, NCP) class MAX8869(Node): implicit_on = lambda _, thresh: {\"V_IN\" : [(max(thresh +",
"ddr24_2v5 : [(2400, 2600)], #not imposed by the cpu, but the connected DIMM",
"x1, x2: z3.Or(x1 * 2 == x2, x1 * 2 == x2 +",
": [(1425, 1575)], ddr24: [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], #not imposed by",
"binary_multidimensional(i + 2)}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"})]))) outputs.append(Constraint([(0, 0)], {\"VID\": [{0, 1}, {0,",
"return [] else: self.configured = True return [ \"init_device('%s', False)\" % self.device, \"init_device('%s',",
": [(873, 927)]}, \"\"), ({vtt : [(1164, 1236)]}, \"\"), ({mgtaux_l : [(1746, 1854)],",
"\"OUT\", {(\"U35\", \"VMON1_ATT\"), (\"U37\", \"VDDH\"), (\"U41\", \"VDDH\"), (\"IC10\", \"V_PWR\"), (\"IC11\", \"V_PWR\"), (\"IC12\", \"V_PWR\"),",
"= {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 2000)]} VMON1_ATT = Input([(0, 13900)], \"monitor\", lambda",
"i in binary: multidim.append({int(i)}) return multidim def isl_outputs(): outputs = [] for i",
"self.default)] return except State_Space_Error: pass class NCP(Node): implicit_on = {\"VRI\" : [(868, 3600)],",
"= node.bus_req_off() for wire, state in node_req.items(): if not wire in req: req[wire]",
"\"POWERED_ON\" : PowerState({vdd : [(2300, 2600)], en1 : [{1}], en2 : [{0}]}, {",
"{\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(PSU, self).__init__(name, bus_addr, PSU) class Main_PSU(Node):",
"\"B_FDV_1V8\", {(\"U43\", \"VID\"), (\"U47\", \"VID\")}), (\"c_reset_n\", \"bmc\", \"C_RESET_N\", {(\"cpu\", \"CHIP_RESET_L\")}), (\"pll_dc_ok\", \"bmc\", \"C_PLL_DC_OK\",",
"[(0, 0)], vcco : [(0, 0)], vadj : [(0, 0)], vcc_2v5_ddr13 : [(0,",
": [(self.threshold, 13200)], self.VCC.name : [(2900, 3630)]} def bus_req_off(self): return {self.VIN.name : [(0,",
"to connect.. :') VDD_DDR24 = Input([(0, 1650)], \"power\") VDD_2V5_DDR24 = Input([(0, 3300)], \"power\")",
"2699))], \"SHDN\" : [{0}]} V_IN = Input([(0, 6000)], \"power\") SHDN = Input([{0, 1}],",
": [(0, 0)], vcc_ddr24 : [(0, 0)], vtt_ddr13 : [(0, 0)], vtt_ddr24 :",
"{\"EN_2\"}, after_set = {\"EN_2\"}), partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\", \"EN_2\"}, set())])), Constraint([(0, 0)],",
"\"\"), ({aux : [(1746, 1854)]}, \"\"), ({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400,",
"[]), (\"U40\", 0x0, NCP, []), (\"U57\", 0x0, SI5395, [\"clk_main\"]), (\"U11\", 0x0, SI5395, [\"clk_cpu\"]),",
"lambda _, threshold: {\"V_IN\" : [(0, (max(int(threshold * 1.06) - 1, 2699)))], \"V_EN\"",
"cpu, but the connected DIMM SPD needs 2.5 V ddr13_2v5 : [(2400, 2600)],",
"lambda _, default, threshold: Output([(0, default)], [Constraint([(default, default)], {\"V_IN\" : [(max(int(threshold * 1.06),",
"= Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{1}]})),",
"if self.configured: return [] else: self.configured = True return [ \"init_device('%s', False)\" %",
"0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)], \"EN\" : [{0}]}, partial(Constraint.implicit,",
"(\"U51\", \"VDD33\")}), (\"en_mgtavtt_fpga\", \"U35\", \"OUT14\", {(\"U41\", \"VR_ON\")}), (\"mgtavtt_fpga\", \"U41\", \"V_OUT\", {(\"fpga\", \"MGTAVTT\")}), (\"en_mgtavcc_fpga\",",
"1550)], ####### REGULAR VALUES ######### #ddr13 : [(1425, 1575)], #ddr24: [(1425, 1575)], #######",
": [(0, thresh-1)], \"EN_2\" : [{0}]} device = \"ir3581\" bus = \"power\" BUS",
"\"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX, self).__init__(name, bus_addr,",
"[partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, after_set={\"EN_PWR\"}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"}), partial(Constraint.implicit,",
"1400)], \"power\") VDD_IO33 = Input([(0, 3600)], \"power\") states = (lambda ok, rst, clk,",
": [(2400, 2600)], vttddr24 : [(570, 630)], vttddr13 : [(570, 630)], io33 :",
"1200)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"),",
"state in node_req.items(): if not wire in req: req[wire] = state else: req[wire]",
"= [ (\"b_psup_on\", \"bmc\", \"B_PSUP_ON\", {(\"psu_cpu0\", \"EN\"), (\"psu_cpu1\", \"EN\"), (\"main_psu\", \"EN\")}), (\"3v3_psup\", \"main_psu\",",
"partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_off)) ], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr, default, device): self.device",
"1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], \"VCC\" : [(0, 5250)],",
"vcco : [(1746, 1854)], vadj : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24",
"set()))], \"logical\", Wire.fpga_clk_ok) B_PSUP_ON = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_PSUP_ON\", set(),",
"4000)], vdd: [(2000, 2500)]}, \"wait until \" + vdd + \" stabilized\"), ({en1",
"73 (\"U30\", 0x0, ISL, []), #VDD_DDRCPU24 p 75 (\"U37\", 0x72, MAX20751, [900, \"max20751_mgtavcc_fpga\"]),",
"self.device = device for i in range(0, 20): self.generate_output(i) super(ISPPAC, self).__init__(name, bus_addr, ISPPAC)",
"vcc_ddr24 : [(1140, 3400)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vtt",
"False def generate_output(self, number): name = \"OUT\" + str(number) output = Output([{0, 1}],",
"ok : [{1}], rst : [{1}], clk : [(0, 0), (3300, 3300), (50,",
"\"power\") #replace VREF VCCO_VTT_DDR24 = Input([(0, 2000)], \"power\") VCCO_VCC_DDR13 = Input([(0, 3400)], \"power\")",
"[(0, 0)], vtt_ddr24 : [(0, 0)], vtt : [(0, 0)], mgtaux_l : [(0,",
"1.01)) self.V_OUT = self.V_OUT(int(voltage), int(voltage * 1.01)) super(MAX8869, self).__init__(name, bus_addr, MAX8869) class MAX15053(Node):",
"True return [ \"init_device('isl6334d_ddr_v', False)\" ] class CPU_3(Stateful_Node): VDD33 = Input([(0, 4000)], \"power\")",
"False def configure(self): if self.configured: return [] else: self.configured = True return [",
"1520)]) self.current = states[self.V_OUT.name] self.is_default = True return except State_Space_Error: self.is_default = False",
"\"bus\") #loop 1 and loop 2 will have different addresses... VCC = Input([(0,",
"0x0, SI5395, [\"clk_main\"]), (\"U11\", 0x0, SI5395, [\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0,",
"[(max(thresh + 500, 2700), 5500)], \"SHDN\" : [{1}]} implicit_off = lambda _, thresh:",
"3040)], [Constraint([(500, 3040)], {\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN\" :",
"clk : [(0, 0), (3300, 3300), (50, 50)], vdd : [(940, 980)], vdd09",
"12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit,",
"\"IC15\", \"V_OUT\", {(\"U20\", \"VMON7_ATT\"), (\"U24\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR13\")}), (\"vtt_ddrcpu13\", \"U24\", \"VREF\", {(\"U20\", \"VMON11\"),",
"name, bus_addr): super(FPGA, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA) #EVAL 3 version of the FPGA,",
"= Input([(0, 945)], \"power\") VDD_15 = Input([(0, 1650)], \"power\") VDD_DDR13 = Input([(0, 1650)],",
": [(0, 5500)], \"V_EN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def __init__(self,",
"VMON10 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON10\", name)) VMON11 = Input([(0,",
"= Output([(0, 0), (3300, 3300), (0, 50)], [ Constraint([(0, 0), (3300, 3300), (50,",
"= {\"VRI\" : [(0, 868)], \"VCC\" : [(0, 2374)]} VCC = Input([(0, 6000)],",
"commands = node.configure() if list(value[0])[0] == 0: commands.append(\"wait_for_voltage('%s', v_min=0, v_max=0.08, device='%s', monitor='%s')\" %",
"different addresses... VCC = Input([(0, 4000)], \"power\") EN = Input([{0, 1}], \"logical\") EN_2",
"Input([(0, 4000)], \"power\") BUS = Input([{0, 1}], \"bus\") VDDH = Input([(0, 23000)], \"power\")",
"intersect(states[self.VCC.name], [(2900, 3630)]) except State_Space_Error: self.configured = False return def configure(self): if self.configured:",
"{} def bus_req_off(self): return {} def update(self, states): try: intersect(states[self.V_OUT.name], [(500, 1520)]) self.current",
"2599)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\")), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"CLK_IN\": [(0, 0),",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7\", name)) VMON7_ATT = Input([(0, 5734)], \"monitor\",",
"name, bus_addr): super(Oscillator, self).__init__(name, bus_addr, Oscillator) class SI5395(Node): implicit_on = {\"VDD\": [(2600, 3600)],",
"super(Clock, self).__init__(name, bus_addr, Clock) class PowerConsumer(Node): node_string = \"<V_IN> V_IN\" V_IN = Input([(0,",
"= Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0, 1}], \"logical\") PLL_REF_CLK = Input([(0, 0),",
": [{0}], \"EN_VTT\" : [(0, 14000)]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\"))) return outputs class ISL(Node):",
"BMC(Node): B_CLOCK_FLOL = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_CLOCK_FLOL\", set(), set()))], \"logical\",",
"req.keys()))}) class MAX20751(Node): implicit_on = {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]} implicit_off =",
"#({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13",
"#reference input VREF = Output([(0, 6000)], [Constraint([(435, 1800)], {\"VRI\" : [(868, 3600)], \"VCC\"",
"\"EN_PWR\" : [{1}], \"EN_VTT\" : [(870, 14000)]}, {}, \\ dependency_update= (Constraint.is_default, \\ [partial(Constraint.implicit,",
"1}, {0, 1}, {0, 1}, {0, 1}], \"VCC\" : [(0, 5250)], \"EN_PWR\" :",
": [(1200, 1200)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746,",
"intersect(states[self.VOUT.name], [(500, 1600)]) self.is_default = True except State_Space_Error: self.is_default = False class IR(Node):",
"0x0, Oscillator, []), ] enzian_wires = [ (\"b_psup_on\", \"bmc\", \"B_PSUP_ON\", {(\"psu_cpu0\", \"EN\"), (\"psu_cpu1\",",
"0x0, IR, [4500, \"ir3581\", \"ir3581_loop_vdd_core\", \"ir3581_loop_0v9_vdd_oct\", 0x60, 0x62]), #VDD_CORE, VDD_OCT p 77 (\"U26\",",
"ddr24_2v5, ddr13, ddr13_2v5, vttddr24, vttddr13, io33 : { #pll_vdd, pll_ddr2, pll_ddr13, sys_pll_ddr :",
"vttddr24 : [(700, 800)], vttddr13 : [(700, 800)], io33 : [(3140, 3460)], },",
"[(0, 0), (3300, 3300), (50, 50)], vdd : [(940, 980)], vdd09 : [(870,",
"B_CLOCK_FLOL = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_CLOCK_FLOL\", set(), set()))], \"logical\", Wire.fpga_clk_ok)",
"<gh_stars>0 from sequence_generation import Node, Input, Output, Constraint, Wire, PowerState, Stateful_Node, intersect, State_Space_Error,",
"630)]}, \"%s should have stabilized by now\" %vdd), ###################################### ({ok : [{1}]}, \"must",
"{\"V_IN\" : [(0, (max(int(threshold * 1.06) - 1, 2699)))], \"V_EN\" : [{0}]} V_IN",
"I adjusted it to match the schematics, so I know which supplies to",
"by now\" %vdd), ###################################### ({ok : [{1}]}, \"must have written pll_mul and sys_pll_mul",
"\"VS\"), (\"U48\", \"VS\"), (\"U37\", \"VDD33\"), (\"U41\", \"VDD33\"), (\"IC5\", \"V_IN\"), (\"IC6\", \"V_IN\"), (\"IC7\", \"V_IN\"),",
"super(CPU2, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU2) class BMC(Node): B_CLOCK_FLOL = Output([{0, 1}], [Constraint([{0, 1}],",
"__init__(self, name, bus_addr, device): self.device = device for i in range(0, 20): self.generate_output(i)",
"50)], \"clock\") CLK = Output([(0, 0), (3300, 3300), (0, 50)], [ Constraint([(0, 0),",
"2700), 5500)], \"SHDN\" : [{1}]} implicit_off = lambda _, thresh: {\"V_IN\" : [(0,",
"__init__(self, name, bus_addr): super(PSU, self).__init__(name, bus_addr, PSU) class Main_PSU(Node): EN = Input([{0, 1}],",
": [(2300, 2600)], en1 : [{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({vdd:",
"\"B_FDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) B_CDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1},",
"self).__init__(name, bus_addr, ISL) def update(self, states): try: intersect(states[self.VOUT.name], [(500, 1600)]) self.is_default = True",
"return except State_Space_Error: self.is_default = False try: intersect(states[self.V_PWR.name], [(0, 4400)]) self.current = [(self.default,",
"\"VDD33\":[(0, 3630)], \"VDDH\":[(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_off)), ], \"power\", Wire.voltage_set) def __init__(self, name,",
"\"power\") MGTVCCAUX_R = Input([(0, 1900)], \"power\") VCCO_1V8 = Input([(0, 2000)], \"power\") #this is",
"#VCCINT_FPGA p 97 (\"U43\", 0x0, ISL, []), #VDD_DDRFPGA13 p 93 (\"U47\", 0x0, ISL,",
"rst : [{0}], clk : [(0, 0), (3300, 3300), (0, 0)], vdd :",
"[{0}]} implicit_off_2 = lambda _, thresh : {\"VCC\" : [(0, 2500)], \"VIN\" :",
"= self.VOUT(threshold) self.VOUT_2 = self.VOUT_2(threshold) super(IR, self).__init__(name, bus_addr, IR) def bus_req(self): return {self.VIN.name",
"14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_on), state_update = Constraint.default_state), Constraint([(600, 5250)], {\"EN\": [{1}], \"V_PWR\": [(5500,",
"{0, 1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_CDV_1V8\", set(), set()))], \"logical\", Wire.vid_set)",
"def __init__(self, name, bus_addr): super(CPU2, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU2) class BMC(Node): B_CLOCK_FLOL =",
"0), (3300, 3300), (0, 0)], {\"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]}, partial(Constraint.implicit,",
"bus_addr): super(PSU, self).__init__(name, bus_addr, PSU) class Main_PSU(Node): EN = Input([{0, 1}], \"logical\") V33_PSU",
"STEP ######### #({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1425,",
"- 1, 2699)))], \"V_EN\" : [{0}]} V_IN = Input([(0, 6000)], \"power\") V_EN =",
"(0, 50)], \"clock\") VDD = Input([(0, 1210)], \"power\") VDD_09 = Input([(0, 945)], \"power\")",
"\"power\") VTT_DDR13 = Input([(0, 1400)], \"power\") VDD_IO33 = Input([(0, 3600)], \"power\") states =",
"\"OUT\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(PSU, self).__init__(name, bus_addr, PSU) class",
"multidim = list({0} for i in range(8 - len(binary))) for i in binary:",
"* 1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(int(voltage), int(voltage * 1.01))",
"MGTAVTT = Input([(0, 1300)], \"power\") VCCINT_IO = Input([(0, 1000)], \"power\") VCCAUX = Input([(0,",
"= {} for node, _ in inputs: unite_dict(req, node.bus_req()) print(req) return req def",
"states): try: intersect(states[self.VOUT.name], [(500, 3040)]) self.is_default = True except State_Space_Error: self.is_default = False",
"name, bus_addr): super(CPU_3, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU_3) class PSU(Node): EN = Input([{0, 1}],",
"Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\" : [{1}]})),",
"{}, partial(Constraint.explicit, \"C_RESET_N\", set(), set()))], \"logical\", Wire.gpio_set) C_PLL_DC_OK = Output([{0, 1}], [Constraint([{0, 1}],",
"\"PLL_REF_CLK\")}), (\"bus\", \"power_bus\", \"BUS\", {(\"IC10\", \"BUS\"), (\"IC12\", \"BUS\"), (\"IC13\", \"BUS\"), (\"IC4\", \"BUS\"), (\"IC11\",",
"lambda node, name: node.isppac_monitor(\"VMON3_ATT\", name)) VMON4 = Input([(0, 5734)], \"monitor\", lambda node, name:",
"[(thresh, 13200)], \"EN\" : [{1}], \"BUS\": [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT\",",
"partial(Constraint.explicit, \"B_CDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) def __init__(self, name, bus_addr): super(BMC, self).__init__(name, bus_addr,",
"], \"POWERED_ON\" : []} )}, [\"CLK\", \"CLK_OK\", \"VCCINT\", \"VCCINT_IO\", \"VCCAUX\", \"VCCO_1V8\", \"VADJ_1V8\", \"VCCO_2V5_DDR13\",",
"Main_PSU, []), (\"U20\", 0x60, ISPPAC, [\"pac_cpu\"]), #cpu ISPPAC (\"U35\", 0x61, ISPPAC, [\"pac_fpga\"]), #fpga",
"p 88 (\"IC7\", 0x0, MAX15053, [1800]), #SYS_1V8, p 89 (\"IC8\", 0x0, MAX15053, [2500]),",
"{}, partial(Constraint.implicit, \"V5SB_PSU\", {}))], \"power\") BMC_VCC_3V3 = Output([(3300, 3300)], [Constraint([(3300, 3300)], {}, partial(Constraint.implicit,",
"\"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd33: [(3000, 4000)], vdd : [(2000, 2500)], en1 :",
"{\"VCC\", \"VIN\", \"BUS\", \"EN_2\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" :",
"vttddr24, vttddr13, io33 : { #pll_vdd, pll_ddr2, pll_ddr13, sys_pll_ddr : { \"POWERED_DOWN\" :",
"{\"EN\": [{0}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\": [{0}]}))], \"power\") V5SB_PSU = Output([(5000, 5000)], [Constraint([(5000, 5000)],",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"= Output([{0, 1}], [Constraint([{1, 0}], {\"VCC\": [(2800, 3960)], \"VCC_IN\": [(2250, 5500)]}, partial(Constraint.explicit, name,",
"Input([(0, 6000)], \"power\", lambda node, name: node.isppac_monitor(\"VCCINP\", name)) VCC = Input([(0, 4500)], \"power\",",
"def __init__(self, name, bus_addr, default, device): self.device = device self.default = default self.is_default",
"PSU, []), (\"psu_cpu1\", 0x0, PSU, []), (\"main_psu\", 0x0, Main_PSU, []), (\"U20\", 0x60, ISPPAC,",
"self.VCC.name : [(0, 2500)]} def update(self, states): try: intersect(states[self.VOUT.name], [(500, 3040)]) self.is_default =",
"= Input([(0, 2000)], \"power\") #not found in fpga boot sequ; filled in like",
"* list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return fun def configure(self): if self.configured:",
": [(0, 830)]} VCC = Input([(0, 6000)], \"power\") EN_PWR = Input([{0, 1}], \"logical\")",
"CLK_OK = Input([{0, 1}], \"logical\") VCCO_2V5_DDR24 = Input([(0, 3400)], \"power\") VCCO_2V5_DDR13 = Input([(0,",
"self.configured = False def ina_monitor(self, wire_name): def fun(value, states, node=self, wire=wire_name): if states[node.VS.name][0][0]",
"IR) def bus_req(self): return {self.VIN.name : [(self.threshold, 13200)], self.VCC.name : [(2900, 3630)]} def",
"of the THUNDERX: Comments indicate changes class ThunderX_EVAL3(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\")",
"VREF = Output([(0, 6000)], [Constraint([(435, 1800)], {\"VRI\" : [(868, 3600)], \"VCC\" : [(2375,",
"(\"en_vccint_fpga\", \"U35\", \"OUT9\", {(\"U51\", \"VR_ON\")}), (\"vccint_fpga\", \"U51\", \"V_OUT\", {(\"U35\", \"VMON6\"), (\"fpga\", \"VCCINT\")}), (\"en_sys_1v8\",",
"= False try: intersect(states[self.VDD33.name], [(0, 2800)]) self.current = [(self.default, self.default)] return except State_Space_Error:",
"lambda node, name: node.isppac_monitor(\"VMON9\", name)) VMON10 = Input([(0, 5734)], \"monitor\", lambda node, name:",
"NCP nodes (\"vtt_ddrfpga24\", \"U40\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR24\")}), (\"clk_sig\", \"oscillator\", \"CLK\", {(\"U57\", \"CLK_IN\")}), (\"clk_main\",",
"\"logical\", Wire.vid_set) def __init__(self, name, bus_addr): super(BMC, self).__init__(name, bus_addr, BMC) self.configured = False",
"1854)], vcco : [(1746, 1854)], vadj : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)],",
"(\"U35\", \"VCC_IN\")}), (\"bmc_vcc_3v3\", \"main_psu\", \"BMC_VCC_3V3\", {(\"U20\", \"VCC\"), (\"U35\", \"VCC\")}), #add 5vsb if added",
"[(1200, 1200)], ################################# ddr24_2v5 : [(2400, 2600)], #not imposed by the cpu, but",
"p 88 (\"IC6\", 0x0, MAX8869, [1800]), #MGTVCCAUX_R, p 88 (\"IC7\", 0x0, MAX15053, [1800]),",
"1854)], mgtaux_r : [(1746, 1854)]}, \"\"), ({ok: [{1}]}, \"\") ], \"POWERED_ON\" : []}",
"device): self.device = device self.default = default self.is_default = False self.current = [(default,",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], \"VCC\" : [(0,",
"voltage): self.implicit_on = self.implicit_on(int(voltage * 1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT =",
"0)], vdd09 : [(0, 0)], vdd15 : [(0, 0)], ddr24 : [(0, 0)],",
"class ISPPAC(Node): implicit_off = {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 2000)]} VMON1_ATT = Input([(0,",
"name, bus_addr): super(PowerConsumer, self).__init__(name, bus_addr, PowerConsumer) class CPU2(Stateful_Node): VDD = Input([(0, 2600)], \"power\")",
"\"V12_PSU\", {}), #add bmc (\"5v_psup\", \"main_psu\", \"V5_PSU\", {(\"U35\", \"VMON2_ATT\"), (\"U20\", \"VMON2_ATT\"), (\"U26\", \"VCC\"),",
"self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(voltage, int(voltage * 1.01)) super(MAX15053, self).__init__(name,",
"[1800]), #MGTVCCAUX_R, p 88 (\"IC7\", 0x0, MAX15053, [1800]), #SYS_1V8, p 89 (\"IC8\", 0x0,",
"if added bmc (\"12v_cpu0_psup\", \"psu_cpu0\", \"OUT\", {(\"U20\", \"VMON1_ATT\"), (\"IC4\", \"V_PWR\"), (\"U26\", \"EN_VTT\"), (\"U30\",",
"{(\"U20\", \"VMON7_ATT\"), (\"U24\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR13\")}), (\"vtt_ddrcpu13\", \"U24\", \"VREF\", {(\"U20\", \"VMON11\"), (\"cpu\", \"VTT_DDR13\")}),",
"50)]}, \"\"), ({vcc : [(873, 927)]}, \"\"), ({io : [(873, 927)]}, \"\"), ({aux",
"\"max15301_15_vdd_oct\"]), #1V5_VDD_OCT p.70 (\"IC11\", 0x12, MAX15301, [1800, \"max15301_vadj_1v8\"]), #VADJ_1V8 p.91 (\"IC13\", 0x15, MAX15301,",
"0)], aux : [(0, 0)], vcco : [(0, 0)], vadj : [(0, 0)],",
"partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def __init__(self, name, bus_addr, voltage): self.implicit_on = self.implicit_on(int(voltage",
": [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\"}, {\"VR_ON\"}, after_set",
"self.current = [(default, default)] self.V_OUT = MAX20751.V_OUT(default) super(MAX20751, self).__init__(name, bus_addr, MAX20751) def bus_req(self):",
"bus_addr, \"POWERED_DOWN\", ThunderX_EVAL3) class Bus(Node): BUS = Output([{0, 1}], [ Constraint([{1}], lambda node,",
"{}, partial(Constraint.implicit, \"BMC_VCC_3V3\", {}))], \"power\") V5_PSU = Output([(0, 5000)], [ Constraint([(5000, 5000)], {\"EN\":",
"bus_addr): super(PowerConsumer, self).__init__(name, bus_addr, PowerConsumer) class CPU2(Stateful_Node): VDD = Input([(0, 2600)], \"power\") EN1",
"name: node.isppac_monitor(\"VMON5_ATT\", name)) VMON6 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON6\", name))",
"3300), (50, 50)]} implicit_off = {\"VDD\": [(0, 2599)], \"CLK_IN\": [(0, 0), (3300, 3300),",
": [{1}]}, { \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\"",
"= loop2 self.l1_addr = l1_addr self.l2_addr = l2_addr self.implicit_off = self.implicit_off(threshold) self.implicit_off_2 =",
"[(1164, 1236)], mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746, 1854)], mgtavcc : [(873,",
"\"VREF\", {(\"fpga\", \"VCCO_VTT_DDR13\")}), (\"en_sys_2v5_24\", \"U35\", \"OUT8\", { (\"IC9\", \"V_EN\")}), (\"sys_2v5_24\", \"IC9\", \"V_OUT\", {(\"U35\",",
"{(\"U35\", \"VMON7\"), (\"fpga\", \"MGTAVCC\")}), (\"en_vccint_fpga\", \"U35\", \"OUT9\", {(\"U51\", \"VR_ON\")}), (\"vccint_fpga\", \"U51\", \"V_OUT\", {(\"U35\",",
"max(thresh + 499, 2699))], \"SHDN\" : [{0}]} V_IN = Input([(0, 6000)], \"power\") SHDN",
"ddr13_2v5 : [(0, 0)], vttddr24 : [(0, 0)], vttddr13 : [(0, 0)], io33",
": [(0, 0)]}, \"\"), ({vtt : [(0, 0)]}, \"\"), ({mgtavcc : [(0, 0)]},",
"[{1}]}, \"\"), ({en2 : [{1}], vdd: [(2000, 2600)]}, \"\") ], \"POWERED_ON\": []}) },",
"(\"IC7\", \"V_IN\"), (\"IC8\", \"V_IN\"), (\"IC9\", \"V_IN\"), (\"U51\", \"VDD33\")}), (\"en_mgtavtt_fpga\", \"U35\", \"OUT14\", {(\"U41\", \"VR_ON\")}),",
"= False return try: intersect(states[self.VCC.name], [(2900, 3630)]) except State_Space_Error: self.configured = False return",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON11\", name)) VMON11_ATT = Input([(0, 5734)], \"monitor\", lambda node,",
"EVAL 3 versions enzian_nodes_EVAL3 = [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0, PSU,",
"\"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"})]))) outputs.append(Constraint([(0, 0)], {\"VID\":",
"\"logical\") OUT = Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"OUT\",",
"800)], io33 : [(3140, 3460)], }, { \"POWERED_DOWN\": [ ({clk: [(0, 0), (3300,",
"= Input([{0, 1}], \"bus\") #loop 1 and loop 2 will have different addresses...",
"clk: [(0, 0), (3300, 3300), (0, 0)], vcc : [(0, 0)], io :",
": [(1450, 1550)]}, \"\"), ({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], ddr13",
"= lambda _, default, threshold: Output([(0, default)], [Constraint([(default, default)], {\"V_IN\" : [(max(int(threshold *",
"= Input([{0, 1}], \"logical\") states = (lambda vdd33, vdd, en1, en2: { \"POWERED_DOWN\"",
"\"VREF\", \"implicit_off\")), Constraint([(0, 0)], { \"VRI\" : [(0, 3600)], \"VCC\" : [(0, 2374)]},",
"{0, 1}], \"logical\") VOUT = Output([(0, 1600)], isl_outputs(), \"power\") def __init__(self, name, bus_addr):",
"(3300, 3300), (50, 50)]}, partial(Constraint.implicit, \"CLK\", \"implicit_on\"), dependency_update= (Constraint.is_configured, [partial(Constraint.implicit, \"CLK\", \"implicit_on\"), partial(Constraint.explicit,",
"{(\"U51\", \"VR_ON\")}), (\"vccint_fpga\", \"U51\", \"V_OUT\", {(\"U35\", \"VMON6\"), (\"fpga\", \"VCCINT\")}), (\"en_sys_1v8\", \"U35\", \"OUT16\", {(\"IC7\",",
"= Input([(0, 6000)], \"power\") VBUS = Input([(0, 40000)], \"monitor\", lambda node, name: node.ina_monitor(name))",
"], \"POWERED_ON\" : [] }) }, [\"PLL_DC_OK\", \"CHIP_RESET_L\", \"PLL_REF_CLK\", \"VDD\", \"VDD_09\", \"VDD_15\", \"VDD_DDR24\",",
"[(0, 2600)], \"VCC_IN\": [(0, 6000)]}, partial(Constraint.implicit, name, \"implicit_off\")), Constraint([{0}], {\"VCC\": [(0, 4500)], \"VCC_IN\":",
"3300), (0, 0)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr, device):",
"{} for node, _ in inputs: node_req = node.bus_req_off() for wire, state in",
": [(2900, 3630)]} def bus_req_off(self): return {self.VIN.name : [(0, self.threshold-1)], self.VCC.name : [(0,",
"Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_PLL_DC_OK\", set(), set()))], \"logical\", Wire.cpu_clk_ok) B_FDV_1V8 =",
"ddr24_2v5 : [(2400, 2600)], ddr13 : [(1200, 1200)], ddr13_2v5 : [(2400, 2600)], vttddr24",
"return {} def update(self, states): try: intersect(states[self.V_OUT.name], [(500, 1520)]) self.current = states[self.V_OUT.name] self.is_default",
": [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ##################################### ({mgtavcc",
"[(2400, 2600)], vttddr24 : [(700, 800)], vttddr13 : [(700, 800)], io33 : [(3140,",
"\"V_IN\"), (\"IC16\", \"V_IN\"), (\"cpu\", \"VDD_IO33\"), (\"U34\", \"VCC\"), (\"U57\", \"VDD\"), (\"U11\", \"VDD\"), (\"U16\", \"VDD\"),",
"= {\"VDD\": [(0, 2599)], \"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]} VDD =",
"[(1425, 1575)], ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700, 800)], vttddr13 : [(700,",
"[(700, 800)], vttddr13 : [(700, 800)], io33 : [(3140, 3460)], }, { \"POWERED_DOWN\":",
"14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_on), state_update= Constraint.default_state), Constraint([(500, 1520)], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500,",
"}), \"POWERED_ON\" : PowerState({ clk: [(0, 0), (3300, 3300), (50, 50)], vcc :",
"Wire.vid_set) B_CDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1},",
"2200)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD33\", \"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name,",
"bus_addr): super(CPU_3, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU_3) class PSU(Node): EN = Input([{0, 1}], \"logical\")",
"Input([(0, 3400)], \"power\") VCCO_2V5_DDR13 = Input([(0, 3400)], \"power\") VCCO_VCC_DDR24 = Input([(0, 3400)], \"power\")",
"0x0, ISL, []), #VDD_DDRCPU24 p 75 (\"U37\", 0x72, MAX20751, [900, \"max20751_mgtavcc_fpga\"]), #MGTACC_FPGA p",
"(\"U51\", 0x70, MAX20751, [900, \"max20751_vccint_fpga\"]), #VCCINT_FPGA p 97 (\"U43\", 0x0, ISL, []), #VDD_DDRFPGA13",
"fun def configure(self): if self.configured: return [] else: self.configured = True return [",
"(\"vtt_ddrcpu24\", \"U25\", \"VREF\", {(\"U20\", \"VMON12\"), (\"cpu\", \"VTT_DDR24\")}), (\"pll_ref_clk\", \"U11\", \"CLK\", {(\"cpu\", \"PLL_REF_CLK\")}), (\"bus\",",
"\"power\") V12_PSU = Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"V12_PSU\",",
"* list(value[0])[0], node.device )) return (True, \"\\n\".join(commands)) else: return (False, \"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f,",
"(\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0, PSU, []), (\"psu_cpu1\", 0x0, PSU, []), (\"main_psu\",",
"(\"en_vdd_ddrfpga24\", \"U35\", \"OUT19\", {(\"U47\", \"EN_PWR\")}), (\"vdd_ddrfpga13\", \"U43\", \"VOUT\", {(\"U48\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR13\"), (\"U39\",",
": [(0, 868)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\")), Constraint([(0, 0)], {",
"177): voltage_min = math.floor(1600 - i * 6.25) voltage_max = math.ceil(1600 - i",
"vdd15 : [(1450, 1550)], ####### REGULAR VALUES ######### #ddr13 : [(1425, 1575)], #ddr24:",
"binary_multidimensional(i + 2), \"VCC\" : [(4750, 5250)], \"EN_PWR\" : [{1}], \"EN_VTT\" : [(870,",
"class Main_PSU(Node): EN = Input([{0, 1}], \"logical\") V33_PSU = Output([(0, 3300)], [ Constraint([(3300,",
"0)], vadj: [(0, 0)]}, \"\"), ({aux : [(0, 0)]}, \"\"), ({io : [(0,",
"#2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA, []), (\"cpu\", 0x0, ThunderX, []), (\"bmc\", 0x0,",
"super(ThunderX_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX_EVAL3) class Bus(Node): BUS = Output([{0, 1}], [ Constraint([{1}],",
"[ Constraint([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1},",
"{\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_on), state_update = Constraint.default_state), Constraint([(600, 5250)],",
"pll_ddr13, sys_pll_ddr : { \"POWERED_DOWN\" : PowerState({ ok : [{0}], rst : [{0}],",
"intersect(states[self.VDDH.name], [(8500, 14000)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass class Oscillator(Node):",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON6\", name)) VMON7 = Input([(0, 5734)], \"monitor\", lambda",
"ThunderX(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0, 1}], \"logical\") PLL_REF_CLK =",
"except State_Space_Error: pass class Oscillator(Node): VDD = Input([(0, 3600)], \"power\") CLK = Output([(0,",
"(\"bus\", \"power_bus\", \"BUS\", {(\"IC10\", \"BUS\"), (\"IC12\", \"BUS\"), (\"IC13\", \"BUS\"), (\"IC4\", \"BUS\"), (\"IC11\", \"BUS\"),",
"\"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\"}, {\"VR_ON\"},",
"\"VMON7\"), (\"fpga\", \"MGTAVCC\")}), (\"en_vccint_fpga\", \"U35\", \"OUT9\", {(\"U51\", \"VR_ON\")}), (\"vccint_fpga\", \"U51\", \"V_OUT\", {(\"U35\", \"VMON6\"),",
"2599)], \"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]} VDD = Input([(0, 3600)], \"power\")",
"= default self.is_default = False self.current = [(default, default)] self.V_OUT = MAX15301.V_OUT(default) super(MAX15301,",
"DIMM SPD needs 2.5 V ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700, 800)],",
"0)], {\"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\"))], \"clock\", Wire.clock_config)",
"bus_addr, MAX20751) def bus_req(self): return {} def bus_req_off(self): return {} def update(self, states):",
"Input([{0, 1}], \"logical\") V33_PSU = Output([(0, 3300)], [ Constraint([(3300, 3300)], {\"EN\": [{1}]}, partial(Constraint.implicit,",
"= Input([(0, 6000)], \"power\", lambda node, name: node.isppac_monitor(\"VCCINP\", name)) VCC = Input([(0, 4500)],",
"= Input([(0, 12000)], \"power\") def __init__(self, name, bus_addr): super(PowerConsumer, self).__init__(name, bus_addr, PowerConsumer) class",
"[{1}]}, { \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" :",
"power VR_ON = Input([{0, 1}], \"logical\") V_OUT = lambda default : Output([(0, 1520)],",
"name, bus_addr): super(Clock, self).__init__(name, bus_addr, Clock) class PowerConsumer(Node): node_string = \"<V_IN> V_IN\" V_IN",
"1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], \"VCC\" :",
"Output([(0, default)], [Constraint([(default, default)], {\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" :",
"pinname, wire_name, multiplier = 1): def fun(value, _, node=self, wire_name=wire_name, pinname=pinname, multiplier=multiplier): commands",
"FPGA to EVAL 3 versions enzian_nodes_EVAL3 = [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\",",
"[{1}], vdd: [(2000, 2600)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD\", \"EN1\", \"EN2\"]) def",
"construct_dependency(self, name, req): return (SET.Implicit, [set(), set(), set(), set()], lambda states, req =",
"device for i in range(0, 20): self.generate_output(i) super(ISPPAC, self).__init__(name, bus_addr, ISPPAC) self.configured =",
"lambda node, name: node.ina_monitor(name)) def __init__(self, name, bus_addr, device): self.device = device super(INA226,",
"Input([{0, 1}], \"bus\") EN = Input([{0, 1}], \"logical\") V_PWR = Input([(0, 14000)], \"power\")",
"intersect(states[self.V_PWR.name], [(0, 4400)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass class NCP(Node):",
"1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], \"logical\") VOUT",
"update(self, states): try: intersect(states[self.VOUT.name], [(500, 3040)]) self.is_default = True except State_Space_Error: self.is_default =",
"2600)], ##### REGULAR VALUES: ####### #vcc_ddr13 : [(1140, 3400)], #vcc_ddr24 : [(1140, 3400)],",
"[(1425, 1575)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1425, 1575)], ddr13_2v5 : [(2400,",
"\"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX) #EVAL",
"= self.V_OUT(voltage, int(voltage * 1.01)) super(MAX15053, self).__init__(name, bus_addr, MAX15053) def binary_multidimensional(decimal): binary =",
"return (SET.Implicit, [set(), set(), set(), set()], lambda states, req = req: {name :",
"bus_addr, \"POWERED_DOWN\", FPGA) #EVAL 3 version of the FPGA, comments indicate changes class",
"(0, 0)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr, device): self.device",
"1}], \"logical\") VCCO_2V5_DDR24 = Input([(0, 3400)], \"power\") VCCO_2V5_DDR13 = Input([(0, 3400)], \"power\") VCCO_VCC_DDR24",
"Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") VDD = Input([(0, 1210)], \"power\") VDD_09",
"vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd : [(0, 0)], en1 : [{0}],",
"PSU(Node): EN = Input([{0, 1}], \"logical\") OUT = Output([(0, 12000)], [ Constraint([(12000, 12000)],",
"bus_addr): super(Clock, self).__init__(name, bus_addr, Clock) class PowerConsumer(Node): node_string = \"<V_IN> V_IN\" V_IN =",
"complex_constraints= [(lambda x1, x2: z3.Or(x1 * 2 == x2, x1 * 2 ==",
"% ( wire, 0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return fun",
"3630)]) except State_Space_Error: self.configured = False return def configure(self): if self.configured: return []",
"\"V33_PSU\", {\"EN\": [{0}]}))], \"power\") V12_PSU = Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" :",
"State_Space_Error: self.configured = False return def configure(self): if self.configured: return [] else: self.configured",
"1575)], ddr24: [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], #not imposed by the cpu,",
"{\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(0, 2599)]}))], \"clock\", Wire.clock_config) def __init__(self, name,",
"\"power\") VCCO_VCC_DDR13 = Input([(0, 3400)], \"power\") VADJ_1V8 = Input([(0, 2000)], \"power\") #not found",
"dependency_update= (Constraint.is_default, \\ [partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, after_set={\"EN_PWR\"}, before_complete= {\"VCC\",",
"0)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr, device): self.device =",
"VDD_DDR24 = Input([(0, 1650)], \"power\") VDD_2V5_DDR24 = Input([(0, 3300)], \"power\") VTT_DDR24 = Input([(0,",
": [{1}]}, \"must have written pll_mul and sys_pll_mul beforehand\"), ({rst : [{1}]}, \"\")",
"0)]}, \"\"), ({io : [(0, 0)]}, \"\"), ({vcc : [(0, 0)]}, \"\"), ],",
"SPD needs 2.5 V ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570, 630)], vttddr13",
"node, inputs: node.construct_req(inputs), lambda node, name, inputs: node.construct_dependency(name, node.construct_req(inputs))), Constraint([{0}], lambda node, inputs:",
"Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"V_EN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ],",
"node.construct_req(inputs))), Constraint([{0}], lambda node, inputs: {}, lambda node, name, inputs: node.construct_dependency(name, node.construct_req_off(inputs))) ],",
"like VCCO_VCC_DDR voltages MGTVCCAUX_L = Input([(0, 1900)], \"power\") MGTVCCAUX_R = Input([(0, 1900)], \"power\")",
"Constraint([(0, 0)], {\"VR_ON\": [{0}], \"VDD33\":[(0, 3630)], \"VDDH\":[(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_off)), ], \"power\",",
"{0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_FDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) B_CDV_1V8 =",
"2500)]}, \"wait until \" + vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"),",
"nodes (\"vtt_ddrfpga24\", \"U40\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR24\")}), (\"clk_sig\", \"oscillator\", \"CLK\", {(\"U57\", \"CLK_IN\")}), (\"clk_main\", \"U57\",",
"will have different addresses... VCC = Input([(0, 4000)], \"power\") EN = Input([{0, 1}],",
"Input([(0, 1000)], \"power\") MGTAVTT = Input([(0, 1300)], \"power\") VCCINT_IO = Input([(0, 1000)], \"power\")",
"\"VCC_IN\": [(2250, 5500)]}, partial(Constraint.explicit, name, {\"VCC\", \"VCC_IN\"}, set())), Constraint([{0}], {\"VCC\": [(0, 2600)], \"VCC_IN\":",
"\"VOUT\", {(\"U48\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR13\"), (\"U39\", \"VRI\")}), #vcco (\"vdd_ddrfpga24\", \"U47\", \"VOUT\", {(\"U44\", \"VBUS\"),",
"[{0}]}, \"\") ], \"POWERED_DOWN\" : [] } ), \"POWERED_ON\" : PowerState({ ok :",
"({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1425, 1575)], ddr13_2v5",
"partial(Constraint.explicit, \"CLK\", {}, set())])), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]},",
"[(0, 0)], vtt_ddr24 : [(0, 0)], vadj: [(0, 0)]}, \"\"), ({aux : [(0,",
"4000)], \"power\") VDD = Input([(0, 2500)], \"power\") EN1 = Input([{0, 1}], \"logical\") EN2",
"mgtaux_r : [(1746, 1854)]}, \"\"), ({ok: [{1}]}, \"\") ], \"POWERED_ON\" : []} )},",
"= device for i in range(0, 20): self.generate_output(i) super(ISPPAC, self).__init__(name, bus_addr, ISPPAC) self.configured",
": [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], ##### REGULAR",
"bus_addr, INA226) self.configured = False def ina_monitor(self, wire_name): def fun(value, states, node=self, wire=wire_name):",
"}, [\"PLL_DC_OK\", \"CHIP_RESET_L\", \"PLL_REF_CLK\", \"VDD\", \"VDD_09\", \"VDD_15\", \"VDD_DDR24\", \"VDD_2V5_DDR24\", \"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\",",
"node, name: node.isppac_monitor(\"VCCA\", name)) def __init__(self, name, bus_addr, device): self.device = device for",
"written pll_mul and sys_pll_mul beforehand\"), ({rst : [{1}]}, \"\") ], \"POWERED_ON\" : []",
"def configure(self): if self.configured: return [] else: self.configured = True return [ \"init_device('%s',",
"bus_addr): super(Bus, self).__init__(name, bus_addr, Bus) def construct_req(self, inputs): req = {} for node,",
"1575)], ####### FOR EVAL 3 ########### ddr24: [(1200, 1200)], ddr13: [(1200, 1200)], #################################",
"+ str(number) output = Output([{0, 1}], [Constraint([{1, 0}], {\"VCC\": [(2800, 3960)], \"VCC_IN\": [(2250,",
"[{0}], \"VDD33\":[(0, 3630)], \"VDDH\":[(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_off)), ], \"power\", Wire.voltage_set) def __init__(self,",
"else: return (False, \"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095 *",
"########### ddr24: [(1200, 1200)], ddr13: [(1200, 1200)], ################################# ddr24_2v5 : [(2400, 2600)], #not",
"(\"main_psu\", \"EN\")}), (\"3v3_psup\", \"main_psu\", \"V33_PSU\", {(\"U20\", \"VMON3_ATT\"), (\"U27\", \"VS\"), (\"U31\", \"VS\"), (\"IC15\", \"V_IN\"),",
"{ \"POWERED_DOWN\" : PowerState({ clk: [(0, 0), (3300, 3300), (0, 0)], vcc :",
"states = (lambda vdd33, vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd33: [(0, 0)],",
"super(MAX15301, self).__init__(name, bus_addr, MAX15301) def bus_req(self): return {self.V_PWR.name: [(5500, 14000)]} def bus_req_off(self): return",
"1}], \"bus\") VDDH = Input([(0, 23000)], \"power\") #slave input power VR_ON = Input([{0,",
"V5SB_PSU = Output([(5000, 5000)], [Constraint([(5000, 5000)], {}, partial(Constraint.implicit, \"V5SB_PSU\", {}))], \"power\") BMC_VCC_3V3 =",
"{\"VID\": [{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1},",
"} ), \"POWERED_ON\" : PowerState({ ok : [{1}], rst : [{1}], clk :",
"[{0}], en2 : [{1}]}, { \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\":",
"14000)]}, {}, \\ dependency_update= (Constraint.is_default, \\ [partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)},",
"(\"IC7\", 0x0, MAX15053, [1800]), #SYS_1V8, p 89 (\"IC8\", 0x0, MAX15053, [2500]), #SYS_2V5_13 (\"IC9\",",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], \"logical\")",
"\"logical\") states = (lambda vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd : [(0,",
"(\"b_psup_on\", \"bmc\", \"B_PSUP_ON\", {(\"psu_cpu0\", \"EN\"), (\"psu_cpu1\", \"EN\"), (\"main_psu\", \"EN\")}), (\"3v3_psup\", \"main_psu\", \"V33_PSU\", {(\"U20\",",
"(50, 50)], io33 : [(3140, 3460)]}, \"wait for %s to stabilize\" %(io33)), ({vdd",
"MAX15301) def bus_req(self): return {self.V_PWR.name: [(5500, 14000)]} def bus_req_off(self): return {self.V_PWR.name: [(0, 4400)]}",
": [{0}]} V_IN = Input([(0, 6000)], \"power\") SHDN = Input([{0, 1}], \"logical\") V_OUT",
"\"main_psu\", \"V33_PSU\", {(\"U20\", \"VMON3_ATT\"), (\"U27\", \"VS\"), (\"U31\", \"VS\"), (\"IC15\", \"V_IN\"), (\"IC16\", \"V_IN\"), (\"cpu\",",
"\"VDD_2V5_DDR13\")}), (\"vtt_ddrcpu13\", \"U24\", \"VREF\", {(\"U20\", \"VMON11\"), (\"cpu\", \"VTT_DDR13\")}), (\"en_2v5_cpu24\", \"U20\", \"OUT10\", {(\"IC16\", \"V_EN\")}),",
"[ \"init_device('%s', False)\" % (self.device) ] class Clock(Node): CLK = Output([(0, 3300), (0,",
"__init__(self, name, bus_addr): super(PowerSupply, self).__init__(name, bus_addr, PowerSupply) class ISPPAC(Node): implicit_off = {\"VCC\": [(0,",
"lambda states, req = req: {name : set(filter(lambda x: not empty_intersection(x, req, states),",
"[(1140, 3400)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]},",
"list(value[0])[0], node.device )) return fun def configure(self): if self.configured: return [] else: self.configured",
"needs 2.5 V ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570, 630)], vttddr13 :",
"vttddr13 : [(570, 630)]}, \"%s should have stabilized by now\" %vdd), ###################################### ({ok",
"71 (\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA_EVAL3, []), (\"cpu\",",
"def update(self, states): try: intersect(states[self.VOUT.name], [(500, 3040)]) self.is_default = True except State_Space_Error: self.is_default",
"def __init__(self, name, bus_addr): super(PSU, self).__init__(name, bus_addr, PSU) class Main_PSU(Node): EN = Input([{0,",
"{(\"psu_cpu0\", \"EN\"), (\"psu_cpu1\", \"EN\"), (\"main_psu\", \"EN\")}), (\"3v3_psup\", \"main_psu\", \"V33_PSU\", {(\"U20\", \"VMON3_ATT\"), (\"U27\", \"VS\"),",
"SI5395, [\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_nodes =",
"800)], vttddr13 : [(700, 800)]}, \"%s should have stabilized by now\" %vdd), ({ok",
"2500)], \"VIN\" : [(0, thresh-1)], \"EN_2\" : [{0}]} device = \"ir3581\" bus =",
"\"IC6\", \"V_OUT\", {(\"U35\", \"VMON9\"), (\"fpga\", \"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\", \"U35\", \"OUT17\", {(\"IC11\", \"EN\")}), (\"vadj_1v8_fpga\", \"IC11\",",
"\"VIN\", \"BUS\", \"EN_2\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0,",
"BUS = Input([{0, 1}], \"bus\") VDDH = Input([(0, 23000)], \"power\") #slave input power",
"set(), set()))], \"logical\", Wire.vid_set) B_CDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1}, {0,",
"vcc_2v5_ddr24 : [(0, 0)], vcc_ddr13 : [(0, 0)], vcc_ddr24 : [(0, 0)], vtt_ddr13",
"Input([(0, 4000)], \"power\") VDD = Input([(0, 2500)], \"power\") EN1 = Input([{0, 1}], \"logical\")",
"= Input([(0, 4500)], \"power\", lambda node, name: node.isppac_monitor(\"VCCA\", name)) def __init__(self, name, bus_addr,",
"pinname=pinname, multiplier=multiplier): commands = node.configure() if list(value[0])[0] == 0: commands.append(\"wait_for_voltage('%s', v_min=0, v_max=0.08, device='%s',",
"list(value[0])[0] * multiplier, 0.00105 * list(value[0])[0] * multiplier, node.device, pinname )) return (True,",
"[(5500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_on), state_update = Constraint.default_state), Constraint([(600, 5250)], {\"EN\": [{1}], \"V_PWR\":",
"0), (3300, 3300), (50, 50)]} implicit_off = {\"VDD\": [(0, 2599)], \"CLK_IN\": [(0, 0),",
": [{1}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V12_PSU\",",
"vdd15 : [(0, 0)], ddr24 : [(0, 0)], ddr24_2v5 : [(0, 0)], ddr13",
"(False, \"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095 * list(value[0])[0], 0.00105",
": [(2000, 2500)], en1 : [{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({en2",
"[(2400, 2600)], vttddr24 : [(570, 630)], vttddr13 : [(570, 630)], io33 : [(3140,",
"(0, 50)], [ Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)], \"CLK_IN\":",
"OUT = Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"OUT\", {\"EN\":",
"(\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA_EVAL3, []), (\"cpu\", 0x0,",
"500, 2700), 5500)], \"SHDN\" : [{1}]} implicit_off = lambda _, thresh: {\"V_IN\" :",
"Input([(0, 945)], \"power\") VDD_15 = Input([(0, 1650)], \"power\") VDD_DDR13 = Input([(0, 1650)], \"power\")",
"[(870, 930)], vdd15 : [(1450, 1550)]}, \"\"), #### REGULAR TRANSITION STEP ######### #({ddr24",
"1520)], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update =",
"5500)], \"SHDN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def __init__(self, name, bus_addr,",
"super(CPU_3, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU_3) class PSU(Node): EN = Input([{0, 1}], \"logical\") OUT",
"\"VDD\"), (\"U16\", \"VDD\"), (\"oscillator\", \"VDD\")}), #(\"12v_psup\", \"main_psu\", \"V12_PSU\", {}), #add bmc (\"5v_psup\", \"main_psu\",",
"\"power\") BMC_VCC_3V3 = Output([(3300, 3300)], [Constraint([(3300, 3300)], {}, partial(Constraint.implicit, \"BMC_VCC_3V3\", {}))], \"power\") V5_PSU",
"Output([(0, thresh)], [Constraint([(default, default)], {\"V_IN\" : [(max(thresh + 500, 2700), 5500)], \"SHDN\" :",
"vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ##################################### ({mgtavcc : [(873, 927)]},",
"name, bus_addr, default, device): self.device = device self.default = default self.is_default = False",
"VIN = Input([(0, 13200)], \"power\") VOUT = lambda _, thresh : Output([(0, 3040)],",
"self.l2_addr = l2_addr self.implicit_off = self.implicit_off(threshold) self.implicit_off_2 = self.implicit_off_2(threshold) self.VOUT = self.VOUT(threshold) self.VOUT_2",
"{}, set())])), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\",",
"VMON8_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8_ATT\", name)) VMON9 = Input([(0,",
"to stabilize\" %(io33)), ({vdd : [(940, 980)]}, \"\"), ({vdd09 : [(870, 930)], vdd15",
"\"logical\") V_OUT = lambda _, default, threshold: Output([(0, default)], [Constraint([(default, default)], {\"V_IN\" :",
"[ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0, PSU, []), (\"psu_cpu1\", 0x0, PSU, []),",
"Output, Constraint, Wire, PowerState, Stateful_Node, intersect, State_Space_Error, unite_dict, state_union, SET, empty_intersection import math",
"\"VCCO_2V5_DDR13\", \"VCCO_2V5_DDR24\", \"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name,",
"\"logical\") VOUT = Output([(0, 1600)], isl_outputs(), \"power\") def __init__(self, name, bus_addr): self.is_default =",
": binary_multidimensional(i + 2)}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"})]))) outputs.append(Constraint([(0, 0)], {\"VID\": [{0, 1},",
"(\"cpu\", \"VDD\")}), (\"vdd_oct_en_l2\", \"U20\", \"OUT7\", {(\"U34\", \"EN_2\")}), (\"0v9_vdd_oct\", \"U34\", \"VOUT_2\", {(\"U20\", \"VMON5\"), (\"cpu\",",
"0)], vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0, 0)], vtt : [(0, 0)],",
"= Input([(0, 1400)], \"power\") VTT_DDR13 = Input([(0, 1400)], \"power\") VDD_IO33 = Input([(0, 3600)],",
"vtt_ddr24 : [(550, 1700)], vtt : [(1164, 1236)], mgtaux_l : [(1746, 1854)], mgtaux_r",
": [{1}], rst : [{1}], clk : [(0, 0), (3300, 3300), (50, 50)],",
"2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)],",
"TRANSITION STEP ###### #({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 :",
"%s to stabilize\" %(io33)), ({vdd : [(940, 980)]}, \"\"), ({vdd09 : [(870, 930)],",
"[(0, 2374)]} VCC = Input([(0, 6000)], \"power\") VRI = Input([(0, 6000)], \"power\") #reference",
"for wire, state in node_req.items(): if not wire in req: req[wire] = state",
"\"BUS\", \"VR_ON\"}, set())])), Constraint([(0, 0)], {\"VR_ON\": [{0}], \"VDD33\":[(0, 3630)], \"VDDH\":[(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\",",
"= Input([(0, 3600)], \"power\") CLK = Output([(0, 0), (3300, 3300), (0, 50)], [",
": [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], ############################# vtt_ddr13 : [(550, 1700)], vtt_ddr24",
"\"VCC\" : [(0, 5250)], \"EN_PWR\" : [{0}], \"EN_VTT\" : [(0, 14000)]}, partial(Constraint.implicit, \"VOUT\",",
"\"\"), ({vcc : [(0, 0)]}, \"\"), ], \"POWERED_DOWN\" : [] }), \"POWERED_ON\" :",
"[(870, 930)], vdd15 : [(1450, 1550)], ####### REGULAR VALUES ######### #ddr13 : [(1425,",
"VMON12 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON12\", name)) VCC_IN = Input([(0,",
"__init__(self, name, bus_addr): super(FPGA_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA_EVAL3) class ThunderX(Stateful_Node): PLL_DC_OK = Input([{0,",
"\"VCC\"), (\"U43\", \"VCC\"), (\"U47\", \"VCC\")}), (\"5vsb_psup\", \"main_psu\", \"V5SB_PSU\", {(\"U20\", \"VCC_IN\"), (\"U35\", \"VCC_IN\")}), (\"bmc_vcc_3v3\",",
"VCCO_VCC_DDR13 = Input([(0, 3400)], \"power\") VADJ_1V8 = Input([(0, 2000)], \"power\") #not found in",
"ddr13, ddr13_2v5, vttddr24, vttddr13, io33 : { #pll_vdd, pll_ddr2, pll_ddr13, sys_pll_ddr : {",
"lambda node, inputs: {}, lambda node, name, inputs: node.construct_dependency(name, node.construct_req_off(inputs))) ], \"bus\") def",
": [(0, 0)], vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0, 0)], vadj: [(0,",
"12000)], {}, [], lambda node: node.indep(\"OUT2\"))], \"power\") def __init__(self, name, bus_addr): super(PowerSupply, self).__init__(name,",
"bus_addr): super(NCP, self).__init__(name, bus_addr, NCP) class MAX8869(Node): implicit_on = lambda _, thresh: {\"V_IN\"",
"\"EN_VTT\" : [(0, 14000)]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\"))) return outputs class ISL(Node): implicit_off =",
"{\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 2000)]} VMON1_ATT = Input([(0, 13900)], \"monitor\", lambda node,",
"\"\"), ({vtt : [(0, 0)]}, \"\"), ({mgtavcc : [(0, 0)]}, \"\"), ({vcco: [(0,",
"name: node.isppac_monitor(\"VMON3_ATT\", name)) VMON4 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4\", name))",
"\"EN_2\" : [{0}]} device = \"ir3581\" bus = \"power\" BUS = Input([{0, 1}],",
"[(self.default, self.default)] return except State_Space_Error: pass try: intersect(states[self.VDDH.name], [(8500, 14000)]) self.current = [(self.default,",
"[(0, 2599)]}))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Oscillator, self).__init__(name, bus_addr, Oscillator) class",
"super(Main_PSU, self).__init__(name, bus_addr, Main_PSU) class PowerSupply(Node): OUT0 = Output([(0, 12000)], [([(0, 12000)], {},",
"[(0, (max(int(threshold * 1.06) - 1, 2699)))], \"V_EN\" : [{0}]} V_IN = Input([(0,",
"node.isppac_monitor(\"VMON10\", name)) VMON11 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11\", name)) VMON11_ATT",
"\"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA, self).__init__(name,",
"= Input([(0, 1650)], \"power\") VDD_2V5_DDR24 = Input([(0, 3300)], \"power\") VTT_DDR24 = Input([(0, 1400)],",
"1700)], vadj: [(1746, 1845)]}, \"\"), ({mgtavcc : [(873, 927)]}, \"\"), ({vtt : [(1164,",
"= Input([(0, 4000)], \"power\") EN = Input([{0, 1}], \"logical\") EN_2 = Input([{0, 1}],",
"[(0, 0)], ddr24 : [(0, 0)], ddr24_2v5 : [(0, 0)], ddr13 : [(0,",
"VTT_DDR24 = Input([(0, 1400)], \"power\") VTT_DDR13 = Input([(0, 1400)], \"power\") VDD_IO33 = Input([(0,",
"\"\") ], \"POWERED_DOWN\" : [] } ), \"POWERED_ON\" : PowerState({ ok : [{1}],",
"PowerSupply) class ISPPAC(Node): implicit_off = {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 2000)]} VMON1_ATT =",
"(\"IC4\", \"V_PWR\"), (\"U26\", \"EN_VTT\"), (\"U30\", \"EN_VTT\"), (\"U34\", \"VIN\")}), (\"12v_cpu1_psup\", \"psu_cpu1\", \"OUT\", {(\"U35\", \"VMON1_ATT\"),",
"implicit_off = {\"EN\": [{0}], \"V_PWR\": [(0, 4400)]} implicit_on = {\"EN\": [{1}], \"V_PWR\": [(5500,",
"super(MAX8869, self).__init__(name, bus_addr, MAX8869) class MAX15053(Node): implicit_on = lambda _, threshold: {\"V_IN\" :",
"p 93 (\"U47\", 0x0, ISL, []), #DD_DDRFPGA24 p 95 (\"IC5\", 0x0, MAX8869, [1800]),",
"lambda node, name: node.isppac_monitor(\"VMON5_ATT\", name)) VMON6 = Input([(0, 5734)], \"monitor\", lambda node, name:",
"\"power\") def __init__(self, name, bus_addr): self.is_default = False super(ISL, self).__init__(name, bus_addr, ISL) def",
"(3300, 3300), (0, 50)], \"clock\") CLK = Output([(0, 0), (3300, 3300), (0, 50)],",
"[{1}], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\"},",
": [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_on\"), complex_constraints= [(lambda x1, x2: z3.Or(x1 * 2",
": [(0, 0)]}, \"\"), ({mgtavcc : [(0, 0)]}, \"\"), ({vcco: [(0, 0)], vcc_2v5_ddr13",
"intersect(states[self.VOUT.name], [(500, 3040)]) self.is_default = True except State_Space_Error: self.is_default = False try: intersect(states[self.VIN.name],",
"[{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN\"}, after_set =",
"3600)], \"power\") CLK_IN = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK =",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON2_ATT\", name)) VMON3_ATT = Input([(0, 5734)],",
"[(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ({mgtavcc : [(873, 927)]}, \"\"), ({vtt :",
"vcc_ddr24 : [(1200, 1200)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj:",
"(\"IC6\", \"V_IN\"), (\"IC7\", \"V_IN\"), (\"IC8\", \"V_IN\"), (\"IC9\", \"V_IN\"), (\"U51\", \"VDD33\")}), (\"en_mgtavtt_fpga\", \"U35\", \"OUT14\",",
"0)]}, \"\"), ({vtt : [(0, 0)]}, \"\"), ({mgtavcc : [(0, 0)]}, \"\"), ({vcco:",
"input VREF = Output([(0, 6000)], [Constraint([(435, 1800)], {\"VRI\" : [(868, 3600)], \"VCC\" :",
"default, thresh: Output([(0, thresh)], [Constraint([(default, default)], {\"V_IN\" : [(max(thresh + 500, 2700), 5500)],",
"Input([(0, 1650)], \"power\") VDD_DDR13 = Input([(0, 1650)], \"power\") VDD_2V5_DDR13 = Input([(0, 3300)], \"power\")",
": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\": [{0}]}))], \"power\") def __init__(self,",
"return [ \"init_device('%s', False)\" % (self.device) ] #EVAL 3 version of the Enzian",
"0)], vcc_2v5_ddr24 : [(0, 0)], vcc_ddr13 : [(0, 0)], vcc_ddr24 : [(0, 0)],",
"self.configured: return [] else: self.configured = True return [ \"init_device('%s', False)\" % self.device,",
"def isl_outputs(): outputs = [] for i in range(0, 177): voltage_min = math.floor(1600",
"\"V5_PSU\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(Main_PSU, self).__init__(name, bus_addr, Main_PSU) class",
"binary_multidimensional(i + 2)}, after_set={\"EN_PWR\"}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"}), partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i",
"= False return def configure(self): if self.configured: return [] else: self.configured = True",
"self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX_EVAL3) class Bus(Node): BUS = Output([{0, 1}], [ Constraint([{1}], lambda",
"927)], aux : [(1746, 1854)], vcco : [(1746, 1854)], vadj : [(1746, 1854)],",
"states, node=self, wire=wire_name): if states[node.VS.name][0][0] > 2700 and states[node.VS.name][0][0] < 5500: commands =",
"{\"VRI\" : [(0, 868)], \"VCC\" : [(0, 2374)]} VCC = Input([(0, 6000)], \"power\")",
"= Input([(0, 1300)], \"power\") VCCINT_IO = Input([(0, 1000)], \"power\") VCCAUX = Input([(0, 2000)],",
"[(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], ##### REGULAR VALUES:",
"{\"VDD\", \"CLK_IN\"}, set())), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(2600, 3600)], \"CLK_IN\":",
"OUT1 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT1\"))], \"power\") OUT2",
"\"power\") def __init__(self, name, bus_addr): super(Main_PSU, self).__init__(name, bus_addr, Main_PSU) class PowerSupply(Node): OUT0 =",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5_ATT\", name)) VMON6 = Input([(0, 5734)],",
"{self.VIN.name : [(0, self.threshold-1)], self.VCC.name : [(0, 2500)]} def update(self, states): try: intersect(states[self.VOUT.name],",
": [(0, 0)], aux : [(0, 0)], vcco : [(0, 0)], vadj :",
"= Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK_OK = Input([{0, 1}], \"logical\")",
"2 will have different addresses... VCC = Input([(0, 4000)], \"power\") EN = Input([{0,",
"3040)], [Constraint([(500, 3040)], {\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN_2\" :",
"#to NCP (\"2v5_cpu24\", \"IC16\", \"V_OUT\", {(\"U20\", \"VMON8_ATT\"), (\"U25\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR24\")}), (\"vtt_ddrcpu24\", \"U25\",",
"bmc (\"12v_cpu0_psup\", \"psu_cpu0\", \"OUT\", {(\"U20\", \"VMON1_ATT\"), (\"IC4\", \"V_PWR\"), (\"U26\", \"EN_VTT\"), (\"U30\", \"EN_VTT\"), (\"U34\",",
"try: intersect(states[self.V_OUT.name], [(500, 1520)]) self.current = states[self.V_OUT.name] self.is_default = True return except State_Space_Error:",
"1}, {0, 1}], {}, partial(Constraint.explicit, \"B_CDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) def __init__(self, name,",
": [{1}]}, \"\"), ({vdd: [(2000, 2200)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD33\", \"VDD\",",
"% ( wire, 0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return (True,",
"Input([{0, 1}], \"logical\") OUT = Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]},",
"= lambda _, thresh: {\"V_IN\" : [(max(thresh + 500, 2700), 5500)], \"SHDN\" :",
"(\"en_mgtavtt_fpga\", \"U35\", \"OUT14\", {(\"U41\", \"VR_ON\")}), (\"mgtavtt_fpga\", \"U41\", \"V_OUT\", {(\"fpga\", \"MGTAVTT\")}), (\"en_mgtavcc_fpga\", \"U35\", \"OUT10\",",
"{(\"U35\", \"VMON12\"), (\"fpga\", \"VCCO_1V8\")}), #where to connect at fpga? additional vcco thingy? (\"en_sys_2v5_13\",",
"\"CHIP_RESET_L\", \"PLL_REF_CLK\", \"VDD\", \"VDD_09\", \"VDD_15\", \"VDD_DDR24\", \"VDD_2V5_DDR24\", \"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\",",
"Constraint([{0}], {\"VCC\": [(0, 4500)], \"VCC_IN\": [(0, 2000)]}, partial(Constraint.implicit, name, \"implicit_off\"))], \"logical\", Wire.pin_set) setattr(self,",
"[partial(Constraint.implicit, \"CLK\", \"implicit_on\"), partial(Constraint.explicit, \"CLK\", {}, set())])), Constraint([(0, 0), (3300, 3300), (0, 0)],",
"0x0, MAX15053, [2500]), #2V5_CPU13 p 71 (\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24 p 71",
"\"IC4\", \"V_OUT\", {(\"U20\", \"VMON6\"), (\"cpu\", \"VDD_15\")}), (\"en_2v5_cpu13\", \"U20\", \"OUT9\", {(\"IC15\", \"V_EN\")}), #to NCP",
"0x60, ISPPAC, [\"pac_cpu\"]), #cpu ISPPAC (\"U35\", 0x61, ISPPAC, [\"pac_fpga\"]), #fpga ISPPAC (\"U44\", 0x40,",
"EN = Input([{0, 1}], \"logical\") EN_2 = Input([{0, 1}], \"logical\") VIN = Input([(0,",
"[Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_PSUP_ON\", set(), set()))], \"logical\", Wire.gpio_set) C_RESET_N = Output([{0, 1}],",
"3630)], \"VDDH\":[(8500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_on), state_update= Constraint.default_state), Constraint([(500, 1520)], {\"VR_ON\": [{1}], \"VDD33\":[(2970,",
"3630)]} def bus_req_off(self): return {self.VIN.name : [(0, self.threshold-1)], self.VCC.name : [(0, 2500)]} def",
"\"VIN\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0,",
"VRI = Input([(0, 6000)], \"power\") #reference input VREF = Output([(0, 6000)], [Constraint([(435, 1800)],",
"* 6.25) voltage_max = math.ceil(1600 - i * 6.25) outputs.append( Constraint( [(voltage_min, voltage_max)],",
"################################# ddr24_2v5 : [(2400, 2600)], #not imposed by the cpu, but the connected",
"({mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746, 1854)]}, \"\"), ({ok: [{1}]}, \"\") ],",
"[(0, 0)], vcc_2v5_ddr24 : [(0, 0)], vcc_ddr13 : [(0, 0)], vcc_ddr24 : [(0,",
"%vdd), #### FOR EVAL 3 ###################### ({ddr24 : [(1200, 1200)], ddr24_2v5 : [(2400,",
"lambda node, name: node.isppac_monitor(\"VMON12\", name)) VCC_IN = Input([(0, 6000)], \"power\", lambda node, name:",
"6.25) voltage_max = math.ceil(1600 - i * 6.25) outputs.append( Constraint( [(voltage_min, voltage_max)], \\",
"State_Space_Error, unite_dict, state_union, SET, empty_intersection import math from functools import partial import z3",
"927)]}, \"\"), ({io : [(873, 927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"), ({vcco",
": [{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({vdd: [(2300, 2400)]}, \"wait until",
"{(\"U20\", \"VMON3_ATT\"), (\"U27\", \"VS\"), (\"U31\", \"VS\"), (\"IC15\", \"V_IN\"), (\"IC16\", \"V_IN\"), (\"cpu\", \"VDD_IO33\"), (\"U34\",",
"self.configured: return [] else: self.configured = True return [ \"init_device('%s', False)\" % (self.device)",
"ISL(Node): implicit_off = {\"VCC\" : [(0, 4300)], \"EN_PWR\" : [{0, 1}], \"EN_VTT\" :",
": [(0, 13200)], \"EN\" : [{0}]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\")), ], \"power\", Wire.ir_set) VOUT_2",
"{0, 1}, {0, 1}, {0, 1}], \"VCC\" : [(0, 5250)], \"EN_PWR\" : [{0}],",
"%vdd), ###################################### ({ok : [{1}]}, \"must have written pll_mul and sys_pll_mul beforehand\"), ({rst",
": Output([(0, 1520)], [Constraint([], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_on),",
"6000)], \"power\") VBUS = Input([(0, 40000)], \"monitor\", lambda node, name: node.ina_monitor(name)) def __init__(self,",
"= Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_CLOCK_FLOL\", set(), set()))], \"logical\", Wire.fpga_clk_ok) B_PSUP_ON",
"[Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_PLL_DC_OK\", set(), set()))], \"logical\", Wire.cpu_clk_ok) B_FDV_1V8 = Output([{0, 1},",
"(\"mgtvccaux_l\", \"IC5\", \"V_OUT\", {(\"U35\", \"VMON8\"), (\"fpga\", \"MGTVCCAUX_L\")}), (\"mgtvccaux_r\", \"IC6\", \"V_OUT\", {(\"U35\", \"VMON9\"), (\"fpga\",",
"\"VCC_IN\": [(0, 6000)]}, partial(Constraint.implicit, name, \"implicit_off\")), Constraint([{0}], {\"VCC\": [(0, 4500)], \"VCC_IN\": [(0, 2000)]},",
"\"U35\", \"OUT7\", { (\"IC8\", \"V_EN\")}), (\"sys_2v5_13\", \"IC8\", \"V_OUT\", {(\"U35\", \"VMON4_ATT\"), (\"U39\", \"VCC\"), (\"fpga\",",
"and states[node.VS.name][0][0] < 5500: commands = node.configure() commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" %",
"[(2800, 3960)], \"VCC_IN\": [(2250, 5500)]}, partial(Constraint.explicit, name, {\"VCC\", \"VCC_IN\"}, set())), Constraint([{0}], {\"VCC\": [(0,",
"vcc, io, aux, vcco, vadj, vcc_2v5_ddr13, vcc_2v5_ddr24, vcc_ddr13, vcc_ddr24, vtt_ddr13, vtt_ddr24, vtt, mgtaux_l,",
"def __init__(self, name, bus_addr, device): self.device = device super(INA226, self).__init__(name, bus_addr, INA226) self.configured",
"bus_addr, device): self.device = device super(INA226, self).__init__(name, bus_addr, INA226) self.configured = False def",
": [(thresh, 13200)], \"EN_2\" : [{1}], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default,",
"\"V_OUT\", MAX15301.implicit_off)) ], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr, default, device): self.device =",
"\"MGTAVCC\")}), (\"en_vccint_fpga\", \"U35\", \"OUT9\", {(\"U51\", \"VR_ON\")}), (\"vccint_fpga\", \"U51\", \"V_OUT\", {(\"U35\", \"VMON6\"), (\"fpga\", \"VCCINT\")}),",
"node, _ in inputs: unite_dict(req, node.bus_req()) print(req) return req def construct_req_off(self, inputs): req",
"[(0, 3630)], \"VIN\" : [(0, 13200)], \"EN\" : [{0}]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\")), ],",
"= [(self.default, self.default)] return except State_Space_Error: pass class Oscillator(Node): VDD = Input([(0, 3600)],",
"self).__init__(name, bus_addr, PowerConsumer) class CPU2(Stateful_Node): VDD = Input([(0, 2600)], \"power\") EN1 = Input([{0,",
"1700)], vadj: [(1746, 1845)]}, \"\"), ##### FOR EVAL 3: ################### ({vcco : [(1746,",
"Output([{0, 1}], [ Constraint([{1}], lambda node, inputs: node.construct_req(inputs), lambda node, name, inputs: node.construct_dependency(name,",
"FOR EVAL 3: ################### ({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24",
"def update(self, states): try: intersect(states[self.V_OUT.name], [(500, 1520)]) self.current = states[self.V_OUT.name] self.is_default = True",
"__init__(self, name, bus_addr, default, device): self.device = device self.default = default self.is_default =",
"\"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR24\"), (\"U40\", \"VRI\")}), (\"b_cdv_1v8\", \"bmc\", \"B_CDV_1V8\", {(\"U26\", \"VID\"), (\"U30\", \"VID\")}), (\"b_fdv_1v8\",",
"0x0, MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA_EVAL3, []), (\"cpu\", 0x0, ThunderX_EVAL3,",
"\"BUS\" : [{1}]}, {}, dependency_update=(Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}),",
"1210)], \"power\") VDD_09 = Input([(0, 945)], \"power\") VDD_15 = Input([(0, 1650)], \"power\") VDD_DDR13",
"[{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name,",
"wire_name, multiplier = 1): def fun(value, _, node=self, wire_name=wire_name, pinname=pinname, multiplier=multiplier): commands =",
"0x41, INA226, [\"ina226_ddr_fpga_13\"]), (\"U27\", 0x44, INA226, [\"ina226_ddr_cpu_13\"]), (\"U31\", 0x45, INA226, [\"ina226_ddr_cpu_24\"]), #TODO: add",
"multidim def isl_outputs(): outputs = [] for i in range(0, 177): voltage_min =",
"\"POWERED_DOWN\", FPGA_EVAL3) class ThunderX(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0, 1}],",
"\"EN_PWR\" : [{0, 1}], \"EN_VTT\" : [(0, 830)]} VCC = Input([(0, 6000)], \"power\")",
"{\"VDD\": [(2600, 3600)]})), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit,",
"\"VCC_IN\"}, set())), Constraint([{0}], {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 6000)]}, partial(Constraint.implicit, name, \"implicit_off\")), Constraint([{0}],",
"vadj: [(1746, 1845)]}, \"\"), ({mgtavcc : [(873, 927)]}, \"\"), ({vtt : [(1164, 1236)]},",
"[(1746, 1854)], vadj : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400,",
"3300), (50, 50)], vcc : [(873, 927)], io : [(873, 927)], aux :",
"\"VMON10\"), (\"fpga\", \"VCCINT_IO\")}), (\"en_vdd_ddrfpga13\", \"U35\", \"OUT18\", {(\"U43\", \"EN_PWR\")}), (\"en_vdd_ddrfpga24\", \"U35\", \"OUT19\", {(\"U47\", \"EN_PWR\")}),",
"if states[node.VS.name][0][0] > 2700 and states[node.VS.name][0][0] < 5500: commands = node.configure() commands.append(\"wait_for_voltage('%s', v_min=%.3f,",
"{ \"POWERED_DOWN\" : PowerState({vdd33: [(0, 0)], vdd : [(0, 0)], en1 : [{0}],",
"[(1200, 1200)], vcc_ddr24 : [(1200, 1200)], ############################# vtt_ddr13 : [(550, 1700)], vtt_ddr24 :",
"self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX) #EVAL 3 version of the THUNDERX: Comments indicate changes",
"bus_addr, Bus) def construct_req(self, inputs): req = {} for node, _ in inputs:",
"partial(Constraint.explicit, \"B_CLOCK_FLOL\", set(), set()))], \"logical\", Wire.fpga_clk_ok) B_PSUP_ON = Output([{0, 1}], [Constraint([{0, 1}], {},",
"[(0, 0)], vadj : [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 : [(0,",
"\"U30\", \"VOUT\", {(\"U20\", \"VMON10\"), (\"U31\", \"VBUS\"), (\"cpu\", \"VDD_DDR24\"), (\"U25\", \"VRI\")}), (\"vdd_core_en\", \"U20\", \"OUT6\",",
"(\"IC10\", \"V_PWR\"), (\"IC11\", \"V_PWR\"), (\"IC12\", \"V_PWR\"), (\"U43\", \"EN_VTT\"), (\"U47\", \"EN_VTT\"), (\"U51\", \"VDDH\"), (\"IC13\",",
"Output([(5000, 5000)], [Constraint([(5000, 5000)], {}, partial(Constraint.implicit, \"V5SB_PSU\", {}))], \"power\") BMC_VCC_3V3 = Output([(3300, 3300)],",
"{ \"POWERED_DOWN\": [ ({vdd: [(2300, 2400)]}, \"wait until \" + vdd + \"",
"= Input([(0, 2000)], \"power\") VCCO_VCC_DDR13 = Input([(0, 3400)], \"power\") VADJ_1V8 = Input([(0, 2000)],",
"5500)], \"V_EN\" : [{1}]} implicit_off = lambda _, threshold: {\"V_IN\" : [(0, (max(int(threshold",
": [(0, self.threshold-1)], self.VCC.name : [(0, 2500)]} def update(self, states): try: intersect(states[self.VOUT.name], [(500,",
"({ok : [{0}]}, \"\") ], \"POWERED_DOWN\" : [] } ), \"POWERED_ON\" : PowerState({",
"930)], vdd15 : [(1450, 1550)]}, \"\"), #### REGULAR TRANSITION STEP ######### #({ddr24 :",
"\"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX_EVAL3) class Bus(Node): BUS",
"0x0, MAX8869, [1800]), #MGTVCCAUX_L, p 88 (\"IC6\", 0x0, MAX8869, [1800]), #MGTVCCAUX_R, p 88",
"[]), (\"U25\", 0x0, NCP, []), (\"U39\", 0x0, NCP, []), (\"U40\", 0x0, NCP, []),",
"[]), #VDD_DDRFPGA13 p 93 (\"U47\", 0x0, ISL, []), #DD_DDRFPGA24 p 95 (\"IC5\", 0x0,",
"\"V_IN\"), (\"IC7\", \"V_IN\"), (\"IC8\", \"V_IN\"), (\"IC9\", \"V_IN\"), (\"U51\", \"VDD33\")}), (\"en_mgtavtt_fpga\", \"U35\", \"OUT14\", {(\"U41\",",
"\"EN\")}), (\"1v5_vdd_oct\", \"IC4\", \"V_OUT\", {(\"U20\", \"VMON6\"), (\"cpu\", \"VDD_15\")}), (\"en_2v5_cpu13\", \"U20\", \"OUT9\", {(\"IC15\", \"V_EN\")}),",
"{}, \\ dependency_update= (Constraint.is_default, \\ [partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, after_set={\"EN_PWR\"},",
"{(\"U20\", \"VMON12\"), (\"cpu\", \"VTT_DDR24\")}), (\"pll_ref_clk\", \"U11\", \"CLK\", {(\"cpu\", \"PLL_REF_CLK\")}), (\"bus\", \"power_bus\", \"BUS\", {(\"IC10\",",
"{name : set(filter(lambda x: not empty_intersection(x, req, states), req.keys()))}) class MAX20751(Node): implicit_on =",
"node, name: node.isppac_monitor(\"VMON11_ATT\", name)) VMON12 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON12\",",
"1): def fun(value, _, node=self, wire_name=wire_name, pinname=pinname, multiplier=multiplier): commands = node.configure() if list(value[0])[0]",
"\"EN_2\" : [{0}]}, partial(Constraint.implicit, \"VOUT_2\", \"implicit_off_2\")), ], \"power\", Wire.ir_set) def __init__(self, name, bus_addr,",
"5000)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit,",
"filled in like VCCO_VCC_DDR voltages MGTVCCAUX_L = Input([(0, 1900)], \"power\") MGTVCCAUX_R = Input([(0,",
"versions enzian_nodes_EVAL3 = [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0, PSU, []), (\"psu_cpu1\",",
"\"VBUS\"), (\"cpu\", \"VDD_DDR13\"), (\"U24\", \"VRI\")}), (\"vdd_ddrcpu24\", \"U30\", \"VOUT\", {(\"U20\", \"VMON10\"), (\"U31\", \"VBUS\"), (\"cpu\",",
"#VADJ_1V8 p.91 (\"IC13\", 0x15, MAX15301, [900, \"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA p 99 (\"U34\", 0x0, IR,",
"self.configured = False return def configure(self): if self.configured: return [] else: self.configured =",
"#ddr24: [(1425, 1575)], ####### FOR EVAL 3 ########### ddr24: [(1200, 1200)], ddr13: [(1200,",
"* 1.01)) self.V_OUT = self.V_OUT(voltage, int(voltage * 1.01)) super(MAX15053, self).__init__(name, bus_addr, MAX15053) def",
"\"logical\") EN2 = Input([{0, 1}], \"logical\") states = (lambda vdd, en1, en2: {",
"name)) VMON11_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11_ATT\", name)) VMON12 =",
"\"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)],",
"vcc_ddr24, vtt_ddr13, vtt_ddr24, vtt, mgtaux_l, mgtaux_r, mgtavcc: { \"POWERED_DOWN\" : PowerState({ clk: [(0,",
"\"U41\", \"V_OUT\", {(\"fpga\", \"MGTAVTT\")}), (\"en_mgtavcc_fpga\", \"U35\", \"OUT10\", {(\"U37\", \"VR_ON\")}), (\"mgtavcc_fpga\", \"U37\", \"V_OUT\", {(\"U35\",",
"self).__init__(name, bus_addr, MAX15301) def bus_req(self): return {self.V_PWR.name: [(5500, 14000)]} def bus_req_off(self): return {self.V_PWR.name:",
"lambda node, name: node.isppac_monitor(\"VMON4\", name)) VMON4_ATT = Input([(0, 5734)], \"monitor\", lambda node, name:",
"= Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT1\"))], \"power\") OUT2 =",
"\"EN_VTT\"), (\"U34\", \"VIN\")}), (\"12v_cpu1_psup\", \"psu_cpu1\", \"OUT\", {(\"U35\", \"VMON1_ATT\"), (\"U37\", \"VDDH\"), (\"U41\", \"VDDH\"), (\"IC10\",",
"\"VDD_09\", \"VDD_15\", \"VDD_DDR24\", \"VDD_2V5_DDR24\", \"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self,",
"states = (lambda vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd : [(0, 0)],",
"name: node.isppac_monitor(\"VMON2_ATT\", name)) VMON3_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON3_ATT\", name))",
"\"OUT6\", {(\"IC10\", \"EN\")}), (\"util33\", \"IC10\", \"V_OUT\", {(\"U35\", \"VMON3_ATT\"), (\"U44\", \"VS\"), (\"U48\", \"VS\"), (\"U37\",",
": [ ({ok : [{0}]}, \"\") ], \"POWERED_DOWN\" : [] } ), \"POWERED_ON\"",
"\"VDD_DDR13\"), (\"U24\", \"VRI\")}), (\"vdd_ddrcpu24\", \"U30\", \"VOUT\", {(\"U20\", \"VMON10\"), (\"U31\", \"VBUS\"), (\"cpu\", \"VDD_DDR24\"), (\"U25\",",
"node.bus_req_off() for wire, state in node_req.items(): if not wire in req: req[wire] =",
"\"VDD\", \"VDD_09\", \"VDD_15\", \"VDD_DDR24\", \"VDD_2V5_DDR24\", \"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def",
"\"POWERED_DOWN\", CPU2) class BMC(Node): B_CLOCK_FLOL = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_CLOCK_FLOL\",",
"self).__init__(name, bus_addr, PowerSupply) class ISPPAC(Node): implicit_off = {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 2000)]}",
"PSU) class Main_PSU(Node): EN = Input([{0, 1}], \"logical\") V33_PSU = Output([(0, 3300)], [",
"\"VS\"), (\"IC15\", \"V_IN\"), (\"IC16\", \"V_IN\"), (\"cpu\", \"VDD_IO33\"), (\"U34\", \"VCC\"), (\"U57\", \"VDD\"), (\"U11\", \"VDD\"),",
"0)]}, \"\"), ], \"POWERED_DOWN\" : [] }), \"POWERED_ON\" : PowerState({ clk: [(0, 0),",
"0), (3300, 3300), (0, 50)], [ Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\":",
"13200)]) except State_Space_Error: self.configured = False return try: intersect(states[self.VCC.name], [(2900, 3630)]) except State_Space_Error:",
"VCCO_2V5_DDR13 = Input([(0, 3400)], \"power\") VCCO_VCC_DDR24 = Input([(0, 3400)], \"power\") VCCO_VTT_DDR13 = Input([(0,",
"\"PLL_REF_CLK\", \"VDD\", \"VDD_09\", \"VDD_15\", \"VDD_DDR24\", \"VDD_2V5_DDR24\", \"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"])",
"node, name: node.isppac_monitor(\"VMON9\", name)) VMON10 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON10\",",
"[(570, 630)]}, \"%s should have stabilized by now\" %vdd), ###################################### ({ok : [{1}]},",
"VCCINT = Input([(0, 1000)], \"power\") MGTAVCC = Input([(0, 1000)], \"power\") MGTAVTT = Input([(0,",
"node, name: node.isppac_monitor(\"VMON6\", name)) VMON7 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7\",",
"[(2300, 2600)], en1 : [{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({vdd: [(2300,",
"#add 5vsb if added bmc (\"12v_cpu0_psup\", \"psu_cpu0\", \"OUT\", {(\"U20\", \"VMON1_ATT\"), (\"IC4\", \"V_PWR\"), (\"U26\",",
"2400)]}, \"wait until \" + vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"),",
"(\"clk_sig\", \"oscillator\", \"CLK\", {(\"U57\", \"CLK_IN\")}), (\"clk_main\", \"U57\", \"CLK\", {(\"U11\", \"CLK_IN\"), (\"U16\", \"CLK_IN\")}), (\"clock_flol\",",
"bus_addr, \"POWERED_DOWN\", FPGA_EVAL3) class ThunderX(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0,",
"B_PSUP_ON = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_PSUP_ON\", set(), set()))], \"logical\", Wire.gpio_set)",
"Wire.ir_set) VOUT_2 = lambda _, thresh : Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\" :",
"[2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA_EVAL3, []), (\"cpu\", 0x0, ThunderX_EVAL3, []), (\"bmc\",",
"Input([{0, 1}], \"logical\") V_PWR = Input([(0, 14000)], \"power\") V_OUT = lambda default :",
"= {} for node, _ in inputs: node_req = node.bus_req_off() for wire, state",
"\"V_PWR\"), (\"U26\", \"EN_VTT\"), (\"U30\", \"EN_VTT\"), (\"U34\", \"VIN\")}), (\"12v_cpu1_psup\", \"psu_cpu1\", \"OUT\", {(\"U35\", \"VMON1_ATT\"), (\"U37\",",
"{(\"fpga\", \"CLK\")}), (\"en_mgtvccaux_l\", \"U35\", \"OUT11\", {(\"IC5\", \"SHDN\")}), (\"en_mgtvccaux_r\", \"U35\", \"OUT12\", {(\"IC6\", \"SHDN\")}), (\"mgtvccaux_l\",",
"\"power\", Wire.voltage_set) def __init__(self, name, bus_addr, default, device): self.device = device self.default =",
": [(1140, 3400)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746,",
"3300)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V33_PSU\",",
"\"VDDH\":[(8500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_on), state_update= Constraint.default_state), Constraint([(500, 1520)], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)],",
"\"implicit_off\")), Constraint([(0, 0)], { \"VRI\" : [(0, 3600)], \"VCC\" : [(0, 2374)]}, partial(Constraint.implicit,",
": [(873, 927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"), ##### REGULAR TRANSITION STEP",
"super(PSU, self).__init__(name, bus_addr, PSU) class Main_PSU(Node): EN = Input([{0, 1}], \"logical\") V33_PSU =",
"0x44, INA226, [\"ina226_ddr_cpu_13\"]), (\"U31\", 0x45, INA226, [\"ina226_ddr_cpu_24\"]), #TODO: add real names of MAX15301s",
": [(0, 5500)], \"SHDN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def __init__(self,",
"for %s to stabilize\" %(io33)), ({vdd : [(940, 980)]}, \"\"), ({vdd09 : [(870,",
"\"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_on), state_update= Constraint.default_state), Constraint([(500, 1520)], {\"VR_ON\": [{1}],",
"0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(PSU,",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11\", name)) VMON11_ATT = Input([(0, 5734)],",
"(\"cpu\", \"VDD_DDR24\"), (\"U25\", \"VRI\")}), (\"vdd_core_en\", \"U20\", \"OUT6\", {(\"U34\", \"EN\")}), (\"vdd_core\", \"U34\", \"VOUT\", {(\"U20\",",
"[(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]} implicit_off = {\"VDD\": [(0,",
"1700)], vadj: [(1746, 1845)]}, \"\"), ##################################### ({mgtavcc : [(873, 927)]}, \"\"), ({vtt :",
"\"V_OUT\", {\"V_PWR\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"EN\": [{0}], \"V_PWR\": [(0, 14000)]}, partial(Constraint.implicit,",
"] class CPU_3(Stateful_Node): VDD33 = Input([(0, 4000)], \"power\") VDD = Input([(0, 2500)], \"power\")",
": [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\")), Constraint([(0, 0)], { \"VRI\" : [(0, 3600)],",
"\"power\") VCCO_VCC_DDR24 = Input([(0, 3400)], \"power\") VCCO_VTT_DDR13 = Input([(0, 2000)], \"power\") #replace VREF",
"\"logical\", Wire.fpga_clk_ok) B_PSUP_ON = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_PSUP_ON\", set(), set()))],",
"vdd: [(2000, 2500)]}, \"wait until \" + vdd + \" stabilized\"), ({en1 :",
"[(1140, 3400)], vcc_ddr24 : [(1140, 3400)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550,",
"#loop 1 and loop 2 will have different addresses... VCC = Input([(0, 4000)],",
"[(0, 4400)]} implicit_on = {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]} BUS = Input([{0, 1}],",
"\"EN\" : [{0}]} implicit_off_2 = lambda _, thresh : {\"VCC\" : [(0, 2500)],",
"__init__(self, name, bus_addr): super(Clock, self).__init__(name, bus_addr, Clock) class PowerConsumer(Node): node_string = \"<V_IN> V_IN\"",
"\"BUS\", \"EN_2\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)],",
"{\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)],",
"self.implicit_on = self.implicit_on(int(voltage * 1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(voltage,",
"node.device )) return (True, \"\\n\".join(commands)) else: return (False, \"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\"",
"0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(Main_PSU,",
"0x73, MAX20751, [1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA p 87 (\"U51\", 0x70, MAX20751, [900, \"max20751_vccint_fpga\"]), #VCCINT_FPGA p",
"\"VCC\" : [(0, 2374)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\"))], \"power\") def __init__(self, name, bus_addr): super(NCP,",
"= 1): def fun(value, _, node=self, wire_name=wire_name, pinname=pinname, multiplier=multiplier): commands = node.configure() if",
"0x0, PSU, []), (\"main_psu\", 0x0, Main_PSU, []), (\"U20\", 0x60, ISPPAC, [\"pac_cpu\"]), #cpu ISPPAC",
"({rst : [{1}]}, \"\") ], \"POWERED_ON\" : [] }) }, [\"PLL_DC_OK\", \"CHIP_RESET_L\", \"PLL_REF_CLK\",",
"0)], mgtavcc : [(0, 0)], ok : [{0}] }, { \"POWERED_ON\" : [",
"(3300, 3300), (0, 50)], \"clock\") VDD = Input([(0, 1210)], \"power\") VDD_09 = Input([(0,",
": [(2400, 2600)], vttddr24 : [(700, 800)], vttddr13 : [(700, 800)], io33 :",
"ina_monitor(self, wire_name): def fun(value, states, node=self, wire=wire_name): if states[node.VS.name][0][0] > 2700 and states[node.VS.name][0][0]",
"{0, 1}, {0, 1}], \"logical\") VOUT = Output([(0, 1600)], isl_outputs(), \"power\") def __init__(self,",
": [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"V_EN\" :",
"= Input([(0, 1650)], \"power\") VDD_2V5_DDR13 = Input([(0, 3300)], \"power\") #actually, the CPU names",
"[{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]} implicit_off = {\"VR_ON\": [{0}], \"VDD33\": [0, 2800], \"VDDH\":",
"2600)], \"power\") EN1 = Input([{0, 1}], \"logical\") EN2 = Input([{0, 1}], \"logical\") states",
"Wire.fpga_clk_ok) B_PSUP_ON = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_PSUP_ON\", set(), set()))], \"logical\",",
"\"implicit_off\"))], \"power\") def __init__(self, name, bus_addr): super(NCP, self).__init__(name, bus_addr, NCP) class MAX8869(Node): implicit_on",
"Wire.gpio_set) C_RESET_N = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_RESET_N\", set(), set()))], \"logical\",",
"{\"VCC\", \"VIN\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" :",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON8_ATT\", name)) VMON9 = Input([(0, 5734)], \"monitor\", lambda node,",
"\"bmc\", \"C_RESET_N\", {(\"cpu\", \"CHIP_RESET_L\")}), (\"pll_dc_ok\", \"bmc\", \"C_PLL_DC_OK\", {(\"cpu\", \"PLL_DC_OK\")}), (\"en_vdd_ddrcpu13\", \"U20\", \"OUT11\", {(\"U26\",",
"0x40, INA226, [\"ina226_ddr_fpga_24\"]), (\"U48\", 0x41, INA226, [\"ina226_ddr_fpga_13\"]), (\"U27\", 0x44, INA226, [\"ina226_ddr_cpu_13\"]), (\"U31\", 0x45,",
"\"V12_PSU\", {\"EN\": [{0}]}))], \"power\") V5SB_PSU = Output([(5000, 5000)], [Constraint([(5000, 5000)], {}, partial(Constraint.implicit, \"V5SB_PSU\",",
"(3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(2600, 3600)]})), Constraint([(0,",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11\", name)) VMON11_ATT = Input([(0, 5734)], \"monitor\",",
"\"VCCO_1V8\")}), #where to connect at fpga? additional vcco thingy? (\"en_sys_2v5_13\", \"U35\", \"OUT7\", {",
"self.loop2 ] class FPGA(Stateful_Node): CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\")",
": [(1200, 1200)], ############################# vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vtt",
"(\"cpu\", 0x0, ThunderX_EVAL3, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP, []), (\"U25\",",
"self.is_default = False self.current = [(default, default)] self.V_OUT = MAX15301.V_OUT(default) super(MAX15301, self).__init__(name, bus_addr,",
"bus_req(self): return {self.V_PWR.name: [(5500, 14000)]} def bus_req_off(self): return {self.V_PWR.name: [(0, 4400)]} def update(self,",
": [(0, 0)], io : [(0, 0)], aux : [(0, 0)], vcco :",
"980)]}, \"\"), ({vdd09 : [(870, 930)], vdd15 : [(1450, 1550)]}, \"\"), #### REGULAR",
"1575)], ddr24_2v5 : [(2400, 2600)], #not imposed by the cpu, but the connected",
"if list(value[0])[0] == 0: commands.append(\"wait_for_voltage('%s', v_min=0, v_max=0.08, device='%s', monitor='%s')\" % ( wire_name, node.device,",
"= math.floor(1600 - i * 6.25) voltage_max = math.ceil(1600 - i * 6.25)",
"v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0], node.device",
"\"IC10\", \"V_OUT\", {(\"U35\", \"VMON3_ATT\"), (\"U44\", \"VS\"), (\"U48\", \"VS\"), (\"U37\", \"VDD33\"), (\"U41\", \"VDD33\"), (\"IC5\",",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4_ATT\", name)) VMON5 = Input([(0, 5734)], \"monitor\", lambda",
"(\"fpga\", \"VCCO_VCC_DDR24\"), (\"U40\", \"VRI\")}), (\"b_cdv_1v8\", \"bmc\", \"B_CDV_1V8\", {(\"U26\", \"VID\"), (\"U30\", \"VID\")}), (\"b_fdv_1v8\", \"bmc\",",
"bmc (\"5v_psup\", \"main_psu\", \"V5_PSU\", {(\"U35\", \"VMON2_ATT\"), (\"U20\", \"VMON2_ATT\"), (\"U26\", \"VCC\"), (\"U30\", \"VCC\"), (\"U43\",",
"[(2400, 2600)], vttddr24 : [(570, 630)], vttddr13 : [(570, 630)]}, \"%s should have",
"implicit_off = lambda _, thresh: {\"V_IN\" : [(0, max(thresh + 499, 2699))], \"SHDN\"",
"Output([(0, 0), (3300, 3300), (0, 50)], [ Constraint([(0, 0), (3300, 3300), (50, 50)],",
"1845)]}, \"\"), ##################################### ({mgtavcc : [(873, 927)]}, \"\"), ({vtt : [(1164, 1236)]}, \"\"),",
"super(NCP, self).__init__(name, bus_addr, NCP) class MAX8869(Node): implicit_on = lambda _, thresh: {\"V_IN\" :",
"list(value[0])[0] == 0: commands.append(\"wait_for_voltage('%s', v_min=0, v_max=0.08, device='%s', monitor='%s')\" % ( wire_name, node.device, pinname",
"\"max15301_vadj_1v8\"]), #VADJ_1V8 p.91 (\"IC13\", 0x15, MAX15301, [900, \"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA p 99 (\"U34\", 0x0,",
"4500)], \"VCC_IN\": [(0, 2000)]}, partial(Constraint.implicit, name, \"implicit_off\"))], \"logical\", Wire.pin_set) setattr(self, name, output) def",
"####### #vcc_ddr13 : [(1140, 3400)], #vcc_ddr24 : [(1140, 3400)], ##### VALUES FOR EVAL",
": [{1}], \"BUS\": [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\"},",
"[(3140, 3460)], }, { \"POWERED_DOWN\": [ ({clk: [(0, 0), (3300, 3300), (50, 50)],",
"\"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_on\"), complex_constraints= [(lambda x1, x2: z3.Or(x1 *",
"Input([(0, 1650)], \"power\") VDD_2V5_DDR24 = Input([(0, 3300)], \"power\") VTT_DDR24 = Input([(0, 1400)], \"power\")",
"ddr24_2v5 : [(2400, 2600)], ddr13 : [(1425, 1575)], ddr13_2v5 : [(2400, 2600)], vttddr24",
"[(2400, 2600)], ddr13 : [(1200, 1200)], ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570,",
"\"VMON12\"), (\"cpu\", \"VTT_DDR24\")}), (\"pll_ref_clk\", \"U11\", \"CLK\", {(\"cpu\", \"PLL_REF_CLK\")}), (\"bus\", \"power_bus\", \"BUS\", {(\"IC10\", \"BUS\"),",
"l2_addr): self.configured = False self.is_default = False self.threshold = threshold self.device = device",
"] class FPGA(Stateful_Node): CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK_OK",
"= Output([{0, 1}], [ Constraint([{1}], lambda node, inputs: node.construct_req(inputs), lambda node, name, inputs:",
"(\"5vsb_psup\", \"main_psu\", \"V5SB_PSU\", {(\"U20\", \"VCC_IN\"), (\"U35\", \"VCC_IN\")}), (\"bmc_vcc_3v3\", \"main_psu\", \"BMC_VCC_3V3\", {(\"U20\", \"VCC\"), (\"U35\",",
"\"power\") MGTAVCC = Input([(0, 1000)], \"power\") MGTAVTT = Input([(0, 1300)], \"power\") VCCINT_IO =",
"({ok : [{1}]}, \"must have written pll_mul and sys_pll_mul beforehand\"), ({rst : [{1}]},",
"= Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\" :",
"(\"U43\", 0x0, ISL, []), #VDD_DDRFPGA13 p 93 (\"U47\", 0x0, ISL, []), #DD_DDRFPGA24 p",
"MAX15053, [2500]), #SYS_2V5_13 (\"IC9\", 0x0, MAX15053, [2500]), #SYS_2V5_24 (\"IC15\", 0x0, MAX15053, [2500]), #2V5_CPU13",
"3300)], {}, partial(Constraint.implicit, \"BMC_VCC_3V3\", {}))], \"power\") V5_PSU = Output([(0, 5000)], [ Constraint([(5000, 5000)],",
"* 1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(voltage, int(voltage * 1.01))",
"\"implicit_off\"))) return outputs class ISL(Node): implicit_off = {\"VCC\" : [(0, 4300)], \"EN_PWR\" :",
"math from functools import partial import z3 class INA226(Node): BUS = Input([{0, 1}],",
"set(), set(), set()], lambda states, req = req: {name : set(filter(lambda x: not",
"Input([(0, 12000)], \"power\") def __init__(self, name, bus_addr): super(PowerConsumer, self).__init__(name, bus_addr, PowerConsumer) class CPU2(Stateful_Node):",
"(\"en_1v5_vdd_oct\", \"U20\", \"OUT8\", {(\"IC4\", \"EN\")}), (\"1v5_vdd_oct\", \"IC4\", \"V_OUT\", {(\"U20\", \"VMON6\"), (\"cpu\", \"VDD_15\")}), (\"en_2v5_cpu13\",",
"0)], vdd15 : [(0, 0)], ddr24 : [(0, 0)], ddr24_2v5 : [(0, 0)],",
"p 71 (\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA_EVAL3, []),",
"0)], vtt_ddr24 : [(0, 0)], vadj: [(0, 0)]}, \"\"), ({aux : [(0, 0)]},",
"(\"fpga_clk\", \"U16\", \"CLK\", {(\"fpga\", \"CLK\")}), (\"en_mgtvccaux_l\", \"U35\", \"OUT11\", {(\"IC5\", \"SHDN\")}), (\"en_mgtvccaux_r\", \"U35\", \"OUT12\",",
"bus_addr, device): self.device = device self.configured = False super(SI5395, self).__init__(name, bus_addr, SI5395) def",
"FPGA, []), (\"cpu\", 0x0, ThunderX, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP,",
": [(1140, 3400)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vtt :",
"\"U16\", \"CLK\", {(\"fpga\", \"CLK\")}), (\"en_mgtvccaux_l\", \"U35\", \"OUT11\", {(\"IC5\", \"SHDN\")}), (\"en_mgtvccaux_r\", \"U35\", \"OUT12\", {(\"IC6\",",
"wire, 0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return fun def configure(self):",
"= True return [ \"init_device('%s', False)\" % (self.device) ] class MAX15301(Node): implicit_off =",
"= threshold self.device = device self.loop1 = loop1 self.loop2 = loop2 self.l1_addr =",
"= Input([(0, 1900)], \"power\") MGTVCCAUX_R = Input([(0, 1900)], \"power\") VCCO_1V8 = Input([(0, 2000)],",
"1}], \"logical\") V33_PSU = Output([(0, 3300)], [ Constraint([(3300, 3300)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V33_PSU\",",
"node, name: node.isppac_monitor(\"VMON5_ATT\", name)) VMON6 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON6\",",
"[(0, self.threshold-1)], self.VCC.name : [(0, 2500)]} def update(self, states): try: intersect(states[self.VOUT.name], [(500, 3040)])",
"\"VID\")}), (\"c_reset_n\", \"bmc\", \"C_RESET_N\", {(\"cpu\", \"CHIP_RESET_L\")}), (\"pll_dc_ok\", \"bmc\", \"C_PLL_DC_OK\", {(\"cpu\", \"PLL_DC_OK\")}), (\"en_vdd_ddrcpu13\", \"U20\",",
"return {} def bus_req_off(self): return {} def update(self, states): try: intersect(states[self.V_OUT.name], [(500, 1520)])",
"0x0, MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA, []), (\"cpu\", 0x0, ThunderX,",
": [(700, 800)], vttddr13 : [(700, 800)]}, \"%s should have stabilized by now\"",
"ISL, []), #VDD_DDRFPGA13 p 93 (\"U47\", 0x0, ISL, []), #DD_DDRFPGA24 p 95 (\"IC5\",",
"{(\"IC15\", \"V_EN\")}), #to NCP (\"2v5_cpu13\", \"IC15\", \"V_OUT\", {(\"U20\", \"VMON7_ATT\"), (\"U24\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR13\")}),",
"##### VALUES FOR EVAL 3: ##### vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200,",
"1}], \"bus\") VS = Input([(0, 6000)], \"power\") VBUS = Input([(0, 40000)], \"monitor\", lambda",
"name, bus_addr): super(Bus, self).__init__(name, bus_addr, Bus) def construct_req(self, inputs): req = {} for",
"(\"U11\", 0x0, SI5395, [\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ]",
": [{0}]}, \"\") ], \"POWERED_DOWN\" : [] } ), \"POWERED_ON\" : PowerState({ ok",
"partial(Constraint.implicit, \"CLK\", {\"VDD\": [(0, 2599)]}))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Oscillator, self).__init__(name,",
"({aux : [(0, 0)]}, \"\"), ({io : [(0, 0)]}, \"\"), ({vcc : [(0,",
"8499)]} VDD33 = Input([(0, 4000)], \"power\") BUS = Input([{0, 1}], \"bus\") VDDH =",
"for node, _ in inputs: node_req = node.bus_req_off() for wire, state in node_req.items():",
"#UTIL_3V3 p.90 (\"IC4\", 0x10, MAX15301, [1500, \"max15301_15_vdd_oct\"]), #1V5_VDD_OCT p.70 (\"IC11\", 0x12, MAX15301, [1800,",
"\"OUT16\", {(\"IC7\", \"V_EN\")}), (\"sys_1v8\", \"IC7\", \"V_OUT\", {(\"U35\", \"VMON12\"), (\"fpga\", \"VCCO_1V8\")}), #where to connect",
"ddr24, ddr24_2v5, ddr13, ddr13_2v5, vttddr24, vttddr13, io33 : { #pll_vdd, pll_ddr2, pll_ddr13, sys_pll_ddr",
"return (True, \"\\n\".join(commands)) else: return (False, \"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % (",
"\"power\") def __init__(self, name, bus_addr, voltage): self.implicit_on = self.implicit_on(int(voltage * 1.01)) self.implicit_off =",
"{0, 1}], \"VCC\" : [(0, 5250)], \"EN_PWR\" : [{0}], \"EN_VTT\" : [(0, 14000)]},",
"self).__init__(name, bus_addr, MAX8869) class MAX15053(Node): implicit_on = lambda _, threshold: {\"V_IN\" : [(max(int(threshold",
"0), (3300, 3300), (0, 0)], vcc : [(0, 0)], io : [(0, 0)],",
"({aux : [(1746, 1854)]}, \"\"), ##### REGULAR TRANSITION STEP ###### #({vcco : [(1746,",
"VDD_2V5_DDR13 = Input([(0, 3300)], \"power\") #actually, the CPU names its ddr bank voltage",
"(not 24), #but I adjusted it to match the schematics, so I know",
"pinname )) else: commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='%s')\" % ( wire_name, 0.00095 *",
"(\"2v5_cpu13\", \"IC15\", \"V_OUT\", {(\"U20\", \"VMON7_ATT\"), (\"U24\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR13\")}), (\"vtt_ddrcpu13\", \"U24\", \"VREF\", {(\"U20\",",
"\"power\", lambda node, name: node.isppac_monitor(\"VCCA\", name)) def __init__(self, name, bus_addr, device): self.device =",
"states[node.VS.name][0][0] < 5500: commands = node.configure() commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % (",
"vdd33, vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd33: [(0, 0)], vdd : [(0,",
"\"V_EN\")}), #to NCP (\"2v5_cpu13\", \"IC15\", \"V_OUT\", {(\"U20\", \"VMON7_ATT\"), (\"U24\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR13\")}), (\"vtt_ddrcpu13\",",
"Constraint([(0, 0)], {\"VRI\" : [(0, 868)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\")),",
"[ \"init_device('%s', False)\" % (self.device) ] class MAX15301(Node): implicit_off = {\"EN\": [{0}], \"V_PWR\":",
"update(self, states): try: intersect(states[self.VOUT.name], [(500, 1600)]) self.is_default = True except State_Space_Error: self.is_default =",
"(True, \"\\n\".join(commands)) else: return (False, \"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire,",
"3400)], ##### VALUES FOR EVAL 3: ##### vcc_ddr13 : [(1200, 1200)], vcc_ddr24 :",
"( wire_name, node.device, pinname )) else: commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='%s')\" % (",
"0)], io : [(0, 0)], aux : [(0, 0)], vcco : [(0, 0)],",
"super(ISPPAC, self).__init__(name, bus_addr, ISPPAC) self.configured = False def generate_output(self, number): name = \"OUT\"",
"states[node.VS.name][0][0] > 2700 and states[node.VS.name][0][0] < 5500: commands = node.configure() commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f,",
"[(570, 630)], vttddr13 : [(570, 630)]}, \"%s should have stabilized by now\" %vdd),",
"nodes, only changes classes of ThunderX and FPGA to EVAL 3 versions enzian_nodes_EVAL3",
"\"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"V_EN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")),",
"}, { \"POWERED_DOWN\" : [ ({clk: [(0, 0), (3300, 3300), (50, 50)]}, \"\"),",
"bus_addr, Oscillator) class SI5395(Node): implicit_on = {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300,",
"name: node.isppac_monitor(\"VMON7\", name)) VMON7_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7_ATT\", name))",
"0x0, ThunderX, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP, []), (\"U25\", 0x0,",
"name, {\"VCC\", \"VCC_IN\"}, set())), Constraint([{0}], {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 6000)]}, partial(Constraint.implicit, name,",
"\"VCCO_2V5_DDR24\")}), #add NCP nodes (\"vtt_ddrfpga24\", \"U40\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR24\")}), (\"clk_sig\", \"oscillator\", \"CLK\", {(\"U57\",",
"(\"vtt_ddrfpga13\", \"U39\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR13\")}), (\"en_sys_2v5_24\", \"U35\", \"OUT8\", { (\"IC9\", \"V_EN\")}), (\"sys_2v5_24\", \"IC9\",",
"MAX8869, [1800]), #MGTVCCAUX_L, p 88 (\"IC6\", 0x0, MAX8869, [1800]), #MGTVCCAUX_R, p 88 (\"IC7\",",
"[(873, 927)], ok : [{1}] }, { \"POWERED_DOWN\" : [ ({clk: [(0, 0),",
"\"EN2\"]) def __init__(self, name, bus_addr): super(CPU2, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU2) class BMC(Node): B_CLOCK_FLOL",
"[(940, 980)], vdd09 : [(870, 930)], vdd15 : [(1450, 1550)], ####### REGULAR VALUES",
"states), req.keys()))}) class MAX20751(Node): implicit_on = {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]} implicit_off",
"{\"VRI\" : [(868, 3600)], \"VCC\" : [(2375, 5500)]} implicit_off = {\"VRI\" : [(0,",
"{(\"IC13\", \"EN\")}), (\"vccintio_bram_fpga\", \"IC13\", \"V_OUT\", {(\"U35\", \"VMON10\"), (\"fpga\", \"VCCINT_IO\")}), (\"en_vdd_ddrfpga13\", \"U35\", \"OUT18\", {(\"U43\",",
": [(0, 0)], io33 : [(0, 0)] }, { \"POWERED_ON\" : [ ({ok",
"[(2000, 2500)]}, \"wait until \" + vdd + \" stabilized\"), ({en1 : [{1}]},",
"class CPU2(Stateful_Node): VDD = Input([(0, 2600)], \"power\") EN1 = Input([{0, 1}], \"logical\") EN2",
"the Enzian nodes, only changes classes of ThunderX and FPGA to EVAL 3",
"states): try: intersect(states[self.VOUT.name], [(500, 1600)]) self.is_default = True except State_Space_Error: self.is_default = False",
"6000)], \"power\") SHDN = Input([{0, 1}], \"logical\") V_OUT = lambda _, default, thresh:",
"sequ; filled in like VCCO_VCC_DDR voltages MGTVCCAUX_L = Input([(0, 1900)], \"power\") MGTVCCAUX_R =",
"{ \"POWERED_ON\" : [ ({mgtaux_l : [(0, 0)], mgtaux_r : [(0, 0)]}, \"\"),",
": [(873, 927)], aux : [(1746, 1854)], vcco : [(1746, 1854)], vadj :",
"\"U35\", \"OUT9\", {(\"U51\", \"VR_ON\")}), (\"vccint_fpga\", \"U51\", \"V_OUT\", {(\"U35\", \"VMON6\"), (\"fpga\", \"VCCINT\")}), (\"en_sys_1v8\", \"U35\",",
"BUS = Input([{0, 1}], \"bus\") EN = Input([{0, 1}], \"logical\") V_PWR = Input([(0,",
"({vcc : [(873, 927)]}, \"\"), ({io : [(873, 927)]}, \"\"), ({aux : [(1746,",
"THUNDERX: Comments indicate changes class ThunderX_EVAL3(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L =",
"State_Space_Error: self.is_default = False try: intersect(states[self.VDD33.name], [(0, 2800)]) self.current = [(self.default, self.default)] return",
"wire, state in node_req.items(): if not wire in req: req[wire] = state else:",
"3460)], }, { \"POWERED_DOWN\": [ ({clk: [(0, 0), (3300, 3300), (50, 50)], io33",
"1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_CDV_1V8\", set(),",
"self).__init__(name, bus_addr, BMC) self.configured = False def configure(self): if self.configured: return [] else:",
"bus_addr): super(Main_PSU, self).__init__(name, bus_addr, Main_PSU) class PowerSupply(Node): OUT0 = Output([(0, 12000)], [([(0, 12000)],",
"#add bmc (\"5v_psup\", \"main_psu\", \"V5_PSU\", {(\"U35\", \"VMON2_ATT\"), (\"U20\", \"VMON2_ATT\"), (\"U26\", \"VCC\"), (\"U30\", \"VCC\"),",
"\"V_OUT\", {(\"U35\", \"VMON3_ATT\"), (\"U44\", \"VS\"), (\"U48\", \"VS\"), (\"U37\", \"VDD33\"), (\"U41\", \"VDD33\"), (\"IC5\", \"V_IN\"),",
"3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.explicit, \"CLK\", {\"VDD\", \"CLK_IN\"}, set())),",
"sys_1v8.... VCCINT = Input([(0, 1000)], \"power\") MGTAVCC = Input([(0, 1000)], \"power\") MGTAVTT =",
"= Input([{0, 1}], \"logical\") V_OUT = lambda _, default, thresh: Output([(0, thresh)], [Constraint([(default,",
"(0, 50)], [ Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)]}, partial(Constraint.implicit,",
"Input([{0, 1}], \"bus\") VS = Input([(0, 6000)], \"power\") VBUS = Input([(0, 40000)], \"monitor\",",
"= \"OUT\" + str(number) output = Output([{0, 1}], [Constraint([{1, 0}], {\"VCC\": [(2800, 3960)],",
"\"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"SHDN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")),",
"Output([(0, 6000)], [Constraint([(435, 1800)], {\"VRI\" : [(868, 3600)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit,",
"Input([(0, 40000)], \"monitor\", lambda node, name: node.ina_monitor(name)) def __init__(self, name, bus_addr, device): self.device",
"[] else: self.configured = True return [ \"init_device('%s', False)\" % (self.device) ] #EVAL",
"\"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX_EVAL3, self).__init__(name,",
"\"CLK\", {}, set())])), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit,",
"[(1164, 1236)]}, \"\"), ({mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746, 1854)]}, \"\"), ({ok:",
"= Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_RESET_N\", set(), set()))], \"logical\", Wire.gpio_set) C_PLL_DC_OK",
"Output([(3300, 3300)], [Constraint([(3300, 3300)], {}, partial(Constraint.implicit, \"BMC_VCC_3V3\", {}))], \"power\") V5_PSU = Output([(0, 5000)],",
"[(4750, 5250)], \"EN_PWR\" : [{1}], \"EN_VTT\" : [(870, 14000)]}, {}, \\ dependency_update= (Constraint.is_default,",
"return [ \"init_device('%s', False)\" % self.device, \"init_device('%s', False)\" % self.loop1, \"init_device('%s', False)\" %",
": [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ##### FOR",
"(\"cpu\", \"VDD_15\")}), (\"en_2v5_cpu13\", \"U20\", \"OUT9\", {(\"IC15\", \"V_EN\")}), #to NCP (\"2v5_cpu13\", \"IC15\", \"V_OUT\", {(\"U20\",",
"(\"oscillator\", 0x0, Oscillator, []), ] enzian_wires = [ (\"b_psup_on\", \"bmc\", \"B_PSUP_ON\", {(\"psu_cpu0\", \"EN\"),",
"= {\"VRI\" : [(868, 3600)], \"VCC\" : [(2375, 5500)]} implicit_off = {\"VRI\" :",
"partial(Constraint.implicit, \"CLK\", \"implicit_off\")), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"CLK_IN\": [(0, 0), (3300,",
"VALUES ######### #ddr13 : [(1425, 1575)], #ddr24: [(1425, 1575)], ####### FOR EVAL 3",
"Input([(0, 1900)], \"power\") VCCO_1V8 = Input([(0, 2000)], \"power\") #this is sys_1v8.... VCCINT =",
"device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0], node.device ))",
"\"EN_PWR\"})]))) outputs.append(Constraint([(0, 0)], {\"VID\": [{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"node=self, wire=wire_name): if states[node.VS.name][0][0] > 2700 and states[node.VS.name][0][0] < 5500: commands = node.configure()",
"\"V_IN\"), (\"IC9\", \"V_IN\"), (\"U51\", \"VDD33\")}), (\"en_mgtavtt_fpga\", \"U35\", \"OUT14\", {(\"U41\", \"VR_ON\")}), (\"mgtavtt_fpga\", \"U41\", \"V_OUT\",",
"(50, 50)]}, \"\"), ({vcc : [(873, 927)]}, \"\"), ({io : [(873, 927)]}, \"\"),",
"BUS = Input([{0, 1}], \"bus\") VS = Input([(0, 6000)], \"power\") VBUS = Input([(0,",
"\"U35\", \"OUT8\", { (\"IC9\", \"V_EN\")}), (\"sys_2v5_24\", \"IC9\", \"V_OUT\", {(\"U35\", \"VMON5_ATT\"), (\"U40\", \"VCC\"), (\"fpga\",",
"[Constraint([{1, 0}], {\"VCC\": [(2800, 3960)], \"VCC_IN\": [(2250, 5500)]}, partial(Constraint.explicit, name, {\"VCC\", \"VCC_IN\"}, set())),",
"\"logical\", Wire.gpio_set) C_PLL_DC_OK = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_PLL_DC_OK\", set(), set()))],",
"name, bus_addr, device): self.device = device super(INA226, self).__init__(name, bus_addr, INA226) self.configured = False",
"(\"en_vdd_ddrfpga13\", \"U35\", \"OUT18\", {(\"U43\", \"EN_PWR\")}), (\"en_vdd_ddrfpga24\", \"U35\", \"OUT19\", {(\"U47\", \"EN_PWR\")}), (\"vdd_ddrfpga13\", \"U43\", \"VOUT\",",
"{0, 1}, {0, 1}], [ Constraint([{0, 1}, {0, 1}, {0, 1}, {0, 1},",
")}, [\"CLK\", \"CLK_OK\", \"VCCINT\", \"VCCINT_IO\", \"VCCAUX\", \"VCCO_1V8\", \"VADJ_1V8\", \"VCCO_2V5_DDR13\", \"VCCO_2V5_DDR24\", \"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\",",
"50)], \"clock\") CLK_OK = Input([{0, 1}], \"logical\") VCCO_2V5_DDR24 = Input([(0, 3400)], \"power\") VCCO_2V5_DDR13",
"[ Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\":",
"self.V_OUT = self.V_OUT(int(voltage), int(voltage * 1.01)) super(MAX8869, self).__init__(name, bus_addr, MAX8869) class MAX15053(Node): implicit_on",
"0), (\"VRI\", 0)]))]), Constraint([(0, 0)], {\"VRI\" : [(0, 868)], \"VCC\" : [(2375, 5500)]},",
"i in range(0, 177): voltage_min = math.floor(1600 - i * 6.25) voltage_max =",
"vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), #####################################",
"l1_addr, l2_addr): self.configured = False self.is_default = False self.threshold = threshold self.device =",
"[(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" : [{1}]} implicit_off = lambda _, threshold:",
"{\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{0}]}))], \"power\") def __init__(self,",
"{(\"U47\", \"EN_PWR\")}), (\"vdd_ddrfpga13\", \"U43\", \"VOUT\", {(\"U48\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR13\"), (\"U39\", \"VRI\")}), #vcco (\"vdd_ddrfpga24\",",
"\"power\") VDD_2V5_DDR24 = Input([(0, 3300)], \"power\") VTT_DDR24 = Input([(0, 1400)], \"power\") VTT_DDR13 =",
"[ ({clk: [(0, 0), (3300, 3300), (50, 50)], io33 : [(3140, 3460)]}, \"wait",
"\"EN\")}), (\"3v3_psup\", \"main_psu\", \"V33_PSU\", {(\"U20\", \"VMON3_ATT\"), (\"U27\", \"VS\"), (\"U31\", \"VS\"), (\"IC15\", \"V_IN\"), (\"IC16\",",
"[{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\": [{0}]}))], \"power\") V5SB_PSU = Output([(5000,",
": [(2400, 2600)], ##### REGULAR VALUES: ####### #vcc_ddr13 : [(1140, 3400)], #vcc_ddr24 :",
"Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK = Output([(0, 0), (3300, 3300),",
"def __init__(self, name, bus_addr): super(CPU_3, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU_3) class PSU(Node): EN =",
"88 (\"IC6\", 0x0, MAX8869, [1800]), #MGTVCCAUX_R, p 88 (\"IC7\", 0x0, MAX15053, [1800]), #SYS_1V8,",
"{(\"U26\", \"VID\"), (\"U30\", \"VID\")}), (\"b_fdv_1v8\", \"bmc\", \"B_FDV_1V8\", {(\"U43\", \"VID\"), (\"U47\", \"VID\")}), (\"c_reset_n\", \"bmc\",",
"[(873, 927)]}, \"\"), ({vtt : [(1164, 1236)]}, \"\"), ({mgtaux_l : [(1746, 1854)], mgtaux_r",
"lambda node, name: node.isppac_monitor(\"VMON10\", name)) VMON11 = Input([(0, 5734)], \"monitor\", lambda node, name:",
"len(binary))) for i in binary: multidim.append({int(i)}) return multidim def isl_outputs(): outputs = []",
"[(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)],",
"def binary_multidimensional(decimal): binary = bin(decimal)[2:] #remove 0b prefix multidim = list({0} for i",
"\"VIN\" : [(0, thresh-1)], \"EN_2\" : [{0}]} device = \"ir3581\" bus = \"power\"",
"set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)], \"EN_2\" :",
"node.configure() commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095 * list(value[0])[0], 0.00105",
"[ \"init_device('isl6334d_ddr_v', False)\" ] class CPU_3(Stateful_Node): VDD33 = Input([(0, 4000)], \"power\") VDD =",
"node.isppac_monitor(\"VMON4\", name)) VMON4_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4_ATT\", name)) VMON5",
"try: intersect(states[self.V_OUT.name], [(600, 5250)]) self.current = states[self.V_OUT.name] self.is_default = True return except State_Space_Error:",
"1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ({mgtavcc : [(873, 927)]},",
"[{0}]} device = \"ir3581\" bus = \"power\" BUS = Input([{0, 1}], \"bus\") #loop",
"= Constraint.default_state), Constraint([(600, 5250)], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)], \"BUS\" : [{1}]}, {},",
"{\"V_PWR\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"EN\": [{0}], \"V_PWR\": [(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\",",
"= self.V_OUT(int(voltage), int(voltage * 1.01)) super(MAX8869, self).__init__(name, bus_addr, MAX8869) class MAX15053(Node): implicit_on =",
"mgtaux_r : [(0, 0)], mgtavcc : [(0, 0)], ok : [{0}] }, {",
"[(2400, 2600)], vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], vtt_ddr13 : [(550,",
"(3300, 3300), (50, 50)], io33 : [(3140, 3460)]}, \"wait for %s to stabilize\"",
"2000)], \"power\") #replace VREF VCCO_VTT_DDR24 = Input([(0, 2000)], \"power\") VCCO_VCC_DDR13 = Input([(0, 3400)],",
"intersect(states[self.V_OUT.name], [(500, 1520)]) self.current = states[self.V_OUT.name] self.is_default = True return except State_Space_Error: self.is_default",
"Wire.clock_config) def __init__(self, name, bus_addr, device): self.device = device self.configured = False super(SI5395,",
"\"B_PSUP_ON\", set(), set()))], \"logical\", Wire.gpio_set) C_RESET_N = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit,",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON3_ATT\", name)) VMON4 = Input([(0, 5734)], \"monitor\",",
"\"bmc\", \"B_FDV_1V8\", {(\"U43\", \"VID\"), (\"U47\", \"VID\")}), (\"c_reset_n\", \"bmc\", \"C_RESET_N\", {(\"cpu\", \"CHIP_RESET_L\")}), (\"pll_dc_ok\", \"bmc\",",
"5500)], \"SHDN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)],",
"node, name: node.isppac_monitor(\"VMON1_ATT\", name, 0.4125)) VMON2_ATT = Input([(0, 5734)], \"monitor\", lambda node, name:",
"in node_req.items(): if not wire in req: req[wire] = state else: req[wire] =",
"wire, 0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return (True, \"\\n\".join(commands)) else:",
"default)] self.V_OUT = MAX20751.V_OUT(default) super(MAX20751, self).__init__(name, bus_addr, MAX20751) def bus_req(self): return {} def",
"* multiplier, 0.00105 * list(value[0])[0] * multiplier, node.device, pinname )) return (True, \"\\n\".join(commands))",
"1575)], ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700, 800)], vttddr13 : [(700, 800)]},",
"dependency_update=(Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\",",
"not empty_intersection(x, req, states), req.keys()))}) class MAX20751(Node): implicit_on = {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)],",
"bus_addr, \"POWERED_DOWN\", ThunderX) #EVAL 3 version of the THUNDERX: Comments indicate changes class",
"\"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\",",
"Input([{0, 1}], \"logical\") states = (lambda vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}], \"logical\") VOUT = Output([(0, 1600)],",
"({io : [(0, 0)]}, \"\"), ({vcc : [(0, 0)]}, \"\"), ], \"POWERED_DOWN\" :",
"self).__init__(name, bus_addr, Main_PSU) class PowerSupply(Node): OUT0 = Output([(0, 12000)], [([(0, 12000)], {}, [],",
"{(\"IC6\", \"SHDN\")}), (\"mgtvccaux_l\", \"IC5\", \"V_OUT\", {(\"U35\", \"VMON8\"), (\"fpga\", \"MGTVCCAUX_L\")}), (\"mgtvccaux_r\", \"IC6\", \"V_OUT\", {(\"U35\",",
"self.V_OUT(voltage, int(voltage * 1.01)) super(MAX15053, self).__init__(name, bus_addr, MAX15053) def binary_multidimensional(decimal): binary = bin(decimal)[2:]",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON9\", name)) VMON10 = Input([(0, 5734)], \"monitor\", lambda",
"Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK_OK = Input([{0, 1}], \"logical\") VCCO_2V5_DDR24",
"the connected DIMM SPD needs 2.5 V ddr13_2v5 : [(2400, 2600)], vttddr24 :",
"Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(0,",
"\"U40\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR24\")}), (\"clk_sig\", \"oscillator\", \"CLK\", {(\"U57\", \"CLK_IN\")}), (\"clk_main\", \"U57\", \"CLK\", {(\"U11\",",
"12000)], {}, [], lambda node: node.indep(\"OUT1\"))], \"power\") OUT2 = Output([(0, 12000)], [([(0, 12000)],",
"False self.is_default = False self.threshold = threshold self.device = device self.loop1 = loop1",
"= device super(INA226, self).__init__(name, bus_addr, INA226) self.configured = False def ina_monitor(self, wire_name): def",
"device self.loop1 = loop1 self.loop2 = loop2 self.l1_addr = l1_addr self.l2_addr = l2_addr",
"({io : [(873, 927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"), ##### REGULAR TRANSITION",
": [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd : [(2300, 2600)], en1",
"threshold: {\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" : [{1}]} implicit_off =",
"class Clock(Node): CLK = Output([(0, 3300), (0, 60)], [([(0, 3300), (0, 60)], {},",
"\"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\",",
"node.isppac_monitor(\"VMON11_ATT\", name)) VMON12 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON12\", name)) VCC_IN",
"\"\") ], \"POWERED_ON\" : []} )}, [\"CLK\", \"CLK_OK\", \"VCCINT\", \"VCCINT_IO\", \"VCCAUX\", \"VCCO_1V8\", \"VADJ_1V8\",",
"\"power\") VRI = Input([(0, 6000)], \"power\") #reference input VREF = Output([(0, 6000)], [Constraint([(435,",
"p 71 (\"fpga\", 0x0, FPGA, []), (\"cpu\", 0x0, ThunderX, []), (\"bmc\", 0x0, BMC,",
"\"U35\", \"OUT6\", {(\"IC10\", \"EN\")}), (\"util33\", \"IC10\", \"V_OUT\", {(\"U35\", \"VMON3_ATT\"), (\"U44\", \"VS\"), (\"U48\", \"VS\"),",
"[Constraint([(500, 3040)], {\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN\" : [{1}],",
"\"VDD_09\")}), (\"en_1v5_vdd_oct\", \"U20\", \"OUT8\", {(\"IC4\", \"EN\")}), (\"1v5_vdd_oct\", \"IC4\", \"V_OUT\", {(\"U20\", \"VMON6\"), (\"cpu\", \"VDD_15\")}),",
"[(0, 13200)], \"EN\" : [{0}]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\")), ], \"power\", Wire.ir_set) VOUT_2 =",
"\"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)],",
"binary = bin(decimal)[2:] #remove 0b prefix multidim = list({0} for i in range(8",
"{\"VCC\", \"VIN\", \"BUS\"}, {\"EN_2\"}, after_set = {\"EN_2\"}), partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\", \"EN_2\"},",
"{}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}),",
"50)], [ Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)]}, partial(Constraint.implicit, \"CLK\",",
"else: self.configured = True return [ \"init_device('%s', False)\" % self.device, \"init_device('%s', False)\" %",
"(\"U41\", 0x73, MAX20751, [1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA p 87 (\"U51\", 0x70, MAX20751, [900, \"max20751_vccint_fpga\"]), #VCCINT_FPGA",
"V5_PSU = Output([(0, 5000)], [ Constraint([(5000, 5000)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\" :",
"[(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_on\"), complex_constraints= [(lambda x1, x2: z3.Or(x1 * 2 ==",
"EN = Input([{0, 1}], \"logical\") V_PWR = Input([(0, 14000)], \"power\") V_OUT = lambda",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_CDV_1V8\", set(), set()))],",
"Output([(0, 3300)], [ Constraint([(3300, 3300)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{1}]})), Constraint([(0, 0)],",
"2700), 5500)], \"V_EN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0,",
"self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA_EVAL3) class ThunderX(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L =",
"{0, 1}, {0, 1}, {0, 1}], \"logical\") VOUT = Output([(0, 1600)], isl_outputs(), \"power\")",
"3300), (0, 0)], vcc : [(0, 0)], io : [(0, 0)], aux :",
"V_IN\" V_IN = Input([(0, 12000)], \"power\") def __init__(self, name, bus_addr): super(PowerConsumer, self).__init__(name, bus_addr,",
"p 89 (\"IC8\", 0x0, MAX15053, [2500]), #SYS_2V5_13 (\"IC9\", 0x0, MAX15053, [2500]), #SYS_2V5_24 (\"IC15\",",
"[(5500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update=(Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\"}, {\"EN\"}, after_set",
"{(\"U20\", \"VMON8_ATT\"), (\"U25\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR24\")}), (\"vtt_ddrcpu24\", \"U25\", \"VREF\", {(\"U20\", \"VMON12\"), (\"cpu\", \"VTT_DDR24\")}),",
"3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(0, 2599)]}))], \"clock\", Wire.clock_config)",
": [(868, 3600)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_on\"), complex_constraints= [(lambda x1,",
": [(0, 0)], vcc_2v5_ddr24 : [(0, 0)], vcc_ddr13 : [(0, 0)], vcc_ddr24 :",
"[(0, 0)], vttddr24 : [(0, 0)], vttddr13 : [(0, 0)], io33 : [(0,",
"\"VIN\")}), (\"12v_cpu1_psup\", \"psu_cpu1\", \"OUT\", {(\"U35\", \"VMON1_ATT\"), (\"U37\", \"VDDH\"), (\"U41\", \"VDDH\"), (\"IC10\", \"V_PWR\"), (\"IC11\",",
"B_FDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"\"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX)",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON4\", name)) VMON4_ATT = Input([(0, 5734)], \"monitor\", lambda node,",
"0x0, NCP, []), (\"U57\", 0x0, SI5395, [\"clk_main\"]), (\"U11\", 0x0, SI5395, [\"clk_cpu\"]), (\"U16\", 0x0,",
"#where to connect at fpga? additional vcco thingy? (\"en_sys_2v5_13\", \"U35\", \"OUT7\", { (\"IC8\",",
"1575)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1425, 1575)], ddr13_2v5 : [(2400, 2600)],",
"(\"U30\", 0x0, ISL, []), #VDD_DDRCPU24 p 75 (\"U37\", 0x72, MAX20751, [900, \"max20751_mgtavcc_fpga\"]), #MGTACC_FPGA",
"0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\")), Constraint([(0, 0), (3300, 3300), (0, 0)],",
"\"BUS\"), (\"IC4\", \"BUS\"), (\"IC11\", \"BUS\"), (\"U34\", \"BUS\"), (\"U37\", \"BUS\"), (\"U41\", \"BUS\"), (\"U51\", \"BUS\")}),",
"for i in binary: multidim.append({int(i)}) return multidim def isl_outputs(): outputs = [] for",
"req[wire] = state else: req[wire] = state_union(state, req[wire]) print(req) return req def construct_dependency(self,",
"(50, 50)]}, partial(Constraint.implicit, \"CLK\", \"implicit_on\"), dependency_update= (Constraint.is_configured, [partial(Constraint.implicit, \"CLK\", \"implicit_on\"), partial(Constraint.explicit, \"CLK\", {},",
"5500: commands = node.configure() commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095",
"lambda default : Output([(0, 1520)], [Constraint([], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]}, partial(Constraint.implicit,",
"0.00105 * list(value[0])[0], node.device )) return (True, \"\\n\".join(commands)) else: return (False, \"wait_for_voltage('%s', v_min=%.3f,",
"1200)], ################################# ddr24_2v5 : [(2400, 2600)], #not imposed by the cpu, but the",
"= {\"VCC\" : [(0, 4300)], \"EN_PWR\" : [{0, 1}], \"EN_VTT\" : [(0, 830)]}",
"else: req[wire] = state_union(state, req[wire]) print(req) return req def construct_dependency(self, name, req): return",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON5_ATT\", name)) VMON6 = Input([(0, 5734)], \"monitor\", lambda node,",
"\"VMON6\"), (\"fpga\", \"VCCINT\")}), (\"en_sys_1v8\", \"U35\", \"OUT16\", {(\"IC7\", \"V_EN\")}), (\"sys_1v8\", \"IC7\", \"V_OUT\", {(\"U35\", \"VMON12\"),",
"vttddr13 : [(700, 800)]}, \"%s should have stabilized by now\" %vdd), #### FOR",
"1800)], {\"VRI\" : [(868, 3600)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_on\"), complex_constraints=",
"\"VCCAUX\")}), (\"en_util33\", \"U35\", \"OUT6\", {(\"IC10\", \"EN\")}), (\"util33\", \"IC10\", \"V_OUT\", {(\"U35\", \"VMON3_ATT\"), (\"U44\", \"VS\"),",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8_ATT\", name)) VMON9 = Input([(0, 5734)], \"monitor\", lambda",
"3300)], [Constraint([(3300, 3300)], {}, partial(Constraint.implicit, \"BMC_VCC_3V3\", {}))], \"power\") V5_PSU = Output([(0, 5000)], [",
"\"implicit_off_2\")), ], \"power\", Wire.ir_set) def __init__(self, name, bus_addr, threshold, device, loop1, loop2, l1_addr,",
"2.5 V ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570, 630)], vttddr13 : [(570,",
"name)) def __init__(self, name, bus_addr, device): self.device = device for i in range(0,",
"= Input([{0, 1}], \"logical\") EN_2 = Input([{0, 1}], \"logical\") VIN = Input([(0, 13200)],",
"[(1450, 1550)], ddr13 : [(1425, 1575)], ddr24: [(1425, 1575)], ddr24_2v5 : [(2400, 2600)],",
"ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570, 630)], vttddr13 : [(570, 630)]}, \"%s",
"[set(), set(), set(), set()], lambda states, req = req: {name : set(filter(lambda x:",
"configure(self): if self.configured: return [] else: self.configured = True return [ \"init_device('isl6334d_ddr_v', False)\"",
"6000)], \"power\") V_EN = Input([{0, 1}], \"logical\") V_OUT = lambda _, default, threshold:",
"[(0, 4400)]} def update(self, states): try: intersect(states[self.V_OUT.name], [(600, 5250)]) self.current = states[self.V_OUT.name] self.is_default",
"\"U35\", \"OUT19\", {(\"U47\", \"EN_PWR\")}), (\"vdd_ddrfpga13\", \"U43\", \"VOUT\", {(\"U48\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR13\"), (\"U39\", \"VRI\")}),",
"[(0, 3600)], \"VCC\" : [(0, 2374)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\"))], \"power\") def __init__(self, name,",
"[(0, 0)], mgtaux_r : [(0, 0)]}, \"\"), ({vtt : [(0, 0)]}, \"\"), ({mgtavcc",
"1}], \"logical\") EN2 = Input([{0, 1}], \"logical\") states = (lambda vdd, en1, en2:",
"\"implicit_on\"), complex_constraints= [(lambda x1, x2: z3.Or(x1 * 2 == x2, x1 * 2",
"3600)], \"power\") states = (lambda ok, rst, clk, vdd, vdd09, vdd15, ddr24, ddr24_2v5,",
"node, name: node.isppac_monitor(\"VMON4_ATT\", name)) VMON5 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5\",",
"(\"en_vadj_1v8_fpga\", \"U35\", \"OUT17\", {(\"IC11\", \"EN\")}), (\"vadj_1v8_fpga\", \"IC11\", \"V_OUT\", {(\"fpga\", \"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\", \"U35\", \"OUT13\",",
"name, bus_addr, device): self.device = device for i in range(0, 20): self.generate_output(i) super(ISPPAC,",
"[(0, 4400)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass class NCP(Node): implicit_on",
"= False try: intersect(states[self.VIN.name], [(self.threshold, 13200)]) except State_Space_Error: self.configured = False return try:",
"0), (3300, 3300), (0, 0)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300),",
"\"U35\", \"OUT17\", {(\"IC11\", \"EN\")}), (\"vadj_1v8_fpga\", \"IC11\", \"V_OUT\", {(\"fpga\", \"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\", \"U35\", \"OUT13\", {(\"IC13\",",
"[(0, 13200)], \"EN_2\" : [{0}]}, partial(Constraint.implicit, \"VOUT_2\", \"implicit_off_2\")), ], \"power\", Wire.ir_set) def __init__(self,",
"\"POWERED_DOWN\" : [] }), \"POWERED_ON\" : PowerState({ clk: [(0, 0), (3300, 3300), (50,",
"\"implicit_off\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr, device): self.device = device self.configured =",
"{\"VRI\" : [(868, 3600)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_on\"), complex_constraints= [(lambda",
"[]), ] enzian_wires = [ (\"b_psup_on\", \"bmc\", \"B_PSUP_ON\", {(\"psu_cpu0\", \"EN\"), (\"psu_cpu1\", \"EN\"), (\"main_psu\",",
"#SYS_2V5_13 (\"IC9\", 0x0, MAX15053, [2500]), #SYS_2V5_24 (\"IC15\", 0x0, MAX15053, [2500]), #2V5_CPU13 p 71",
"(\"IC8\", \"V_IN\"), (\"IC9\", \"V_IN\"), (\"U51\", \"VDD33\")}), (\"en_mgtavtt_fpga\", \"U35\", \"OUT14\", {(\"U41\", \"VR_ON\")}), (\"mgtavtt_fpga\", \"U41\",",
"{(\"IC7\", \"V_EN\")}), (\"sys_1v8\", \"IC7\", \"V_OUT\", {(\"U35\", \"VMON12\"), (\"fpga\", \"VCCO_1V8\")}), #where to connect at",
"0)], vadj : [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 : [(0, 0)],",
"the FPGA, comments indicate changes class FPGA_EVAL3(Stateful_Node): CLK = Input([(0, 0), (3300, 3300),",
"generate_output(self, number): name = \"OUT\" + str(number) output = Output([{0, 1}], [Constraint([{1, 0}],",
"(\"U31\", 0x45, INA226, [\"ina226_ddr_cpu_24\"]), #TODO: add real names of MAX15301s (\"IC12\", 0x11, MAX15301,",
"] class MAX15301(Node): implicit_off = {\"EN\": [{0}], \"V_PWR\": [(0, 4400)]} implicit_on = {\"EN\":",
"1}, {0, 1}, {0, 1}], [ Constraint([{0, 1}, {0, 1}, {0, 1}, {0,",
"{\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"EN\":",
"except State_Space_Error: self.configured = False def configure(self): if self.configured: return [] else: self.configured",
"(SET.Implicit, [set(), set(), set(), set()], lambda states, req = req: {name : set(filter(lambda",
": [(max(thresh + 500, 2700), 5500)], \"SHDN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0,",
"name, bus_addr, device): self.device = device self.configured = False super(SI5395, self).__init__(name, bus_addr, SI5395)",
"(0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(0, 2599)]}))], \"clock\", Wire.clock_config) def",
"VCC = Input([(0, 6000)], \"power\") EN_PWR = Input([{0, 1}], \"logical\") EN_VTT = Input([(0,",
"default : Output([(0, 1520)], [Constraint([], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]}, partial(Constraint.implicit, \"V_OUT\",",
"= (lambda vdd33, vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd33: [(0, 0)], vdd",
"3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\")), Constraint([(0, 0), (3300, 3300),",
"\"CLK\", \"implicit_on\"), dependency_update= (Constraint.is_configured, [partial(Constraint.implicit, \"CLK\", \"implicit_on\"), partial(Constraint.explicit, \"CLK\", {}, set())])), Constraint([(0, 0),",
"list(value[0])[0] * multiplier, node.device, pinname )) return (True, \"\\n\".join(commands)) return fun def configure(self):",
": [(1746, 1854)]}, \"\"), ({ok: [{1}]}, \"\") ], \"POWERED_ON\" : []} )}, [\"CLK\",",
"#pll_vdd, pll_ddr2, pll_ddr13, sys_pll_ddr : { \"POWERED_DOWN\" : PowerState({ ok : [{0}], rst",
"REGULAR TRANSITION STEP ######### #({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], ddr13",
"(0, 60)], [([(0, 3300), (0, 60)], {}, [], lambda node: node.indep(\"CLK\"))], \"clock\", Wire.clock_config)",
"3400)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"),",
"device): self.device = device for i in range(0, 20): self.generate_output(i) super(ISPPAC, self).__init__(name, bus_addr,",
"\"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\", \"U35\", \"OUT13\", {(\"IC13\", \"EN\")}), (\"vccintio_bram_fpga\", \"IC13\", \"V_OUT\", {(\"U35\", \"VMON10\"), (\"fpga\", \"VCCINT_IO\")}),",
"(\"IC4\", \"BUS\"), (\"IC11\", \"BUS\"), (\"U34\", \"BUS\"), (\"U37\", \"BUS\"), (\"U41\", \"BUS\"), (\"U51\", \"BUS\")}), ]",
"p 87 (\"U51\", 0x70, MAX20751, [900, \"max20751_vccint_fpga\"]), #VCCINT_FPGA p 97 (\"U43\", 0x0, ISL,",
"}, { \"POWERED_DOWN\": [ ({clk: [(0, 0), (3300, 3300), (50, 50)], io33 :",
"], \"power\") def __init__(self, name, bus_addr, voltage): self.implicit_on = self.implicit_on(int(voltage * 1.01)) self.implicit_off",
"= True return [ \"init_device('%s', False)\" % self.device, \"init_device('%s', False)\" % self.loop1, \"init_device('%s',",
"\"VCCAUX\", \"VCCO_1V8\", \"VADJ_1V8\", \"VCCO_2V5_DDR13\", \"VCCO_2V5_DDR24\", \"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"])",
"\"\") ], \"POWERED_ON\": []}) }, [\"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU2,",
"self).__init__(name, bus_addr, Oscillator) class SI5395(Node): implicit_on = {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0),",
"enzian_nodes_EVAL3 = [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\", 0x0, PSU, []), (\"psu_cpu1\", 0x0,",
"\"power\") V5SB_PSU = Output([(5000, 5000)], [Constraint([(5000, 5000)], {}, partial(Constraint.implicit, \"V5SB_PSU\", {}))], \"power\") BMC_VCC_3V3",
"thresh : {\"VCC\" : [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN_2\" : [{0}]}",
"\"POWERED_ON\": []}) }, [\"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU2, self).__init__(name, bus_addr,",
"vdd : [(2000, 2500)], en1 : [{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [",
"NCP, []), (\"U25\", 0x0, NCP, []), (\"U39\", 0x0, NCP, []), (\"U40\", 0x0, NCP,",
"bus_addr, voltage): self.implicit_on = self.implicit_on(int(voltage * 1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT",
"1}, {0, 1}, {0, 1}], \"VCC\" : [(0, 5250)], \"EN_PWR\" : [{0}], \"EN_VTT\"",
"\"implicit_on\"), dependency_update= (Constraint.is_configured, [partial(Constraint.implicit, \"CLK\", \"implicit_on\"), partial(Constraint.explicit, \"CLK\", {}, set())])), Constraint([(0, 0), (3300,",
"92 (\"IC10\", 0x1B, MAX15301, [3300, \"max15301_util_3v3\"]), #UTIL_3V3 p.90 (\"IC4\", 0x10, MAX15301, [1500, \"max15301_15_vdd_oct\"]),",
": [(0, 0)], vttddr24 : [(0, 0)], vttddr13 : [(0, 0)], io33 :",
"\"BUS\"), (\"IC13\", \"BUS\"), (\"IC4\", \"BUS\"), (\"IC11\", \"BUS\"), (\"U34\", \"BUS\"), (\"U37\", \"BUS\"), (\"U41\", \"BUS\"),",
"{\"VCC\", \"EN_VTT\", \"EN_PWR\"})]))) outputs.append(Constraint([(0, 0)], {\"VID\": [{0, 1}, {0, 1}, {0, 1}, {0,",
"set())), Constraint([{0}], {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 6000)]}, partial(Constraint.implicit, name, \"implicit_off\")), Constraint([{0}], {\"VCC\":",
": [(1200, 1200)], ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570, 630)], vttddr13 :",
"VMON9 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON9\", name)) VMON10 = Input([(0,",
"[], lambda node: node.indep(\"OUT0\"))], \"power\") OUT1 = Output([(0, 12000)], [([(0, 12000)], {}, [],",
"\"EN_PWR\")}), (\"vdd_ddrcpu13\", \"U26\", \"VOUT\", {(\"U20\", \"VMON9\"), (\"U27\", \"VBUS\"), (\"cpu\", \"VDD_DDR13\"), (\"U24\", \"VRI\")}), (\"vdd_ddrcpu24\",",
"{(\"U20\", \"VCC\"), (\"U35\", \"VCC\")}), #add 5vsb if added bmc (\"12v_cpu0_psup\", \"psu_cpu0\", \"OUT\", {(\"U20\",",
": [{0}]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\")), ], \"power\", Wire.ir_set) VOUT_2 = lambda _, thresh",
"\"logical\") EN_2 = Input([{0, 1}], \"logical\") VIN = Input([(0, 13200)], \"power\") VOUT =",
"[(0, 0), (3300, 3300), (0, 0)], vdd : [(0, 0)], vdd09 : [(0,",
"self.l1_addr = l1_addr self.l2_addr = l2_addr self.implicit_off = self.implicit_off(threshold) self.implicit_off_2 = self.implicit_off_2(threshold) self.VOUT",
"except State_Space_Error: self.is_default = False try: intersect(states[self.VDD33.name], [(0, 2800)]) self.current = [(self.default, self.default)]",
"ThunderX_EVAL3, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP, []), (\"U25\", 0x0, NCP,",
"[(0, 0)], ok : [{0}] }, { \"POWERED_ON\" : [ ({mgtaux_l : [(0,",
"2600)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr):",
"95 (\"IC5\", 0x0, MAX8869, [1800]), #MGTVCCAUX_L, p 88 (\"IC6\", 0x0, MAX8869, [1800]), #MGTVCCAUX_R,",
"\"<V_IN> V_IN\" V_IN = Input([(0, 12000)], \"power\") def __init__(self, name, bus_addr): super(PowerConsumer, self).__init__(name,",
"def construct_req(self, inputs): req = {} for node, _ in inputs: unite_dict(req, node.bus_req())",
"set())])), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\")),",
"> 2700 and states[node.VS.name][0][0] < 5500: commands = node.configure() commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s',",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5_ATT\", name)) VMON6 = Input([(0, 5734)], \"monitor\", lambda",
"name: node.isppac_monitor(\"VMON10\", name)) VMON11 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11\", name))",
"{\"VDD\": [(2600, 3600)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(2600, 3600)]})), Constraint([(0, 0), (3300, 3300), (0,",
"[(2400, 2600)], vcc_ddr13 : [(1140, 3400)], vcc_ddr24 : [(1140, 3400)], vtt_ddr13 : [(550,",
"{\"EN\": [{1}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V5_PSU\",",
"__init__(self, name, bus_addr): super(Bus, self).__init__(name, bus_addr, Bus) def construct_req(self, inputs): req = {}",
"return (False, \"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095 * list(value[0])[0],",
"\"logical\") PLL_REF_CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") VDD = Input([(0,",
"[(0, 0)], vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0, 0)], vadj: [(0, 0)]},",
": [{1}]}, \"\"), ({en2 : [{1}], vdd: [(2000, 2600)]}, \"\") ], \"POWERED_ON\": []})",
"multiplier = 1): def fun(value, _, node=self, wire_name=wire_name, pinname=pinname, multiplier=multiplier): commands = node.configure()",
"(\"U25\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR24\")}), (\"vtt_ddrcpu24\", \"U25\", \"VREF\", {(\"U20\", \"VMON12\"), (\"cpu\", \"VTT_DDR24\")}), (\"pll_ref_clk\", \"U11\",",
"import z3 class INA226(Node): BUS = Input([{0, 1}], \"bus\") VS = Input([(0, 6000)],",
"p 71 (\"fpga\", 0x0, FPGA_EVAL3, []), (\"cpu\", 0x0, ThunderX_EVAL3, []), (\"bmc\", 0x0, BMC,",
"\"clock\") CLK_OK = Input([{0, 1}], \"logical\") VCCO_2V5_DDR24 = Input([(0, 3400)], \"power\") VCCO_2V5_DDR13 =",
"\"EN\")}), (\"vcc1v8_fpga\", \"IC12\", \"V_OUT\", {(\"U35\", \"VMON11_ATT\"), (\"fpga\", \"VCCAUX\")}), (\"en_util33\", \"U35\", \"OUT6\", {(\"IC10\", \"EN\")}),",
"= Output([(5000, 5000)], [Constraint([(5000, 5000)], {}, partial(Constraint.implicit, \"V5SB_PSU\", {}))], \"power\") BMC_VCC_3V3 = Output([(3300,",
"Input([{0, 1}], \"logical\") EN_2 = Input([{0, 1}], \"logical\") VIN = Input([(0, 13200)], \"power\")",
"node_req.items(): if not wire in req: req[wire] = state else: req[wire] = state_union(state,",
"\"POWERED_DOWN\", ThunderX) #EVAL 3 version of the THUNDERX: Comments indicate changes class ThunderX_EVAL3(Stateful_Node):",
"state_union(state, req[wire]) print(req) return req def construct_dependency(self, name, req): return (SET.Implicit, [set(), set(),",
"(True, \"\\n\".join(commands)) return fun def configure(self): if self.configured: return [] else: self.configured =",
"\"U35\", \"OUT12\", {(\"IC6\", \"SHDN\")}), (\"mgtvccaux_l\", \"IC5\", \"V_OUT\", {(\"U35\", \"VMON8\"), (\"fpga\", \"MGTVCCAUX_L\")}), (\"mgtvccaux_r\", \"IC6\",",
"class PowerConsumer(Node): node_string = \"<V_IN> V_IN\" V_IN = Input([(0, 12000)], \"power\") def __init__(self,",
"= Input([(0, 1900)], \"power\") VCCO_1V8 = Input([(0, 2000)], \"power\") #this is sys_1v8.... VCCINT",
"vcc_ddr13 : [(0, 0)], vcc_ddr24 : [(0, 0)], vtt_ddr13 : [(0, 0)], vtt_ddr24",
"name: node.isppac_monitor(\"VMON4\", name)) VMON4_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4_ATT\", name))",
"\"U34\", \"VOUT\", {(\"U20\", \"VMON4\"), (\"cpu\", \"VDD\")}), (\"vdd_oct_en_l2\", \"U20\", \"OUT7\", {(\"U34\", \"EN_2\")}), (\"0v9_vdd_oct\", \"U34\",",
"({mgtavcc : [(0, 0)]}, \"\"), ({vcco: [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24",
"try: intersect(states[self.V_PWR.name], [(0, 4400)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass class",
"ddr13_2v5, vttddr24, vttddr13, io33 : { #pll_vdd, pll_ddr2, pll_ddr13, sys_pll_ddr : { \"POWERED_DOWN\"",
"(\"IC12\", \"BUS\"), (\"IC13\", \"BUS\"), (\"IC4\", \"BUS\"), (\"IC11\", \"BUS\"), (\"U34\", \"BUS\"), (\"U37\", \"BUS\"), (\"U41\",",
"= \"power\" BUS = Input([{0, 1}], \"bus\") #loop 1 and loop 2 will",
"vtt : [(0, 0)], mgtaux_l : [(0, 0)], mgtaux_r : [(0, 0)], mgtavcc",
"[{1}]}, \"\") ], \"POWERED_ON\" : []} )}, [\"CLK\", \"CLK_OK\", \"VCCINT\", \"VCCINT_IO\", \"VCCAUX\", \"VCCO_1V8\",",
"= Input([(0, 1650)], \"power\") VDD_DDR13 = Input([(0, 1650)], \"power\") VDD_2V5_DDR13 = Input([(0, 3300)],",
"req, states), req.keys()))}) class MAX20751(Node): implicit_on = {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]}",
"CLK = Output([(0, 3300), (0, 60)], [([(0, 3300), (0, 60)], {}, [], lambda",
": PowerState({vdd : [(0, 0)], en1 : [{0}], en2 : [{0}]}, { \"POWERED_ON\":",
"bus_addr, PSU) class Main_PSU(Node): EN = Input([{0, 1}], \"logical\") V33_PSU = Output([(0, 3300)],",
"= Input([(0, 6000)], \"power\") #reference input VREF = Output([(0, 6000)], [Constraint([(435, 1800)], {\"VRI\"",
"(\"U41\", \"VDDH\"), (\"IC10\", \"V_PWR\"), (\"IC11\", \"V_PWR\"), (\"IC12\", \"V_PWR\"), (\"U43\", \"EN_VTT\"), (\"U47\", \"EN_VTT\"), (\"U51\",",
"= lambda default : Output([(0, 1520)], [Constraint([], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]},",
"_ in inputs: node_req = node.bus_req_off() for wire, state in node_req.items(): if not",
"= node.configure() if list(value[0])[0] == 0: commands.append(\"wait_for_voltage('%s', v_min=0, v_max=0.08, device='%s', monitor='%s')\" % (",
"_, default, thresh: Output([(0, thresh)], [Constraint([(default, default)], {\"V_IN\" : [(max(thresh + 500, 2700),",
"MAX20751.implicit_on), state_update= Constraint.default_state), Constraint([(500, 1520)], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)], \"BUS\" :",
"bus_addr, SI5395) def update(self, states): try: intersect(states[self.VDD.name], [(2600, 3600)]) except State_Space_Error: self.configured =",
"\"VCC\" : [(0, 2374)]} VCC = Input([(0, 6000)], \"power\") VRI = Input([(0, 6000)],",
": [(0, 0)], vcco : [(0, 0)], vadj : [(0, 0)], vcc_2v5_ddr13 :",
"= {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]} BUS = Input([{0, 1}], \"bus\") EN =",
"print(req) return req def construct_req_off(self, inputs): req = {} for node, _ in",
"[(0, 0), (3300, 3300), (50, 50)]} implicit_off = {\"VDD\": [(0, 2599)], \"CLK_IN\": [(0,",
"[(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN\" : [{1}], \"BUS\": [{1}]}, {}, dependency_update",
"###################### ({ddr24 : [(1200, 1200)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1200, 1200)],",
"3630)], \"VIN\" : [(0, 13200)], \"EN\" : [{0}]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\")), ], \"power\",",
"3040)], {\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN_2\" : [{1}], \"BUS\"",
"now\" %vdd), ({ok : [{1}]}, \"must have written pll_mul and sys_pll_mul beforehand\"), ({rst",
"= False class IR(Node): implicit_off = lambda _, thresh : {\"VCC\" : [(0,",
"\"power\") MGTAVTT = Input([(0, 1300)], \"power\") VCCINT_IO = Input([(0, 1000)], \"power\") VCCAUX =",
"\"U35\", \"OUT11\", {(\"IC5\", \"SHDN\")}), (\"en_mgtvccaux_r\", \"U35\", \"OUT12\", {(\"IC6\", \"SHDN\")}), (\"mgtvccaux_l\", \"IC5\", \"V_OUT\", {(\"U35\",",
"(\"12v_cpu1_psup\", \"psu_cpu1\", \"OUT\", {(\"U35\", \"VMON1_ATT\"), (\"U37\", \"VDDH\"), (\"U41\", \"VDDH\"), (\"IC10\", \"V_PWR\"), (\"IC11\", \"V_PWR\"),",
"\"VS\"), (\"U31\", \"VS\"), (\"IC15\", \"V_IN\"), (\"IC16\", \"V_IN\"), (\"cpu\", \"VDD_IO33\"), (\"U34\", \"VCC\"), (\"U57\", \"VDD\"),",
"800)], vttddr13 : [(700, 800)], io33 : [(3140, 3460)], }, { \"POWERED_DOWN\": [",
"1400)], \"power\") VTT_DDR13 = Input([(0, 1400)], \"power\") VDD_IO33 = Input([(0, 3600)], \"power\") states",
"(50, 50)], vdd : [(940, 980)], vdd09 : [(870, 930)], vdd15 : [(1450,",
"[(3140, 3460)]}, \"wait for %s to stabilize\" %(io33)), ({vdd : [(940, 980)]}, \"\"),",
"\"OUT\", {(\"U20\", \"VMON1_ATT\"), (\"IC4\", \"V_PWR\"), (\"U26\", \"EN_VTT\"), (\"U30\", \"EN_VTT\"), (\"U34\", \"VIN\")}), (\"12v_cpu1_psup\", \"psu_cpu1\",",
"partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"V_EN\" : [{0}]}, partial(Constraint.implicit,",
"1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ##################################### ({mgtavcc : [(873,",
"\"EN_2\")}), (\"0v9_vdd_oct\", \"U34\", \"VOUT_2\", {(\"U20\", \"VMON5\"), (\"cpu\", \"VDD_09\")}), (\"en_1v5_vdd_oct\", \"U20\", \"OUT8\", {(\"IC4\", \"EN\")}),",
": [(0, 0)], mgtavcc : [(0, 0)], ok : [{0}] }, { \"POWERED_ON\"",
"def bus_req(self): return {self.VIN.name : [(self.threshold, 13200)], self.VCC.name : [(2900, 3630)]} def bus_req_off(self):",
"1.06) - 1, 2699)))], \"V_EN\" : [{0}]} V_IN = Input([(0, 6000)], \"power\") V_EN",
"1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1},",
"1}], {}, partial(Constraint.explicit, \"C_RESET_N\", set(), set()))], \"logical\", Wire.gpio_set) C_PLL_DC_OK = Output([{0, 1}], [Constraint([{0,",
"{\"V_IN\" : [(max(thresh + 500, 2700), 5500)], \"SHDN\" : [{1}]} implicit_off = lambda",
": [{0}]} implicit_off_2 = lambda _, thresh : {\"VCC\" : [(0, 2500)], \"VIN\"",
"0)]} VDD = Input([(0, 3600)], \"power\") CLK_IN = Input([(0, 0), (3300, 3300), (0,",
"= False try: intersect(states[self.V_PWR.name], [(0, 4400)]) self.current = [(self.default, self.default)] return except State_Space_Error:",
"[(870, 930)], vdd15 : [(1450, 1550)], ddr13 : [(1425, 1575)], ddr24: [(1425, 1575)],",
"self.current = [(default, default)] self.V_OUT = MAX15301.V_OUT(default) super(MAX15301, self).__init__(name, bus_addr, MAX15301) def bus_req(self):",
"vttddr13 : [(0, 0)], io33 : [(0, 0)] }, { \"POWERED_ON\" : [",
"in range(8 - len(binary))) for i in binary: multidim.append({int(i)}) return multidim def isl_outputs():",
"0)], {\"VR_ON\": [{0}], \"VDD33\":[(0, 3630)], \"VDDH\":[(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_off)), ], \"power\", Wire.voltage_set)",
"have stabilized by now\" %vdd), #### FOR EVAL 3 ###################### ({ddr24 : [(1200,",
": [{1}]}, \"\") ], \"POWERED_ON\" : [] }) }, [\"PLL_DC_OK\", \"CHIP_RESET_L\", \"PLL_REF_CLK\", \"VDD\",",
"vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0, 0)], vadj: [(0, 0)]}, \"\"), ({aux",
"lambda node, name: node.isppac_monitor(\"VMON5\", name)) VMON5_ATT = Input([(0, 5734)], \"monitor\", lambda node, name:",
"{\"VCC\", \"VIN\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\", \"EN\"},",
"fpga boot sequ; filled in like VCCO_VCC_DDR voltages MGTVCCAUX_L = Input([(0, 1900)], \"power\")",
"{\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN\" : [{1}], \"BUS\": [{1}]},",
"\"POWERED_ON\" : PowerState({ clk: [(0, 0), (3300, 3300), (50, 50)], vcc : [(873,",
"self.is_default = False self.threshold = threshold self.device = device self.loop1 = loop1 self.loop2",
"= Input([(0, 1000)], \"power\") MGTAVTT = Input([(0, 1300)], \"power\") VCCINT_IO = Input([(0, 1000)],",
"(3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50,",
"##### vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], ############################# vtt_ddr13 : [(550,",
"[Constraint([(5000, 5000)], {}, partial(Constraint.implicit, \"V5SB_PSU\", {}))], \"power\") BMC_VCC_3V3 = Output([(3300, 3300)], [Constraint([(3300, 3300)],",
"{(\"IC10\", \"EN\")}), (\"util33\", \"IC10\", \"V_OUT\", {(\"U35\", \"VMON3_ATT\"), (\"U44\", \"VS\"), (\"U48\", \"VS\"), (\"U37\", \"VDD33\"),",
"(\"clock_flol\", \"bmc\", \"B_CLOCK_FLOL\", {(\"fpga\", \"CLK_OK\")}), (\"fpga_clk\", \"U16\", \"CLK\", {(\"fpga\", \"CLK\")}), (\"en_mgtvccaux_l\", \"U35\", \"OUT11\",",
"[(0, max(thresh + 499, 2699))], \"SHDN\" : [{0}]} V_IN = Input([(0, 6000)], \"power\")",
"\"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\", \"EN\"}, set())])), Constraint([(0,",
"io : [(873, 927)], aux : [(1746, 1854)], vcco : [(1746, 1854)], vadj",
"VMON6 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON6\", name)) VMON7 = Input([(0,",
"p 71 (\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA, []),",
"13200)], self.VCC.name : [(2900, 3630)]} def bus_req_off(self): return {self.VIN.name : [(0, self.threshold-1)], self.VCC.name",
"self).__init__(name, bus_addr, NCP) class MAX8869(Node): implicit_on = lambda _, thresh: {\"V_IN\" : [(max(thresh",
"\"power\") BUS = Input([{0, 1}], \"bus\") VDDH = Input([(0, 23000)], \"power\") #slave input",
"super(MAX15053, self).__init__(name, bus_addr, MAX15053) def binary_multidimensional(decimal): binary = bin(decimal)[2:] #remove 0b prefix multidim",
"now\" %vdd), ###################################### ({ok : [{1}]}, \"must have written pll_mul and sys_pll_mul beforehand\"),",
"until \" + vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"), ({en2 :",
"Input([(0, 6000)], \"power\") V_EN = Input([{0, 1}], \"logical\") V_OUT = lambda _, default,",
"[{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN_2\"}, after_set =",
"= self.VOUT_2(threshold) super(IR, self).__init__(name, bus_addr, IR) def bus_req(self): return {self.VIN.name : [(self.threshold, 13200)],",
"pll_mul and sys_pll_mul beforehand\"), ({rst : [{1}]}, \"\") ], \"POWERED_ON\" : [] })",
"Constraint.default_state), Constraint([(500, 1520)], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)], \"BUS\" : [{1}]}, {},",
"__init__(self, name, bus_addr, device): self.device = device super(INA226, self).__init__(name, bus_addr, INA226) self.configured =",
"\"SHDN\")}), (\"en_mgtvccaux_r\", \"U35\", \"OUT12\", {(\"IC6\", \"SHDN\")}), (\"mgtvccaux_l\", \"IC5\", \"V_OUT\", {(\"U35\", \"VMON8\"), (\"fpga\", \"MGTVCCAUX_L\")}),",
"[] } ), \"POWERED_ON\" : PowerState({ ok : [{1}], rst : [{1}], clk",
"1}], \"logical\") PLL_REF_CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") VDD =",
": [(570, 630)]}, \"%s should have stabilized by now\" %vdd), ###################################### ({ok :",
"], \"POWERED_DOWN\" : [] } ), \"POWERED_ON\" : PowerState({ ok : [{1}], rst",
"3300), (0, 0)]} VDD = Input([(0, 3600)], \"power\") CLK_IN = Input([(0, 0), (3300,",
"vdd : [(0, 0)], en1 : [{0}], en2 : [{1}]}, { \"POWERED_ON\": [",
"12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\" : [{1}]})), Constraint([(0,",
": [{1}] }, { \"POWERED_DOWN\" : [ ({clk: [(0, 0), (3300, 3300), (50,",
"Input([{0, 1}], \"logical\") PLL_REF_CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") VDD",
"stabilized by now\" %vdd), #### FOR EVAL 3 ###################### ({ddr24 : [(1200, 1200)],",
"\"CHIP_RESET_L\")}), (\"pll_dc_ok\", \"bmc\", \"C_PLL_DC_OK\", {(\"cpu\", \"PLL_DC_OK\")}), (\"en_vdd_ddrcpu13\", \"U20\", \"OUT11\", {(\"U26\", \"EN_PWR\")}), (\"en_vdd_ddrcpu24\", \"U20\",",
"6000)], [Constraint([(435, 1800)], {\"VRI\" : [(868, 3600)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\",",
"\"init_device('%s', False)\" % (self.device) ] class Clock(Node): CLK = Output([(0, 3300), (0, 60)],",
"output = Output([{0, 1}], [Constraint([{1, 0}], {\"VCC\": [(2800, 3960)], \"VCC_IN\": [(2250, 5500)]}, partial(Constraint.explicit,",
"adjusted it to match the schematics, so I know which supplies to connect..",
"MAX8869) class MAX15053(Node): implicit_on = lambda _, threshold: {\"V_IN\" : [(max(int(threshold * 1.06),",
"4400)]} implicit_on = {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]} BUS = Input([{0, 1}], \"bus\")",
"{ \"POWERED_ON\" : [ ({ok : [{0}]}, \"\") ], \"POWERED_DOWN\" : [] }",
"partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\", \"EN_2\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)],",
"\"EN\")}), (\"vadj_1v8_fpga\", \"IC11\", \"V_OUT\", {(\"fpga\", \"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\", \"U35\", \"OUT13\", {(\"IC13\", \"EN\")}), (\"vccintio_bram_fpga\", \"IC13\",",
": [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN\" : [{0}]} implicit_off_2 = lambda",
"Constraint([(500, 1520)], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON5\", name)) VMON5_ATT = Input([(0, 5734)], \"monitor\", lambda node,",
"\"VCC_IN\")}), (\"bmc_vcc_3v3\", \"main_psu\", \"BMC_VCC_3V3\", {(\"U20\", \"VCC\"), (\"U35\", \"VCC\")}), #add 5vsb if added bmc",
"state_update = Constraint.default_state), Constraint([(600, 5250)], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)], \"BUS\" : [{1}]},",
": binary_multidimensional(i + 2)}, after_set={\"EN_PWR\"}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"}), partial(Constraint.implicit, \"VOUT\", {\"VID\" :",
"\"logical\", Wire.gpio_set) C_RESET_N = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_RESET_N\", set(), set()))],",
"* 1.06), 2700), 5500)], \"V_EN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\"",
"\"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX_EVAL3, self).__init__(name, bus_addr,",
"State_Space_Error: pass class NCP(Node): implicit_on = {\"VRI\" : [(868, 3600)], \"VCC\" : [(2375,",
"\"EN\")}), (\"util33\", \"IC10\", \"V_OUT\", {(\"U35\", \"VMON3_ATT\"), (\"U44\", \"VS\"), (\"U48\", \"VS\"), (\"U37\", \"VDD33\"), (\"U41\",",
"from sequence_generation import Node, Input, Output, Constraint, Wire, PowerState, Stateful_Node, intersect, State_Space_Error, unite_dict,",
"(\"U27\", 0x44, INA226, [\"ina226_ddr_cpu_13\"]), (\"U31\", 0x45, INA226, [\"ina226_ddr_cpu_24\"]), #TODO: add real names of",
"by the cpu, but the connected DIMM SPD needs 2.5 V ddr13_2v5 :",
"1300)], \"power\") VCCINT_IO = Input([(0, 1000)], \"power\") VCCAUX = Input([(0, 2000)], \"power\") states",
"bus_req_off(self): return {} def update(self, states): try: intersect(states[self.V_OUT.name], [(500, 1520)]) self.current = states[self.V_OUT.name]",
"\"EN_PWR\" : [{0}], \"EN_VTT\" : [(0, 14000)]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\"))) return outputs class",
"1236)]}, \"\"), ({mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746, 1854)]}, \"\"), ({ok: [{1}]},",
"\"U39\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR13\")}), (\"en_sys_2v5_24\", \"U35\", \"OUT8\", { (\"IC9\", \"V_EN\")}), (\"sys_2v5_24\", \"IC9\", \"V_OUT\",",
"wire_name=wire_name, pinname=pinname, multiplier=multiplier): commands = node.configure() if list(value[0])[0] == 0: commands.append(\"wait_for_voltage('%s', v_min=0, v_max=0.08,",
"[] }) }, [\"PLL_DC_OK\", \"CHIP_RESET_L\", \"PLL_REF_CLK\", \"VDD\", \"VDD_09\", \"VDD_15\", \"VDD_DDR24\", \"VDD_2V5_DDR24\", \"VDD_DDR13\", \"VDD_2V5_DDR13\",",
"Wire, PowerState, Stateful_Node, intersect, State_Space_Error, unite_dict, state_union, SET, empty_intersection import math from functools",
"name, bus_addr): super(ThunderX_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX_EVAL3) class Bus(Node): BUS = Output([{0, 1}],",
"False)\" % (self.device) ] class Clock(Node): CLK = Output([(0, 3300), (0, 60)], [([(0,",
"0)], ddr24 : [(0, 0)], ddr24_2v5 : [(0, 0)], ddr13 : [(0, 0)],",
"in req: req[wire] = state else: req[wire] = state_union(state, req[wire]) print(req) return req",
"0x0, FPGA, []), (\"cpu\", 0x0, ThunderX, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0,",
"\"OUT9\", {(\"U51\", \"VR_ON\")}), (\"vccint_fpga\", \"U51\", \"V_OUT\", {(\"U35\", \"VMON6\"), (\"fpga\", \"VCCINT\")}), (\"en_sys_1v8\", \"U35\", \"OUT16\",",
"\"U20\", \"OUT8\", {(\"IC4\", \"EN\")}), (\"1v5_vdd_oct\", \"IC4\", \"V_OUT\", {(\"U20\", \"VMON6\"), (\"cpu\", \"VDD_15\")}), (\"en_2v5_cpu13\", \"U20\",",
"[{0}], clk : [(0, 0), (3300, 3300), (0, 0)], vdd : [(0, 0)],",
"(0, 50)], \"clock\") CLK = Output([(0, 0), (3300, 3300), (0, 50)], [ Constraint([(0,",
"3600)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_on\"), complex_constraints= [(lambda x1, x2: z3.Or(x1",
"= True return except State_Space_Error: self.is_default = False try: intersect(states[self.VDD33.name], [(0, 2800)]) self.current",
"[(1746, 1845)]}, \"\"), ##### FOR EVAL 3: ################### ({vcco : [(1746, 1854)], vcc_2v5_ddr13",
"math.floor(1600 - i * 6.25) voltage_max = math.ceil(1600 - i * 6.25) outputs.append(",
"{\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{0}]}))], \"power\") V12_PSU =",
"\"\"), ({vcco: [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 : [(0, 0)], vcc_ddr13",
"V_OUT = lambda default : Output([(0, 1520)], [Constraint([], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500,",
"set()))], \"logical\", Wire.vid_set) def __init__(self, name, bus_addr): super(BMC, self).__init__(name, bus_addr, BMC) self.configured =",
"#EVAL 3 version of the FPGA, comments indicate changes class FPGA_EVAL3(Stateful_Node): CLK =",
"1}], \"logical\") EN_VTT = Input([(0, 12000)], \"power\") VID = Input([{0, 1}, {0, 1},",
"- len(binary))) for i in binary: multidim.append({int(i)}) return multidim def isl_outputs(): outputs =",
"V_IN = Input([(0, 6000)], \"power\") V_EN = Input([{0, 1}], \"logical\") V_OUT = lambda",
"1}, {0, 1}, {0, 1}, {0, 1}], \"logical\") VOUT = Output([(0, 1600)], isl_outputs(),",
"vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 : [(0, 0)], vcc_ddr13 : [(0, 0)], vcc_ddr24",
"\"OUT\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{0}]}))], \"power\") def",
"12000)], {}, [], lambda node: node.indep(\"OUT0\"))], \"power\") OUT1 = Output([(0, 12000)], [([(0, 12000)],",
"[1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA p 87 (\"U51\", 0x70, MAX20751, [900, \"max20751_vccint_fpga\"]), #VCCINT_FPGA p 97 (\"U43\",",
"NCP nodes (\"vtt_ddrfpga13\", \"U39\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR13\")}), (\"en_sys_2v5_24\", \"U35\", \"OUT8\", { (\"IC9\", \"V_EN\")}),",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7_ATT\", name)) VMON8 = Input([(0, 5734)],",
"[(default, default)] self.V_OUT = MAX20751.V_OUT(default) super(MAX20751, self).__init__(name, bus_addr, MAX20751) def bus_req(self): return {}",
"MAX20751) def bus_req(self): return {} def bus_req_off(self): return {} def update(self, states): try:",
"State_Space_Error: self.is_default = False class IR(Node): implicit_off = lambda _, thresh : {\"VCC\"",
"FPGA_EVAL3) class ThunderX(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0, 1}], \"logical\")",
"number): name = \"OUT\" + str(number) output = Output([{0, 1}], [Constraint([{1, 0}], {\"VCC\":",
"= MAX15301.V_OUT(default) super(MAX15301, self).__init__(name, bus_addr, MAX15301) def bus_req(self): return {self.V_PWR.name: [(5500, 14000)]} def",
"\"power\") VDD_IO33 = Input([(0, 3600)], \"power\") states = (lambda ok, rst, clk, vdd,",
"set()], lambda states, req = req: {name : set(filter(lambda x: not empty_intersection(x, req,",
"1000)], \"power\") VCCAUX = Input([(0, 2000)], \"power\") states = (lambda clk, ok, vcc,",
"{(\"U20\", \"VMON9\"), (\"U27\", \"VBUS\"), (\"cpu\", \"VDD_DDR13\"), (\"U24\", \"VRI\")}), (\"vdd_ddrcpu24\", \"U30\", \"VOUT\", {(\"U20\", \"VMON10\"),",
"[(0, 0)], io : [(0, 0)], aux : [(0, 0)], vcco : [(0,",
"], \"bus\") def __init__(self, name, bus_addr): super(Bus, self).__init__(name, bus_addr, Bus) def construct_req(self, inputs):",
"\"CLK\")}), (\"en_mgtvccaux_l\", \"U35\", \"OUT11\", {(\"IC5\", \"SHDN\")}), (\"en_mgtvccaux_r\", \"U35\", \"OUT12\", {(\"IC6\", \"SHDN\")}), (\"mgtvccaux_l\", \"IC5\",",
"vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd33: [(0, 0)], vdd : [(0, 0)],",
"3460)]}, \"wait for %s to stabilize\" %(io33)), ({vdd : [(940, 980)]}, \"\"), ({vdd09",
"1200)], vcc_ddr24 : [(1200, 1200)], ############################# vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550,",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON4_ATT\", name)) VMON5 = Input([(0, 5734)], \"monitor\", lambda node,",
"\\ {\"VID\" : binary_multidimensional(i + 2), \"VCC\" : [(4750, 5250)], \"EN_PWR\" : [{1}],",
"1}], {}, partial(Constraint.explicit, \"B_FDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) B_CDV_1V8 = Output([{0, 1}, {0,",
"ok, vcc, io, aux, vcco, vadj, vcc_2v5_ddr13, vcc_2v5_ddr24, vcc_ddr13, vcc_ddr24, vtt_ddr13, vtt_ddr24, vtt,",
"Input([(0, 3600)], \"power\") CLK_IN = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK",
"add real names of MAX15301s (\"IC12\", 0x11, MAX15301, [1800, \"max15301_vcc1v8_fpga\"]), #VCCIV8_FPGA p 92",
"\"OUT11\", {(\"IC5\", \"SHDN\")}), (\"en_mgtvccaux_r\", \"U35\", \"OUT12\", {(\"IC6\", \"SHDN\")}), (\"mgtvccaux_l\", \"IC5\", \"V_OUT\", {(\"U35\", \"VMON8\"),",
"\"V_OUT\", {(\"U35\", \"VMON9\"), (\"fpga\", \"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\", \"U35\", \"OUT17\", {(\"IC11\", \"EN\")}), (\"vadj_1v8_fpga\", \"IC11\", \"V_OUT\",",
"MAX15301, [3300, \"max15301_util_3v3\"]), #UTIL_3V3 p.90 (\"IC4\", 0x10, MAX15301, [1500, \"max15301_15_vdd_oct\"]), #1V5_VDD_OCT p.70 (\"IC11\",",
"}, { \"POWERED_ON\" : [ ({ok : [{0}]}, \"\") ], \"POWERED_DOWN\" : []",
"bus_addr): super(Oscillator, self).__init__(name, bus_addr, Oscillator) class SI5395(Node): implicit_on = {\"VDD\": [(2600, 3600)], \"CLK_IN\":",
"device='%s', monitor='%s')\" % ( wire_name, 0.00095 * list(value[0])[0] * multiplier, 0.00105 * list(value[0])[0]",
"( wire_name, 0.00095 * list(value[0])[0] * multiplier, 0.00105 * list(value[0])[0] * multiplier, node.device,",
"[(873, 927)], io : [(873, 927)], aux : [(1746, 1854)], vcco : [(1746,",
"Bus, []), (\"psu_cpu0\", 0x0, PSU, []), (\"psu_cpu1\", 0x0, PSU, []), (\"main_psu\", 0x0, Main_PSU,",
"{(\"U43\", \"EN_PWR\")}), (\"en_vdd_ddrfpga24\", \"U35\", \"OUT19\", {(\"U47\", \"EN_PWR\")}), (\"vdd_ddrfpga13\", \"U43\", \"VOUT\", {(\"U48\", \"VBUS\"), (\"fpga\",",
"\"clock\") CLK = Output([(0, 0), (3300, 3300), (0, 50)], [ Constraint([(0, 0), (3300,",
"[(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\")), Constraint([(0, 0)], { \"VRI\" : [(0, 3600)], \"VCC\"",
"MAX15053(Node): implicit_on = lambda _, threshold: {\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)],",
"def __init__(self, name, bus_addr): super(Clock, self).__init__(name, bus_addr, Clock) class PowerConsumer(Node): node_string = \"<V_IN>",
"2000)]} VMON1_ATT = Input([(0, 13900)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON1_ATT\", name, 0.4125)) VMON2_ATT",
"__init__(self, name, bus_addr): super(Oscillator, self).__init__(name, bus_addr, Oscillator) class SI5395(Node): implicit_on = {\"VDD\": [(2600,",
"(\"U39\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR13\")}), #add NCP nodes (\"vtt_ddrfpga13\", \"U39\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR13\")}), (\"en_sys_2v5_24\",",
"{(\"IC10\", \"BUS\"), (\"IC12\", \"BUS\"), (\"IC13\", \"BUS\"), (\"IC4\", \"BUS\"), (\"IC11\", \"BUS\"), (\"U34\", \"BUS\"), (\"U37\",",
"Input([(0, 23000)], \"power\") #slave input power VR_ON = Input([{0, 1}], \"logical\") V_OUT =",
"\"EN_PWR\")}), (\"en_vdd_ddrfpga24\", \"U35\", \"OUT19\", {(\"U47\", \"EN_PWR\")}), (\"vdd_ddrfpga13\", \"U43\", \"VOUT\", {(\"U48\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR13\"),",
"\"VCC\"), (\"cpu\", \"VDD_2V5_DDR13\")}), (\"vtt_ddrcpu13\", \"U24\", \"VREF\", {(\"U20\", \"VMON11\"), (\"cpu\", \"VTT_DDR13\")}), (\"en_2v5_cpu24\", \"U20\", \"OUT10\",",
"[(1425, 1575)], #ddr24: [(1425, 1575)], ####### FOR EVAL 3 ########### ddr24: [(1200, 1200)],",
"\"VDDH\":[(8500, 14000)]} implicit_off = {\"VR_ON\": [{0}], \"VDD33\": [0, 2800], \"VDDH\": [(0, 8499)]} VDD33",
"0.00105 * list(value[0])[0] * multiplier, node.device, pinname )) return (True, \"\\n\".join(commands)) return fun",
"0)], vcco : [(0, 0)], vadj : [(0, 0)], vcc_2v5_ddr13 : [(0, 0)],",
"[(870, 14000)]}, {}, \\ dependency_update= (Constraint.is_default, \\ [partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i +",
"partial(Constraint.explicit, \"CLK\", {\"VDD\", \"CLK_IN\"}, set())), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(2600,",
": PowerState({ ok : [{0}], rst : [{0}], clk : [(0, 0), (3300,",
"FOR EVAL 3 ########### ddr24: [(1200, 1200)], ddr13: [(1200, 1200)], ################################# ddr24_2v5 :",
"voltage_max = math.ceil(1600 - i * 6.25) outputs.append( Constraint( [(voltage_min, voltage_max)], \\ {\"VID\"",
"Oscillator, []), ] enzian_wires = [ (\"b_psup_on\", \"bmc\", \"B_PSUP_ON\", {(\"psu_cpu0\", \"EN\"), (\"psu_cpu1\", \"EN\"),",
"{(\"cpu\", \"CHIP_RESET_L\")}), (\"pll_dc_ok\", \"bmc\", \"C_PLL_DC_OK\", {(\"cpu\", \"PLL_DC_OK\")}), (\"en_vdd_ddrcpu13\", \"U20\", \"OUT11\", {(\"U26\", \"EN_PWR\")}), (\"en_vdd_ddrcpu24\",",
"0), (3300, 3300), (50, 50)]}, partial(Constraint.explicit, \"CLK\", {\"VDD\", \"CLK_IN\"}, set())), Constraint([(0, 0), (3300,",
"Constraint([{0}], lambda node, inputs: {}, lambda node, name, inputs: node.construct_dependency(name, node.construct_req_off(inputs))) ], \"bus\")",
"states, req = req: {name : set(filter(lambda x: not empty_intersection(x, req, states), req.keys()))})",
"(\"U16\", \"VDD\"), (\"oscillator\", \"VDD\")}), #(\"12v_psup\", \"main_psu\", \"V12_PSU\", {}), #add bmc (\"5v_psup\", \"main_psu\", \"V5_PSU\",",
"NCP(Node): implicit_on = {\"VRI\" : [(868, 3600)], \"VCC\" : [(2375, 5500)]} implicit_off =",
"868)], \"VCC\" : [(0, 2374)]} VCC = Input([(0, 6000)], \"power\") VRI = Input([(0,",
"[(0, 4300)], \"EN_PWR\" : [{0, 1}], \"EN_VTT\" : [(0, 830)]} VCC = Input([(0,",
"class INA226(Node): BUS = Input([{0, 1}], \"bus\") VS = Input([(0, 6000)], \"power\") VBUS",
"{}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN_2\"}, after_set = {\"EN_2\"}),",
"(\"b_cdv_1v8\", \"bmc\", \"B_CDV_1V8\", {(\"U26\", \"VID\"), (\"U30\", \"VID\")}), (\"b_fdv_1v8\", \"bmc\", \"B_FDV_1V8\", {(\"U43\", \"VID\"), (\"U47\",",
"states): try: intersect(states[self.V_OUT.name], [(500, 1520)]) self.current = states[self.V_OUT.name] self.is_default = True return except",
"{(\"IC16\", \"V_EN\")}), #to NCP (\"2v5_cpu24\", \"IC16\", \"V_OUT\", {(\"U20\", \"VMON8_ATT\"), (\"U25\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR24\")}),",
": [(1450, 1550)], ####### REGULAR VALUES ######### #ddr13 : [(1425, 1575)], #ddr24: [(1425,",
"try: intersect(states[self.VOUT.name], [(500, 3040)]) self.is_default = True except State_Space_Error: self.is_default = False try:",
"en2 : [{1}]}, { \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}),",
"super(IR, self).__init__(name, bus_addr, IR) def bus_req(self): return {self.VIN.name : [(self.threshold, 13200)], self.VCC.name :",
"Input([{0, 1}], \"logical\") EN2 = Input([{0, 1}], \"logical\") states = (lambda vdd, en1,",
"{}, partial(Constraint.explicit, \"B_FDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) B_CDV_1V8 = Output([{0, 1}, {0, 1},",
"bus_addr, MAX15053) def binary_multidimensional(decimal): binary = bin(decimal)[2:] #remove 0b prefix multidim = list({0}",
"self.configured = True return [ \"init_device('%s', False)\" % self.device, \"init_device('%s', False)\" % self.loop1,",
"= False self.threshold = threshold self.device = device self.loop1 = loop1 self.loop2 =",
"\"power\") VDD_09 = Input([(0, 945)], \"power\") VDD_15 = Input([(0, 1650)], \"power\") VDD_DDR13 =",
"\"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]} implicit_off = {\"VDD\": [(0, 2599)], \"CLK_IN\":",
"\"U34\", \"VOUT_2\", {(\"U20\", \"VMON5\"), (\"cpu\", \"VDD_09\")}), (\"en_1v5_vdd_oct\", \"U20\", \"OUT8\", {(\"IC4\", \"EN\")}), (\"1v5_vdd_oct\", \"IC4\",",
"FPGA_EVAL3(Stateful_Node): CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK_OK = Input([{0,",
"\"VTT_DDR24\")}), (\"pll_ref_clk\", \"U11\", \"CLK\", {(\"cpu\", \"PLL_REF_CLK\")}), (\"bus\", \"power_bus\", \"BUS\", {(\"IC10\", \"BUS\"), (\"IC12\", \"BUS\"),",
"##### FOR EVAL 3: ################### ({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)],",
"lambda node, name: node.isppac_monitor(\"VMON8_ATT\", name)) VMON9 = Input([(0, 5734)], \"monitor\", lambda node, name:",
"bus_addr, MAX8869) class MAX15053(Node): implicit_on = lambda _, threshold: {\"V_IN\" : [(max(int(threshold *",
"ISPPAC (\"U44\", 0x40, INA226, [\"ina226_ddr_fpga_24\"]), (\"U48\", 0x41, INA226, [\"ina226_ddr_fpga_13\"]), (\"U27\", 0x44, INA226, [\"ina226_ddr_cpu_13\"]),",
"V_IN = Input([(0, 12000)], \"power\") def __init__(self, name, bus_addr): super(PowerConsumer, self).__init__(name, bus_addr, PowerConsumer)",
"[(default, default)] self.V_OUT = MAX15301.V_OUT(default) super(MAX15301, self).__init__(name, bus_addr, MAX15301) def bus_req(self): return {self.V_PWR.name:",
"{(\"U35\", \"VMON8\"), (\"fpga\", \"MGTVCCAUX_L\")}), (\"mgtvccaux_r\", \"IC6\", \"V_OUT\", {(\"U35\", \"VMON9\"), (\"fpga\", \"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\", \"U35\",",
"= Input([(0, 2000)], \"power\") states = (lambda clk, ok, vcc, io, aux, vcco,",
"\"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd :",
"\"logical\") VCCO_2V5_DDR24 = Input([(0, 3400)], \"power\") VCCO_2V5_DDR13 = Input([(0, 3400)], \"power\") VCCO_VCC_DDR24 =",
"Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT1\"))], \"power\") OUT2 = Output([(0,",
"return [] else: self.configured = True return [ \"init_device('isl6334d_ddr_v', False)\" ] class CPU_3(Stateful_Node):",
"imposed by the cpu, but the connected DIMM SPD needs 2.5 V ddr13_2v5",
"1}], \"logical\") states = (lambda vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd :",
"[partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\",",
"0)], vdd : [(0, 0)], vdd09 : [(0, 0)], vdd15 : [(0, 0)],",
"mgtavcc : [(0, 0)], ok : [{0}] }, { \"POWERED_ON\" : [ ({mgtaux_l",
"[\"ina226_ddr_cpu_13\"]), (\"U31\", 0x45, INA226, [\"ina226_ddr_cpu_24\"]), #TODO: add real names of MAX15301s (\"IC12\", 0x11,",
"default)], [Constraint([(default, default)], {\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" : [{1}]},",
"1.01)) super(MAX15053, self).__init__(name, bus_addr, MAX15053) def binary_multidimensional(decimal): binary = bin(decimal)[2:] #remove 0b prefix",
"Input([(0, 4000)], \"power\") EN = Input([{0, 1}], \"logical\") EN_2 = Input([{0, 1}], \"logical\")",
"(Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\"}, {\"VR_ON\"}, after_set = {\"VR_ON\"}), partial(Constraint.explicit, \"V_OUT\", {\"VDD33\",",
"\"V_OUT\", {\"V_PWR\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\", \"EN\"}, set())])),",
"[(0, 2500)]} def update(self, states): try: intersect(states[self.VOUT.name], [(500, 3040)]) self.is_default = True except",
"node.bus_req()) print(req) return req def construct_req_off(self, inputs): req = {} for node, _",
"[(500, 1520)]) self.current = states[self.V_OUT.name] self.is_default = True return except State_Space_Error: self.is_default =",
"3300), (0, 50)], [ Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)],",
"{\"EN\": [{0}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(PSU, self).__init__(name,",
"wire_name, node.device, pinname )) else: commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='%s')\" % ( wire_name,",
"set())])), Constraint([(0, 0)], {\"EN\": [{0}], \"V_PWR\": [(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_off)) ], \"power\",",
"2374)]} VCC = Input([(0, 6000)], \"power\") VRI = Input([(0, 6000)], \"power\") #reference input",
"super(PowerSupply, self).__init__(name, bus_addr, PowerSupply) class ISPPAC(Node): implicit_off = {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0,",
"% (self.device) ] #EVAL 3 version of the Enzian nodes, only changes classes",
"(3300, 3300), (0, 0)], vcc : [(0, 0)], io : [(0, 0)], aux",
"[(1140, 3400)], ##### VALUES FOR EVAL 3: ##### vcc_ddr13 : [(1200, 1200)], vcc_ddr24",
"[(0, 0), (3300, 3300), (0, 0)], vcc : [(0, 0)], io : [(0,",
"[(1425, 1575)], ddr24: [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], #not imposed by the",
"0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return fun def configure(self): if",
"== 0: commands.append(\"wait_for_voltage('%s', v_min=0, v_max=0.08, device='%s', monitor='%s')\" % ( wire_name, node.device, pinname ))",
"(\"fpga\", \"MGTAVCC\")}), (\"en_vccint_fpga\", \"U35\", \"OUT9\", {(\"U51\", \"VR_ON\")}), (\"vccint_fpga\", \"U51\", \"V_OUT\", {(\"U35\", \"VMON6\"), (\"fpga\",",
"(\"mgtavtt_fpga\", \"U41\", \"V_OUT\", {(\"fpga\", \"MGTAVTT\")}), (\"en_mgtavcc_fpga\", \"U35\", \"OUT10\", {(\"U37\", \"VR_ON\")}), (\"mgtavcc_fpga\", \"U37\", \"V_OUT\",",
"return multidim def isl_outputs(): outputs = [] for i in range(0, 177): voltage_min",
": [(873, 927)], ok : [{1}] }, { \"POWERED_DOWN\" : [ ({clk: [(0,",
"(\"U40\", 0x0, NCP, []), (\"U57\", 0x0, SI5395, [\"clk_main\"]), (\"U11\", 0x0, SI5395, [\"clk_cpu\"]), (\"U16\",",
"default self.is_default = False self.current = [(default, default)] self.V_OUT = MAX15301.V_OUT(default) super(MAX15301, self).__init__(name,",
"[(0, 0)], ddr24_2v5 : [(0, 0)], ddr13 : [(0, 0)], ddr13_2v5 : [(0,",
"3400)], \"power\") VADJ_1V8 = Input([(0, 2000)], \"power\") #not found in fpga boot sequ;",
"bus_req(self): return {self.VIN.name : [(self.threshold, 13200)], self.VCC.name : [(2900, 3630)]} def bus_req_off(self): return",
"3630)], \"VDDH\":[(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_off)), ], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr,",
"en1 : [{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({en2 : [{1}]}, \"\"),",
"\"V_OUT\", {(\"U35\", \"VMON6\"), (\"fpga\", \"VCCINT\")}), (\"en_sys_1v8\", \"U35\", \"OUT16\", {(\"IC7\", \"V_EN\")}), (\"sys_1v8\", \"IC7\", \"V_OUT\",",
"\"EN_VTT\" : [(0, 830)]} VCC = Input([(0, 6000)], \"power\") EN_PWR = Input([{0, 1}],",
"[(1140, 3400)], #vcc_ddr24 : [(1140, 3400)], ##### VALUES FOR EVAL 3: ##### vcc_ddr13",
"output) def isppac_monitor(self, pinname, wire_name, multiplier = 1): def fun(value, _, node=self, wire_name=wire_name,",
"vcc_ddr24 : [(1200, 1200)], ############################# vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)],",
"0x10, MAX15301, [1500, \"max15301_15_vdd_oct\"]), #1V5_VDD_OCT p.70 (\"IC11\", 0x12, MAX15301, [1800, \"max15301_vadj_1v8\"]), #VADJ_1V8 p.91",
"[{1}], \"BUS\": [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN\"},",
"[{1}]}, \"\"), ({vdd: [(2000, 2200)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD33\", \"VDD\", \"EN1\",",
"\"POWERED_DOWN\" : PowerState({ clk: [(0, 0), (3300, 3300), (0, 0)], vcc : [(0,",
"{(\"cpu\", \"PLL_REF_CLK\")}), (\"bus\", \"power_bus\", \"BUS\", {(\"IC10\", \"BUS\"), (\"IC12\", \"BUS\"), (\"IC13\", \"BUS\"), (\"IC4\", \"BUS\"),",
"980)], vdd09 : [(870, 930)], vdd15 : [(1450, 1550)], ddr13 : [(1425, 1575)],",
"\"VCC\"), (\"fpga\", \"VCCO_2V5_DDR24\")}), #add NCP nodes (\"vtt_ddrfpga24\", \"U40\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR24\")}), (\"clk_sig\", \"oscillator\",",
"\"max15301_vcc1v8_fpga\"]), #VCCIV8_FPGA p 92 (\"IC10\", 0x1B, MAX15301, [3300, \"max15301_util_3v3\"]), #UTIL_3V3 p.90 (\"IC4\", 0x10,",
"\"ir3581_loop_vdd_core\", \"ir3581_loop_0v9_vdd_oct\", 0x60, 0x62]), #VDD_CORE, VDD_OCT p 77 (\"U26\", 0x0, ISL, []), #VDD_DDRCPU13",
"\"CLK_OK\")}), (\"fpga_clk\", \"U16\", \"CLK\", {(\"fpga\", \"CLK\")}), (\"en_mgtvccaux_l\", \"U35\", \"OUT11\", {(\"IC5\", \"SHDN\")}), (\"en_mgtvccaux_r\", \"U35\",",
"= self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(voltage, int(voltage * 1.01)) super(MAX15053, self).__init__(name, bus_addr,",
": [(570, 630)], io33 : [(3140, 3460)], }, { \"POWERED_DOWN\": [ ({clk: [(0,",
"#remove 0b prefix multidim = list({0} for i in range(8 - len(binary))) for",
"\"V_IN\"), (\"U51\", \"VDD33\")}), (\"en_mgtavtt_fpga\", \"U35\", \"OUT14\", {(\"U41\", \"VR_ON\")}), (\"mgtavtt_fpga\", \"U41\", \"V_OUT\", {(\"fpga\", \"MGTAVTT\")}),",
"bus_addr, \"POWERED_DOWN\", CPU2) class BMC(Node): B_CLOCK_FLOL = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit,",
"#SYS_1V8, p 89 (\"IC8\", 0x0, MAX15053, [2500]), #SYS_2V5_13 (\"IC9\", 0x0, MAX15053, [2500]), #SYS_2V5_24",
"VDD_DDR13 = Input([(0, 1650)], \"power\") VDD_2V5_DDR13 = Input([(0, 3300)], \"power\") #actually, the CPU",
"(\"vdd_core\", \"U34\", \"VOUT\", {(\"U20\", \"VMON4\"), (\"cpu\", \"VDD\")}), (\"vdd_oct_en_l2\", \"U20\", \"OUT7\", {(\"U34\", \"EN_2\")}), (\"0v9_vdd_oct\",",
"{self.V_PWR.name: [(5500, 14000)]} def bus_req_off(self): return {self.V_PWR.name: [(0, 4400)]} def update(self, states): try:",
": [(0, 0)], ddr24_2v5 : [(0, 0)], ddr13 : [(0, 0)], ddr13_2v5 :",
"[(870, 930)], vdd15 : [(1450, 1550)]}, \"\"), ({ddr24 : [(1425, 1575)], ddr24_2v5 :",
"self.configured = False def generate_output(self, number): name = \"OUT\" + str(number) output =",
"5vsb if added bmc (\"12v_cpu0_psup\", \"psu_cpu0\", \"OUT\", {(\"U20\", \"VMON1_ATT\"), (\"IC4\", \"V_PWR\"), (\"U26\", \"EN_VTT\"),",
"partial(Constraint.explicit, \"C_PLL_DC_OK\", set(), set()))], \"logical\", Wire.cpu_clk_ok) B_FDV_1V8 = Output([{0, 1}, {0, 1}, {0,",
"NCP, []), (\"U57\", 0x0, SI5395, [\"clk_main\"]), (\"U11\", 0x0, SI5395, [\"clk_cpu\"]), (\"U16\", 0x0, SI5395,",
": [(0, 0)], vcc_ddr13 : [(0, 0)], vcc_ddr24 : [(0, 0)], vtt_ddr13 :",
"50)]} implicit_off = {\"VDD\": [(0, 2599)], \"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]}",
"\"power\" BUS = Input([{0, 1}], \"bus\") #loop 1 and loop 2 will have",
"implicit_on = {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]} implicit_off",
"str(number) output = Output([{0, 1}], [Constraint([{1, 0}], {\"VCC\": [(2800, 3960)], \"VCC_IN\": [(2250, 5500)]},",
"req): return (SET.Implicit, [set(), set(), set(), set()], lambda states, req = req: {name",
"\"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA_EVAL3,",
"0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_nodes = [ (\"power_bus\", 0x0,",
"14000)]} implicit_off = {\"VR_ON\": [{0}], \"VDD33\": [0, 2800], \"VDDH\": [(0, 8499)]} VDD33 =",
"[(873, 927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"), ##### REGULAR TRANSITION STEP ######",
"{\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.explicit, \"CLK\", {\"VDD\",",
"ThunderX_EVAL3) class Bus(Node): BUS = Output([{0, 1}], [ Constraint([{1}], lambda node, inputs: node.construct_req(inputs),",
"node, name: node.isppac_monitor(\"VMON8\", name)) VMON8_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8_ATT\",",
"0x1B, MAX15301, [3300, \"max15301_util_3v3\"]), #UTIL_3V3 p.90 (\"IC4\", 0x10, MAX15301, [1500, \"max15301_15_vdd_oct\"]), #1V5_VDD_OCT p.70",
"i * 6.25) outputs.append( Constraint( [(voltage_min, voltage_max)], \\ {\"VID\" : binary_multidimensional(i + 2),",
"ddr24_2v5 : [(0, 0)], ddr13 : [(0, 0)], ddr13_2v5 : [(0, 0)], vttddr24",
"vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1140, 3400)], vcc_ddr24 : [(1140, 3400)], vtt_ddr13",
"= self.implicit_on(int(voltage * 1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(int(voltage), int(voltage",
"p 95 (\"IC5\", 0x0, MAX8869, [1800]), #MGTVCCAUX_L, p 88 (\"IC6\", 0x0, MAX8869, [1800]),",
"\"VCCO_2V5_DDR13\")}), #add NCP nodes (\"vtt_ddrfpga13\", \"U39\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR13\")}), (\"en_sys_2v5_24\", \"U35\", \"OUT8\", {",
"def update(self, states): try: intersect(states[self.VDD.name], [(2600, 3600)]) except State_Space_Error: self.configured = False def",
"Input, Output, Constraint, Wire, PowerState, Stateful_Node, intersect, State_Space_Error, unite_dict, state_union, SET, empty_intersection import",
"\"POWERED_DOWN\" : [ ({clk: [(0, 0), (3300, 3300), (50, 50)]}, \"\"), ({vcc :",
"(\"U27\", \"VS\"), (\"U31\", \"VS\"), (\"IC15\", \"V_IN\"), (\"IC16\", \"V_IN\"), (\"cpu\", \"VDD_IO33\"), (\"U34\", \"VCC\"), (\"U57\",",
": {\"VCC\" : [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN\" : [{0}]} implicit_off_2",
"= Input([(0, 1000)], \"power\") MGTAVCC = Input([(0, 1000)], \"power\") MGTAVTT = Input([(0, 1300)],",
"3300)], \"power\") #actually, the CPU names its ddr bank voltage DDR02 (not 24),",
"50)], \"clock\") VDD = Input([(0, 1210)], \"power\") VDD_09 = Input([(0, 945)], \"power\") VDD_15",
"partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\", \"VR_ON\"}, set())])), Constraint([(0, 0)], {\"VR_ON\": [{0}], \"VDD33\":[(0, 3630)],",
"[\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_wires = [",
"[(940, 980)]}, \"\"), ({vdd09 : [(870, 930)], vdd15 : [(1450, 1550)]}, \"\"), ({ddr24",
"\"VMON4\"), (\"cpu\", \"VDD\")}), (\"vdd_oct_en_l2\", \"U20\", \"OUT7\", {(\"U34\", \"EN_2\")}), (\"0v9_vdd_oct\", \"U34\", \"VOUT_2\", {(\"U20\", \"VMON5\"),",
"implicit_on = {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]} implicit_off = {\"VR_ON\": [{0}], \"VDD33\":",
"Input([{0, 1}], \"logical\") states = (lambda vdd33, vdd, en1, en2: { \"POWERED_DOWN\" :",
": [(1140, 3400)], #vcc_ddr24 : [(1140, 3400)], ##### VALUES FOR EVAL 3: #####",
"2600)], #not imposed by the cpu, but the connected DIMM SPD needs 2.5",
"#MGTVCCAUX_R, p 88 (\"IC7\", 0x0, MAX15053, [1800]), #SYS_1V8, p 89 (\"IC8\", 0x0, MAX15053,",
"self.is_default = True except State_Space_Error: self.is_default = False class IR(Node): implicit_off = lambda",
"= MAX20751.V_OUT(default) super(MAX20751, self).__init__(name, bus_addr, MAX20751) def bus_req(self): return {} def bus_req_off(self): return",
"[{1}]} implicit_off = lambda _, thresh: {\"V_IN\" : [(0, max(thresh + 499, 2699))],",
"0x0, NCP, []), (\"U39\", 0x0, NCP, []), (\"U40\", 0x0, NCP, []), (\"U57\", 0x0,",
"[(0, 0)] }, { \"POWERED_ON\" : [ ({ok : [{0}]}, \"\") ], \"POWERED_DOWN\"",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}], \"VCC\" : [(0, 5250)], \"EN_PWR\"",
"MAX15301s (\"IC12\", 0x11, MAX15301, [1800, \"max15301_vcc1v8_fpga\"]), #VCCIV8_FPGA p 92 (\"IC10\", 0x1B, MAX15301, [3300,",
": [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 :",
"{\"VCC\" : [(0, 4300)], \"EN_PWR\" : [{0, 1}], \"EN_VTT\" : [(0, 830)]} VCC",
"self.implicit_on = self.implicit_on(int(voltage * 1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(int(voltage),",
"\"power\") states = (lambda clk, ok, vcc, io, aux, vcco, vadj, vcc_2v5_ddr13, vcc_2v5_ddr24,",
"2374)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\"))], \"power\") def __init__(self, name, bus_addr): super(NCP, self).__init__(name, bus_addr, NCP)",
"EN_PWR = Input([{0, 1}], \"logical\") EN_VTT = Input([(0, 12000)], \"power\") VID = Input([{0,",
"\"VOUT_2\", \"implicit_off_2\")), ], \"power\", Wire.ir_set) def __init__(self, name, bus_addr, threshold, device, loop1, loop2,",
"aux : [(0, 0)], vcco : [(0, 0)], vadj : [(0, 0)], vcc_2v5_ddr13",
"[(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1140,",
"ddr13 : [(1425, 1575)], ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700, 800)], vttddr13",
"[{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_on), state_update= Constraint.default_state), Constraint([(500, 1520)], {\"VR_ON\":",
"vdd : [(940, 980)], vdd09 : [(870, 930)], vdd15 : [(1450, 1550)], ddr13",
"3600)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(2600, 3600)]})), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\":",
"[(940, 980)]}, \"\"), ({vdd09 : [(870, 930)], vdd15 : [(1450, 1550)]}, \"\"), ####",
"vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"), ({vdd: [(2000, 2200)]}, \"\") ],",
"#VDD_DDRCPU13 p 73 (\"U30\", 0x0, ISL, []), #VDD_DDRCPU24 p 75 (\"U37\", 0x72, MAX20751,",
"bin(decimal)[2:] #remove 0b prefix multidim = list({0} for i in range(8 - len(binary)))",
": PowerState({vdd : [(2300, 2600)], en1 : [{1}], en2 : [{0}]}, { \"POWERED_DOWN\":",
"Constraint([{0}], {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 6000)]}, partial(Constraint.implicit, name, \"implicit_off\")), Constraint([{0}], {\"VCC\": [(0,",
"\"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\", \"VR_ON\"}, set())])), Constraint([(0, 0)], {\"VR_ON\": [{0}], \"VDD33\":[(0, 3630)], \"VDDH\":[(0,",
"\"EN\")}), (\"vccintio_bram_fpga\", \"IC13\", \"V_OUT\", {(\"U35\", \"VMON10\"), (\"fpga\", \"VCCINT_IO\")}), (\"en_vdd_ddrfpga13\", \"U35\", \"OUT18\", {(\"U43\", \"EN_PWR\")}),",
"\"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA_EVAL3, self).__init__(name,",
"[(0, 0)], mgtavcc : [(0, 0)], ok : [{0}] }, { \"POWERED_ON\" :",
"}, { \"POWERED_ON\" : [ ({mgtaux_l : [(0, 0)], mgtaux_r : [(0, 0)]},",
"* 1.01)) self.V_OUT = self.V_OUT(int(voltage), int(voltage * 1.01)) super(MAX8869, self).__init__(name, bus_addr, MAX8869) class",
"Clock(Node): CLK = Output([(0, 3300), (0, 60)], [([(0, 3300), (0, 60)], {}, [],",
"= Input([{0, 1}], \"logical\") V_OUT = lambda default : Output([(0, 1520)], [Constraint([], {\"VR_ON\":",
"\"OUT6\", {(\"U34\", \"EN\")}), (\"vdd_core\", \"U34\", \"VOUT\", {(\"U20\", \"VMON4\"), (\"cpu\", \"VDD\")}), (\"vdd_oct_en_l2\", \"U20\", \"OUT7\",",
"\"\"), ], \"POWERED_DOWN\" : [] }), \"POWERED_ON\" : PowerState({ clk: [(0, 0), (3300,",
"1854)], mgtaux_r : [(1746, 1854)], mgtavcc : [(873, 927)], ok : [{1}] },",
"State_Space_Error: pass class Oscillator(Node): VDD = Input([(0, 3600)], \"power\") CLK = Output([(0, 0),",
"{ \"POWERED_DOWN\": [ ({en2 : [{1}]}, \"\"), ({vdd33: [(3000, 4000)], vdd: [(2000, 2500)]},",
"V_EN = Input([{0, 1}], \"logical\") V_OUT = lambda _, default, threshold: Output([(0, default)],",
"partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{0}]}))], \"power\")",
"60)], {}, [], lambda node: node.indep(\"CLK\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Clock,",
"1}], [ Constraint([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"= False self.current = [(default, default)] self.V_OUT = MAX15301.V_OUT(default) super(MAX15301, self).__init__(name, bus_addr, MAX15301)",
"class PowerSupply(Node): OUT0 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT0\"))],",
"\"POWERED_DOWN\" : PowerState({vdd : [(0, 0)], en1 : [{0}], en2 : [{0}]}, {",
"(\"cpu\", \"VDD_IO33\"), (\"U34\", \"VCC\"), (\"U57\", \"VDD\"), (\"U11\", \"VDD\"), (\"U16\", \"VDD\"), (\"oscillator\", \"VDD\")}), #(\"12v_psup\",",
"\"V5_PSU\", {(\"U35\", \"VMON2_ATT\"), (\"U20\", \"VMON2_ATT\"), (\"U26\", \"VCC\"), (\"U30\", \"VCC\"), (\"U43\", \"VCC\"), (\"U47\", \"VCC\")}),",
"Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr):",
"{(\"fpga\", \"VCCO_VTT_DDR24\")}), (\"clk_sig\", \"oscillator\", \"CLK\", {(\"U57\", \"CLK_IN\")}), (\"clk_main\", \"U57\", \"CLK\", {(\"U11\", \"CLK_IN\"), (\"U16\",",
"[([(0, 3300), (0, 60)], {}, [], lambda node: node.indep(\"CLK\"))], \"clock\", Wire.clock_config) def __init__(self,",
"req: {name : set(filter(lambda x: not empty_intersection(x, req, states), req.keys()))}) class MAX20751(Node): implicit_on",
"[(0, 0)], en1 : [{0}], en2 : [{1}]}, { \"POWERED_ON\": [ ({en1 :",
"({vdd : [(940, 980)]}, \"\"), ({vdd09 : [(870, 930)], vdd15 : [(1450, 1550)]},",
"= self.implicit_off_2(threshold) self.VOUT = self.VOUT(threshold) self.VOUT_2 = self.VOUT_2(threshold) super(IR, self).__init__(name, bus_addr, IR) def",
"[900, \"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA p 99 (\"U34\", 0x0, IR, [4500, \"ir3581\", \"ir3581_loop_vdd_core\", \"ir3581_loop_0v9_vdd_oct\", 0x60,",
": [{1}]}, \"\"), ({vdd33: [(3000, 4000)], vdd: [(2000, 2500)]}, \"wait until \" +",
"en2 : [{0}]}, { \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}),",
"try: intersect(states[self.VDD33.name], [(0, 2800)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass try:",
"Input([(0, 4500)], \"power\", lambda node, name: node.isppac_monitor(\"VCCA\", name)) def __init__(self, name, bus_addr, device):",
"[Constraint([], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_on), state_update = Constraint.default_state), Constraint([(600,",
"loop1 self.loop2 = loop2 self.l1_addr = l1_addr self.l2_addr = l2_addr self.implicit_off = self.implicit_off(threshold)",
"\"wait until \" + vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"), ({en2",
"partial(Constraint.implicit, \"V12_PSU\", {\"EN\": [{0}]}))], \"power\") V5SB_PSU = Output([(5000, 5000)], [Constraint([(5000, 5000)], {}, partial(Constraint.implicit,",
"] #EVAL 3 version of the Enzian nodes, only changes classes of ThunderX",
"0)], io33 : [(0, 0)] }, { \"POWERED_ON\" : [ ({ok : [{0}]},",
"\"BUS\"}, {\"EN_2\"}, after_set = {\"EN_2\"}), partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\", \"EN_2\"}, set())])), Constraint([(0,",
"= Input([(0, 3300)], \"power\") #actually, the CPU names its ddr bank voltage DDR02",
"\"V_PWR\": [(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_off)) ], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr,",
"[(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1200,",
"Constraint.default_state), Constraint([(600, 5250)], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update=(Constraint.is_default,",
"Bus(Node): BUS = Output([{0, 1}], [ Constraint([{1}], lambda node, inputs: node.construct_req(inputs), lambda node,",
": [{0}], en2 : [{0}]}, { \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ],",
"14000)]} def bus_req_off(self): return {self.V_PWR.name: [(0, 4400)]} def update(self, states): try: intersect(states[self.V_OUT.name], [(600,",
"\"VRI\")}), (\"vdd_ddrcpu24\", \"U30\", \"VOUT\", {(\"U20\", \"VMON10\"), (\"U31\", \"VBUS\"), (\"cpu\", \"VDD_DDR24\"), (\"U25\", \"VRI\")}), (\"vdd_core_en\",",
"{0, 1}, {0, 1}, {0, 1}], [ Constraint([{0, 1}, {0, 1}, {0, 1},",
"\"VID\"), (\"U30\", \"VID\")}), (\"b_fdv_1v8\", \"bmc\", \"B_FDV_1V8\", {(\"U43\", \"VID\"), (\"U47\", \"VID\")}), (\"c_reset_n\", \"bmc\", \"C_RESET_N\",",
"True return except State_Space_Error: self.is_default = False try: intersect(states[self.VDD33.name], [(0, 2800)]) self.current =",
"_, thresh: {\"V_IN\" : [(max(thresh + 500, 2700), 5500)], \"SHDN\" : [{1}]} implicit_off",
"2600)], ddr13 : [(1425, 1575)], ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700, 800)],",
"Oscillator) class SI5395(Node): implicit_on = {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300),",
"implicit_on = {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]} BUS = Input([{0, 1}], \"bus\") EN",
"\"power\") V_OUT = lambda default : Output([(0, 5250)], [Constraint([], {\"EN\": [{1}], \"V_PWR\": [(5500,",
"\"VMON2_ATT\"), (\"U26\", \"VCC\"), (\"U30\", \"VCC\"), (\"U43\", \"VCC\"), (\"U47\", \"VCC\")}), (\"5vsb_psup\", \"main_psu\", \"V5SB_PSU\", {(\"U20\",",
"[2500]), #2V5_CPU13 p 71 (\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0,",
"intersect, State_Space_Error, unite_dict, state_union, SET, empty_intersection import math from functools import partial import",
"4500)], \"power\", lambda node, name: node.isppac_monitor(\"VCCA\", name)) def __init__(self, name, bus_addr, device): self.device",
"\"EN\"), (\"psu_cpu1\", \"EN\"), (\"main_psu\", \"EN\")}), (\"3v3_psup\", \"main_psu\", \"V33_PSU\", {(\"U20\", \"VMON3_ATT\"), (\"U27\", \"VS\"), (\"U31\",",
"3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]} implicit_off = {\"VDD\": [(0, 2599)],",
"implicit_off = lambda _, threshold: {\"V_IN\" : [(0, (max(int(threshold * 1.06) - 1,",
"x2, x1 * 2 == x2 + 1), ([(\"VREF\", 0), (\"VRI\", 0)]))]), Constraint([(0,",
"loop1, loop2, l1_addr, l2_addr): self.configured = False self.is_default = False self.threshold = threshold",
"self.is_default = True except State_Space_Error: self.is_default = False try: intersect(states[self.VIN.name], [(self.threshold, 13200)]) except",
"= False def configure(self): if self.configured: return [] else: self.configured = True return",
"(\"en_sys_2v5_24\", \"U35\", \"OUT8\", { (\"IC9\", \"V_EN\")}), (\"sys_2v5_24\", \"IC9\", \"V_OUT\", {(\"U35\", \"VMON5_ATT\"), (\"U40\", \"VCC\"),",
"5250)], \"EN_PWR\" : [{0}], \"EN_VTT\" : [(0, 14000)]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\"))) return outputs",
"set(), set()], lambda states, req = req: {name : set(filter(lambda x: not empty_intersection(x,",
"but the connected DIMM SPD needs 2.5 V ddr13_2v5 : [(2400, 2600)], vttddr24",
"\"BUS\"), (\"IC12\", \"BUS\"), (\"IC13\", \"BUS\"), (\"IC4\", \"BUS\"), (\"IC11\", \"BUS\"), (\"U34\", \"BUS\"), (\"U37\", \"BUS\"),",
"\"VOUT\", \"implicit_off\")), ], \"power\", Wire.ir_set) VOUT_2 = lambda _, thresh : Output([(0, 3040)],",
"[{0}], \"V_PWR\": [(0, 4400)]} implicit_on = {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]} BUS =",
"_, thresh : {\"VCC\" : [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN\" :",
"VMON5 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5\", name)) VMON5_ATT = Input([(0,",
"\"VDD_15\", \"VDD_DDR24\", \"VDD_2V5_DDR24\", \"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name,",
"ok, rst, clk, vdd, vdd09, vdd15, ddr24, ddr24_2v5, ddr13, ddr13_2v5, vttddr24, vttddr13, io33",
"1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], [ Constraint([{0, 1}, {0,",
"[{0}]}, { \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" :",
"VCCO_VCC_DDR24 = Input([(0, 3400)], \"power\") VCCO_VTT_DDR13 = Input([(0, 2000)], \"power\") #replace VREF VCCO_VTT_DDR24",
"connected DIMM SPD needs 2.5 V ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570,",
"0)], {\"V_IN\" : [(0, 5500)], \"SHDN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\")",
"({io : [(873, 927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"), ({vcco : [(1746,",
"(\"U35\", \"VCC\")}), #add 5vsb if added bmc (\"12v_cpu0_psup\", \"psu_cpu0\", \"OUT\", {(\"U20\", \"VMON1_ATT\"), (\"IC4\",",
"(\"1v5_vdd_oct\", \"IC4\", \"V_OUT\", {(\"U20\", \"VMON6\"), (\"cpu\", \"VDD_15\")}), (\"en_2v5_cpu13\", \"U20\", \"OUT9\", {(\"IC15\", \"V_EN\")}), #to",
"\"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX_EVAL3)",
"[ Constraint([{1}], lambda node, inputs: node.construct_req(inputs), lambda node, name, inputs: node.construct_dependency(name, node.construct_req(inputs))), Constraint([{0}],",
"(\"vccintio_bram_fpga\", \"IC13\", \"V_OUT\", {(\"U35\", \"VMON10\"), (\"fpga\", \"VCCINT_IO\")}), (\"en_vdd_ddrfpga13\", \"U35\", \"OUT18\", {(\"U43\", \"EN_PWR\")}), (\"en_vdd_ddrfpga24\",",
"node, _ in inputs: node_req = node.bus_req_off() for wire, state in node_req.items(): if",
"commands.append(\"wait_for_voltage('%s', v_min=0, v_max=0.08, device='%s', monitor='%s')\" % ( wire_name, node.device, pinname )) else: commands.append(\"wait_for_voltage('%s',",
"lambda _, thresh : {\"VCC\" : [(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN\"",
": [(0, 5250)], \"EN_PWR\" : [{0}], \"EN_VTT\" : [(0, 14000)]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\")))",
"__init__(self, name, bus_addr): super(CPU_3, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU_3) class PSU(Node): EN = Input([{0,",
"= Input([{0, 1}], \"logical\") V_OUT = lambda _, default, threshold: Output([(0, default)], [Constraint([(default,",
"3300), (0, 0)], vdd : [(0, 0)], vdd09 : [(0, 0)], vdd15 :",
"\"VCC\"), (\"U35\", \"VCC\")}), #add 5vsb if added bmc (\"12v_cpu0_psup\", \"psu_cpu0\", \"OUT\", {(\"U20\", \"VMON1_ATT\"),",
"vtt_ddr24 : [(0, 0)], vadj: [(0, 0)]}, \"\"), ({aux : [(0, 0)]}, \"\"),",
"[2500]), #SYS_2V5_24 (\"IC15\", 0x0, MAX15053, [2500]), #2V5_CPU13 p 71 (\"IC16\", 0x0, MAX15053, [2500]),",
"\"U20\", \"OUT10\", {(\"IC16\", \"V_EN\")}), #to NCP (\"2v5_cpu24\", \"IC16\", \"V_OUT\", {(\"U20\", \"VMON8_ATT\"), (\"U25\", \"VCC\"),",
"partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_on), state_update= Constraint.default_state), Constraint([(500, 1520)], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)],",
"vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ##### FOR EVAL 3: ###################",
"self.VOUT = self.VOUT(threshold) self.VOUT_2 = self.VOUT_2(threshold) super(IR, self).__init__(name, bus_addr, IR) def bus_req(self): return",
"False)\" % self.loop1, \"init_device('%s', False)\" % self.loop2 ] class FPGA(Stateful_Node): CLK = Input([(0,",
"mgtaux_l : [(0, 0)], mgtaux_r : [(0, 0)], mgtavcc : [(0, 0)], ok",
": [ ({mgtaux_l : [(0, 0)], mgtaux_r : [(0, 0)]}, \"\"), ({vtt :",
"[(1200, 1200)], ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570, 630)], vttddr13 : [(570,",
"MAX15301.V_OUT(default) super(MAX15301, self).__init__(name, bus_addr, MAX15301) def bus_req(self): return {self.V_PWR.name: [(5500, 14000)]} def bus_req_off(self):",
"{}, [], lambda node: node.indep(\"CLK\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Clock, self).__init__(name,",
"\"VCC_IN\": [(0, 2000)]}, partial(Constraint.implicit, name, \"implicit_off\"))], \"logical\", Wire.pin_set) setattr(self, name, output) def isppac_monitor(self,",
"\"implicit_off\")), ], \"power\") def __init__(self, name, bus_addr, voltage): self.implicit_on = self.implicit_on(int(voltage * 1.01))",
"{\"EN\"}), partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0,",
"monitor='%s')\" % ( wire_name, 0.00095 * list(value[0])[0] * multiplier, 0.00105 * list(value[0])[0] *",
"and loop 2 will have different addresses... VCC = Input([(0, 4000)], \"power\") EN",
"(Constraint.is_default, [partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN_2\"}, after_set = {\"EN_2\"}), partial(Constraint.explicit, \"VOUT_2\", {\"VCC\",",
"vtt_ddr24, vtt, mgtaux_l, mgtaux_r, mgtavcc: { \"POWERED_DOWN\" : PowerState({ clk: [(0, 0), (3300,",
"1550)], ddr13 : [(1425, 1575)], ddr24: [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], #not",
"(\"fpga\", \"VCCO_2V5_DDR24\")}), #add NCP nodes (\"vtt_ddrfpga24\", \"U40\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR24\")}), (\"clk_sig\", \"oscillator\", \"CLK\",",
"name, bus_addr): super(ThunderX, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX) #EVAL 3 version of the THUNDERX:",
"= Output([(3300, 3300)], [Constraint([(3300, 3300)], {}, partial(Constraint.implicit, \"BMC_VCC_3V3\", {}))], \"power\") V5_PSU = Output([(0,",
"(\"U16\", \"CLK_IN\")}), (\"clock_flol\", \"bmc\", \"B_CLOCK_FLOL\", {(\"fpga\", \"CLK_OK\")}), (\"fpga_clk\", \"U16\", \"CLK\", {(\"fpga\", \"CLK\")}), (\"en_mgtvccaux_l\",",
"in like VCCO_VCC_DDR voltages MGTVCCAUX_L = Input([(0, 1900)], \"power\") MGTVCCAUX_R = Input([(0, 1900)],",
"__init__(self, name, bus_addr): super(Main_PSU, self).__init__(name, bus_addr, Main_PSU) class PowerSupply(Node): OUT0 = Output([(0, 12000)],",
"927)]}, \"\"), ({vtt : [(1164, 1236)]}, \"\"), ({mgtaux_l : [(1746, 1854)], mgtaux_r :",
"89 (\"IC8\", 0x0, MAX15053, [2500]), #SYS_2V5_13 (\"IC9\", 0x0, MAX15053, [2500]), #SYS_2V5_24 (\"IC15\", 0x0,",
"\"VCC_IN\"), (\"U35\", \"VCC_IN\")}), (\"bmc_vcc_3v3\", \"main_psu\", \"BMC_VCC_3V3\", {(\"U20\", \"VCC\"), (\"U35\", \"VCC\")}), #add 5vsb if",
"\"CLK_IN\"}, set())), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0,",
"VMON4_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4_ATT\", name)) VMON5 = Input([(0,",
"\"VDD33\")}), (\"en_mgtavtt_fpga\", \"U35\", \"OUT14\", {(\"U41\", \"VR_ON\")}), (\"mgtavtt_fpga\", \"U41\", \"V_OUT\", {(\"fpga\", \"MGTAVTT\")}), (\"en_mgtavcc_fpga\", \"U35\",",
"(\"fpga\", \"VCCO_2V5_DDR13\")}), #add NCP nodes (\"vtt_ddrfpga13\", \"U39\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR13\")}), (\"en_sys_2v5_24\", \"U35\", \"OUT8\",",
"\"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA) #EVAL",
"node.construct_dependency(name, node.construct_req(inputs))), Constraint([{0}], lambda node, inputs: {}, lambda node, name, inputs: node.construct_dependency(name, node.construct_req_off(inputs)))",
"\"main_psu\", \"V12_PSU\", {}), #add bmc (\"5v_psup\", \"main_psu\", \"V5_PSU\", {(\"U35\", \"VMON2_ATT\"), (\"U20\", \"VMON2_ATT\"), (\"U26\",",
"states = (lambda clk, ok, vcc, io, aux, vcco, vadj, vcc_2v5_ddr13, vcc_2v5_ddr24, vcc_ddr13,",
"FPGA(Stateful_Node): CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK_OK = Input([{0,",
"#TODO: add real names of MAX15301s (\"IC12\", 0x11, MAX15301, [1800, \"max15301_vcc1v8_fpga\"]), #VCCIV8_FPGA p",
"[(0, 2599)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\")), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"CLK_IN\": [(0,",
"MAX15301, [1800, \"max15301_vadj_1v8\"]), #VADJ_1V8 p.91 (\"IC13\", 0x15, MAX15301, [900, \"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA p 99",
"Enzian nodes, only changes classes of ThunderX and FPGA to EVAL 3 versions",
"[Constraint([(default, default)], {\"V_IN\" : [(max(thresh + 500, 2700), 5500)], \"SHDN\" : [{1}]}, partial(Constraint.implicit,",
"\"SHDN\" : [{1}]} implicit_off = lambda _, thresh: {\"V_IN\" : [(0, max(thresh +",
"know which supplies to connect.. :') VDD_DDR24 = Input([(0, 1650)], \"power\") VDD_2V5_DDR24 =",
"bus_addr, \"POWERED_DOWN\", CPU_3) class PSU(Node): EN = Input([{0, 1}], \"logical\") OUT = Output([(0,",
"node, name: node.ina_monitor(name)) def __init__(self, name, bus_addr, device): self.device = device super(INA226, self).__init__(name,",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11\", name)) VMON11_ATT = Input([(0, 5734)], \"monitor\", lambda",
"[Constraint([(500, 3040)], {\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN_2\" : [{1}],",
"loop2 self.l1_addr = l1_addr self.l2_addr = l2_addr self.implicit_off = self.implicit_off(threshold) self.implicit_off_2 = self.implicit_off_2(threshold)",
"p 77 (\"U26\", 0x0, ISL, []), #VDD_DDRCPU13 p 73 (\"U30\", 0x0, ISL, []),",
"\"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd33: [(3000,",
"changes class ThunderX_EVAL3(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0, 1}], \"logical\")",
"(\"en_mgtavcc_fpga\", \"U35\", \"OUT10\", {(\"U37\", \"VR_ON\")}), (\"mgtavcc_fpga\", \"U37\", \"V_OUT\", {(\"U35\", \"VMON7\"), (\"fpga\", \"MGTAVCC\")}), (\"en_vccint_fpga\",",
"thresh: {\"V_IN\" : [(max(thresh + 500, 2700), 5500)], \"SHDN\" : [{1}]} implicit_off =",
"bus_addr, threshold, device, loop1, loop2, l1_addr, l2_addr): self.configured = False self.is_default = False",
"{(\"U20\", \"VMON1_ATT\"), (\"IC4\", \"V_PWR\"), (\"U26\", \"EN_VTT\"), (\"U30\", \"EN_VTT\"), (\"U34\", \"VIN\")}), (\"12v_cpu1_psup\", \"psu_cpu1\", \"OUT\",",
"VOUT = lambda _, thresh : Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\" : [(2900,",
"= Input([(0, 2500)], \"power\") EN1 = Input([{0, 1}], \"logical\") EN2 = Input([{0, 1}],",
"loop2, l1_addr, l2_addr): self.configured = False self.is_default = False self.threshold = threshold self.device",
"(\"sys_1v8\", \"IC7\", \"V_OUT\", {(\"U35\", \"VMON12\"), (\"fpga\", \"VCCO_1V8\")}), #where to connect at fpga? additional",
"{\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]} BUS = Input([{0, 1}], \"bus\") EN = Input([{0,",
"needs 2.5 V ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700, 800)], vttddr13 :",
"Wire.gpio_set) C_PLL_DC_OK = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_PLL_DC_OK\", set(), set()))], \"logical\",",
"ddr13 : [(1200, 1200)], ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570, 630)], vttddr13",
"self.configured = True return [ \"init_device('%s', False)\" % (self.device) ] class MAX15301(Node): implicit_off",
"Input([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"5000)], [ Constraint([(5000, 5000)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)],",
"13900)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON1_ATT\", name, 0.4125)) VMON2_ATT = Input([(0, 5734)], \"monitor\",",
"#fpga ISPPAC (\"U44\", 0x40, INA226, [\"ina226_ddr_fpga_24\"]), (\"U48\", 0x41, INA226, [\"ina226_ddr_fpga_13\"]), (\"U27\", 0x44, INA226,",
"1}], \"logical\") V_PWR = Input([(0, 14000)], \"power\") V_OUT = lambda default : Output([(0,",
"_, threshold: {\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" : [{1}]} implicit_off",
"self.configured = False super(SI5395, self).__init__(name, bus_addr, SI5395) def update(self, states): try: intersect(states[self.VDD.name], [(2600,",
": [{1}], vdd: [(2000, 2600)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD\", \"EN1\", \"EN2\"])",
"[{0}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(PSU, self).__init__(name, bus_addr,",
"update(self, states): try: intersect(states[self.V_OUT.name], [(500, 1520)]) self.current = states[self.V_OUT.name] self.is_default = True return",
"device): self.device = device super(INA226, self).__init__(name, bus_addr, INA226) self.configured = False def ina_monitor(self,",
"2600)], vttddr24 : [(570, 630)], vttddr13 : [(570, 630)], io33 : [(3140, 3460)],",
": [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1200, 1200)], vcc_ddr24 :",
"to connect at fpga? additional vcco thingy? (\"en_sys_2v5_13\", \"U35\", \"OUT7\", { (\"IC8\", \"V_EN\")}),",
"lambda _, threshold: {\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" : [{1}]}",
"bus_addr): super(ThunderX_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX_EVAL3) class Bus(Node): BUS = Output([{0, 1}], [",
"(50, 50)]} implicit_off = {\"VDD\": [(0, 2599)], \"CLK_IN\": [(0, 0), (3300, 3300), (0,",
"\"V_IN\"), (\"cpu\", \"VDD_IO33\"), (\"U34\", \"VCC\"), (\"U57\", \"VDD\"), (\"U11\", \"VDD\"), (\"U16\", \"VDD\"), (\"oscillator\", \"VDD\")}),",
"\"VMON8_ATT\"), (\"U25\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR24\")}), (\"vtt_ddrcpu24\", \"U25\", \"VREF\", {(\"U20\", \"VMON12\"), (\"cpu\", \"VTT_DDR24\")}), (\"pll_ref_clk\",",
"= lambda _, thresh : Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\" : [(2900, 3630)],",
"State_Space_Error: self.is_default = False try: intersect(states[self.VIN.name], [(self.threshold, 13200)]) except State_Space_Error: self.configured = False",
"[\"pac_fpga\"]), #fpga ISPPAC (\"U44\", 0x40, INA226, [\"ina226_ddr_fpga_24\"]), (\"U48\", 0x41, INA226, [\"ina226_ddr_fpga_13\"]), (\"U27\", 0x44,",
"{}, partial(Constraint.explicit, \"B_CLOCK_FLOL\", set(), set()))], \"logical\", Wire.fpga_clk_ok) B_PSUP_ON = Output([{0, 1}], [Constraint([{0, 1}],",
"fun(value, states, node=self, wire=wire_name): if states[node.VS.name][0][0] > 2700 and states[node.VS.name][0][0] < 5500: commands",
"SPD needs 2.5 V ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700, 800)], vttddr13",
"sys_pll_mul beforehand\"), ({rst : [{1}]}, \"\") ], \"POWERED_ON\" : [] }) }, [\"PLL_DC_OK\",",
": [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 :",
"(\"3v3_psup\", \"main_psu\", \"V33_PSU\", {(\"U20\", \"VMON3_ATT\"), (\"U27\", \"VS\"), (\"U31\", \"VS\"), (\"IC15\", \"V_IN\"), (\"IC16\", \"V_IN\"),",
"int(voltage * 1.01)) super(MAX8869, self).__init__(name, bus_addr, MAX8869) class MAX15053(Node): implicit_on = lambda _,",
"[(1200, 1200)], ddr13: [(1200, 1200)], ################################# ddr24_2v5 : [(2400, 2600)], #not imposed by",
"[(0, 2000)]}, partial(Constraint.implicit, name, \"implicit_off\"))], \"logical\", Wire.pin_set) setattr(self, name, output) def isppac_monitor(self, pinname,",
"= {\"EN\"}), partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"VCC\" :",
"[(0, 0)], vdd : [(0, 0)], en1 : [{0}], en2 : [{1}]}, {",
"1}], \"logical\") VIN = Input([(0, 13200)], \"power\") VOUT = lambda _, thresh :",
"True return [ \"init_device('%s', False)\" % (self.device) ] class MAX15301(Node): implicit_off = {\"EN\":",
"node.construct_req_off(inputs))) ], \"bus\") def __init__(self, name, bus_addr): super(Bus, self).__init__(name, bus_addr, Bus) def construct_req(self,",
"self.device = device self.loop1 = loop1 self.loop2 = loop2 self.l1_addr = l1_addr self.l2_addr",
"EN2 = Input([{0, 1}], \"logical\") states = (lambda vdd, en1, en2: { \"POWERED_DOWN\"",
"[(0, 0)], ddr13 : [(0, 0)], ddr13_2v5 : [(0, 0)], vttddr24 : [(0,",
"\"V_OUT\", MAX20751.implicit_on), state_update= Constraint.default_state), Constraint([(500, 1520)], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)], \"BUS\"",
"\"BMC_VCC_3V3\", {(\"U20\", \"VCC\"), (\"U35\", \"VCC\")}), #add 5vsb if added bmc (\"12v_cpu0_psup\", \"psu_cpu0\", \"OUT\",",
"(\"U40\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR24\")}), #add NCP nodes (\"vtt_ddrfpga24\", \"U40\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR24\")}), (\"clk_sig\",",
"{\"EN\": [{0}], \"V_PWR\": [(0, 4400)]} implicit_on = {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]} BUS",
"sys_pll_ddr : { \"POWERED_DOWN\" : PowerState({ ok : [{0}], rst : [{0}], clk",
"vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], vtt_ddr13 : [(550, 1700)], vtt_ddr24",
"Main_PSU(Node): EN = Input([{0, 1}], \"logical\") V33_PSU = Output([(0, 3300)], [ Constraint([(3300, 3300)],",
"= l1_addr self.l2_addr = l2_addr self.implicit_off = self.implicit_off(threshold) self.implicit_off_2 = self.implicit_off_2(threshold) self.VOUT =",
"(\"IC9\", 0x0, MAX15053, [2500]), #SYS_2V5_24 (\"IC15\", 0x0, MAX15053, [2500]), #2V5_CPU13 p 71 (\"IC16\",",
"\"power\", Wire.ir_set) VOUT_2 = lambda _, thresh : Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\"",
"0: commands.append(\"wait_for_voltage('%s', v_min=0, v_max=0.08, device='%s', monitor='%s')\" % ( wire_name, node.device, pinname )) else:",
"[{0}] }, { \"POWERED_ON\" : [ ({mgtaux_l : [(0, 0)], mgtaux_r : [(0,",
"(\"U57\", 0x0, SI5395, [\"clk_main\"]), (\"U11\", 0x0, SI5395, [\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\",",
"\"VCC\")}), #add 5vsb if added bmc (\"12v_cpu0_psup\", \"psu_cpu0\", \"OUT\", {(\"U20\", \"VMON1_ATT\"), (\"IC4\", \"V_PWR\"),",
"[(1746, 1854)], mgtavcc : [(873, 927)], ok : [{1}] }, { \"POWERED_DOWN\" :",
"< 5500: commands = node.configure() commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire,",
"Input([(0, 2000)], \"power\") VCCO_VCC_DDR13 = Input([(0, 3400)], \"power\") VADJ_1V8 = Input([(0, 2000)], \"power\")",
"node, name, inputs: node.construct_dependency(name, node.construct_req(inputs))), Constraint([{0}], lambda node, inputs: {}, lambda node, name,",
": [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" : [{1}]} implicit_off = lambda _,",
"(self.device) ] class Clock(Node): CLK = Output([(0, 3300), (0, 60)], [([(0, 3300), (0,",
"6000)]}, partial(Constraint.implicit, name, \"implicit_off\")), Constraint([{0}], {\"VCC\": [(0, 4500)], \"VCC_IN\": [(0, 2000)]}, partial(Constraint.implicit, name,",
"%vdd), ({ok : [{1}]}, \"must have written pll_mul and sys_pll_mul beforehand\"), ({rst :",
"{\"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\"))], \"clock\", Wire.clock_config) def",
"configure(self): if self.configured: return [] else: self.configured = True return [ \"init_device('%s', False)\"",
": [(1450, 1550)], ddr13 : [(1425, 1575)], ddr24: [(1425, 1575)], ddr24_2v5 : [(2400,",
"implicit_off = {\"VRI\" : [(0, 868)], \"VCC\" : [(0, 2374)]} VCC = Input([(0,",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_FDV_1V8\", set(), set()))],",
"\"VDD\"), (\"oscillator\", \"VDD\")}), #(\"12v_psup\", \"main_psu\", \"V12_PSU\", {}), #add bmc (\"5v_psup\", \"main_psu\", \"V5_PSU\", {(\"U35\",",
"[(0, 0)]}, \"\"), ({mgtavcc : [(0, 0)]}, \"\"), ({vcco: [(0, 0)], vcc_2v5_ddr13 :",
"{}, partial(Constraint.explicit, \"B_PSUP_ON\", set(), set()))], \"logical\", Wire.gpio_set) C_RESET_N = Output([{0, 1}], [Constraint([{0, 1}],",
"\"BUS\", {(\"IC10\", \"BUS\"), (\"IC12\", \"BUS\"), (\"IC13\", \"BUS\"), (\"IC4\", \"BUS\"), (\"IC11\", \"BUS\"), (\"U34\", \"BUS\"),",
"vtt, mgtaux_l, mgtaux_r, mgtavcc: { \"POWERED_DOWN\" : PowerState({ clk: [(0, 0), (3300, 3300),",
"[partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\"}, {\"VR_ON\"}, after_set = {\"VR_ON\"}), partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\",",
"(\"U34\", \"VCC\"), (\"U57\", \"VDD\"), (\"U11\", \"VDD\"), (\"U16\", \"VDD\"), (\"oscillator\", \"VDD\")}), #(\"12v_psup\", \"main_psu\", \"V12_PSU\",",
"\"VCCINT\")}), (\"en_sys_1v8\", \"U35\", \"OUT16\", {(\"IC7\", \"V_EN\")}), (\"sys_1v8\", \"IC7\", \"V_OUT\", {(\"U35\", \"VMON12\"), (\"fpga\", \"VCCO_1V8\")}),",
": [(1746, 1854)], vcco : [(1746, 1854)], vadj : [(1746, 1854)], vcc_2v5_ddr13 :",
"Constraint, Wire, PowerState, Stateful_Node, intersect, State_Space_Error, unite_dict, state_union, SET, empty_intersection import math from",
"outputs = [] for i in range(0, 177): voltage_min = math.floor(1600 - i",
"#add NCP nodes (\"vtt_ddrfpga24\", \"U40\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR24\")}), (\"clk_sig\", \"oscillator\", \"CLK\", {(\"U57\", \"CLK_IN\")}),",
"name)) VMON7 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7\", name)) VMON7_ATT =",
"threshold self.device = device self.loop1 = loop1 self.loop2 = loop2 self.l1_addr = l1_addr",
"[] for i in range(0, 177): voltage_min = math.floor(1600 - i * 6.25)",
"\"max15301_util_3v3\"]), #UTIL_3V3 p.90 (\"IC4\", 0x10, MAX15301, [1500, \"max15301_15_vdd_oct\"]), #1V5_VDD_OCT p.70 (\"IC11\", 0x12, MAX15301,",
"({mgtaux_l : [(0, 0)], mgtaux_r : [(0, 0)]}, \"\"), ({vtt : [(0, 0)]},",
"1}, {0, 1}], \"logical\") VOUT = Output([(0, 1600)], isl_outputs(), \"power\") def __init__(self, name,",
"clk, ok, vcc, io, aux, vcco, vadj, vcc_2v5_ddr13, vcc_2v5_ddr24, vcc_ddr13, vcc_ddr24, vtt_ddr13, vtt_ddr24,",
"class MAX8869(Node): implicit_on = lambda _, thresh: {\"V_IN\" : [(max(thresh + 500, 2700),",
"1}], \"logical\") V_OUT = lambda default : Output([(0, 1520)], [Constraint([], {\"VR_ON\": [{1}], \"VDD33\":[(2970,",
"class SI5395(Node): implicit_on = {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50,",
"\"EN_PWR\")}), (\"vdd_ddrfpga13\", \"U43\", \"VOUT\", {(\"U48\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR13\"), (\"U39\", \"VRI\")}), #vcco (\"vdd_ddrfpga24\", \"U47\",",
"import partial import z3 class INA226(Node): BUS = Input([{0, 1}], \"bus\") VS =",
": [(0, 0)]}, \"\"), ], \"POWERED_DOWN\" : [] }), \"POWERED_ON\" : PowerState({ clk:",
"{\"VR_ON\"}, after_set = {\"VR_ON\"}), partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\", \"VR_ON\"}, set())])), Constraint([(0, 0)],",
"\"U35\", \"OUT13\", {(\"IC13\", \"EN\")}), (\"vccintio_bram_fpga\", \"IC13\", \"V_OUT\", {(\"U35\", \"VMON10\"), (\"fpga\", \"VCCINT_IO\")}), (\"en_vdd_ddrfpga13\", \"U35\",",
"1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_PLL_DC_OK\", set(), set()))], \"logical\", Wire.cpu_clk_ok) B_FDV_1V8 = Output([{0,",
"[(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_off)) ], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr, default,",
"name, bus_addr): super(FPGA_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA_EVAL3) class ThunderX(Stateful_Node): PLL_DC_OK = Input([{0, 1}],",
"lambda _, thresh: {\"V_IN\" : [(0, max(thresh + 499, 2699))], \"SHDN\" : [{0}]}",
"1845)]}, \"\"), ({mgtavcc : [(873, 927)]}, \"\"), ({vtt : [(1164, 1236)]}, \"\"), ({mgtaux_l",
"default self.is_default = False self.current = [(default, default)] self.V_OUT = MAX20751.V_OUT(default) super(MAX20751, self).__init__(name,",
"= Input([(0, 1400)], \"power\") VDD_IO33 = Input([(0, 3600)], \"power\") states = (lambda ok,",
"\"power\") VID = Input([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1},",
"\"B_CDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) def __init__(self, name, bus_addr): super(BMC, self).__init__(name, bus_addr, BMC)",
"[(1746, 1854)]}, \"\"), ({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 :",
"Input([(0, 3300)], \"power\") VTT_DDR24 = Input([(0, 1400)], \"power\") VTT_DDR13 = Input([(0, 1400)], \"power\")",
"3600)], \"power\") CLK = Output([(0, 0), (3300, 3300), (0, 50)], [ Constraint([(0, 0),",
"INA226, [\"ina226_ddr_cpu_13\"]), (\"U31\", 0x45, INA226, [\"ina226_ddr_cpu_24\"]), #TODO: add real names of MAX15301s (\"IC12\",",
"({vtt : [(0, 0)]}, \"\"), ({mgtavcc : [(0, 0)]}, \"\"), ({vcco: [(0, 0)],",
"implicit_off = {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 2000)]} VMON1_ATT = Input([(0, 13900)], \"monitor\",",
"name: node.isppac_monitor(\"VCCINP\", name)) VCC = Input([(0, 4500)], \"power\", lambda node, name: node.isppac_monitor(\"VCCA\", name))",
"= {\"EN_2\"}), partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\", \"EN_2\"}, set())])), Constraint([(0, 0)], {\"VCC\" :",
"to match the schematics, so I know which supplies to connect.. :') VDD_DDR24",
"[ ({ok : [{0}]}, \"\") ], \"POWERED_DOWN\" : [] } ), \"POWERED_ON\" :",
"have stabilized by now\" %vdd), ({ok : [{1}]}, \"must have written pll_mul and",
"node: node.indep(\"OUT2\"))], \"power\") def __init__(self, name, bus_addr): super(PowerSupply, self).__init__(name, bus_addr, PowerSupply) class ISPPAC(Node):",
"= [(self.default, self.default)] return except State_Space_Error: pass try: intersect(states[self.VDDH.name], [(8500, 14000)]) self.current =",
"1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(int(voltage), int(voltage * 1.01)) super(MAX8869,",
"5000)], [Constraint([(5000, 5000)], {}, partial(Constraint.implicit, \"V5SB_PSU\", {}))], \"power\") BMC_VCC_3V3 = Output([(3300, 3300)], [Constraint([(3300,",
"ok : [{1}] }, { \"POWERED_DOWN\" : [ ({clk: [(0, 0), (3300, 3300),",
"== x2, x1 * 2 == x2 + 1), ([(\"VREF\", 0), (\"VRI\", 0)]))]),",
"node.isppac_monitor(\"VCCA\", name)) def __init__(self, name, bus_addr, device): self.device = device for i in",
"\"POWERED_ON\" : [ ({mgtaux_l : [(0, 0)], mgtaux_r : [(0, 0)]}, \"\"), ({vtt",
"Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{0}]}))], \"power\") V12_PSU = Output([(0, 12000)],",
"\"OUT11\", {(\"U26\", \"EN_PWR\")}), (\"en_vdd_ddrcpu24\", \"U20\", \"OUT12\", {(\"U30\", \"EN_PWR\")}), (\"vdd_ddrcpu13\", \"U26\", \"VOUT\", {(\"U20\", \"VMON9\"),",
"False)\" % (self.device) ] class MAX15301(Node): implicit_off = {\"EN\": [{0}], \"V_PWR\": [(0, 4400)]}",
"partial(Constraint.implicit, \"VOUT\", \"implicit_off\"))) return outputs class ISL(Node): implicit_off = {\"VCC\" : [(0, 4300)],",
"({clk: [(0, 0), (3300, 3300), (50, 50)]}, \"\"), ({vcc : [(873, 927)]}, \"\"),",
"0)] }, { \"POWERED_ON\" : [ ({ok : [{0}]}, \"\") ], \"POWERED_DOWN\" :",
"dependency_update= (Constraint.is_configured, [partial(Constraint.implicit, \"CLK\", \"implicit_on\"), partial(Constraint.explicit, \"CLK\", {}, set())])), Constraint([(0, 0), (3300, 3300),",
"ISPPAC(Node): implicit_off = {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 2000)]} VMON1_ATT = Input([(0, 13900)],",
"[(1200, 1200)], ############################# vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vtt :",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4\", name)) VMON4_ATT = Input([(0, 5734)], \"monitor\", lambda",
"\"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA,",
"VMON3_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON3_ATT\", name)) VMON4 = Input([(0,",
"True return [ \"init_device('%s', False)\" % (self.device) ] #EVAL 3 version of the",
"(\"oscillator\", \"VDD\")}), #(\"12v_psup\", \"main_psu\", \"V12_PSU\", {}), #add bmc (\"5v_psup\", \"main_psu\", \"V5_PSU\", {(\"U35\", \"VMON2_ATT\"),",
"\"logical\") V_OUT = lambda default : Output([(0, 1520)], [Constraint([], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)],",
"\"bmc\", \"C_PLL_DC_OK\", {(\"cpu\", \"PLL_DC_OK\")}), (\"en_vdd_ddrcpu13\", \"U20\", \"OUT11\", {(\"U26\", \"EN_PWR\")}), (\"en_vdd_ddrcpu24\", \"U20\", \"OUT12\", {(\"U30\",",
"class CPU_3(Stateful_Node): VDD33 = Input([(0, 4000)], \"power\") VDD = Input([(0, 2500)], \"power\") EN1",
": [(1746, 1854)]}, \"\"), ##### REGULAR TRANSITION STEP ###### #({vcco : [(1746, 1854)],",
"[(0, 6000)]}, partial(Constraint.implicit, name, \"implicit_off\")), Constraint([{0}], {\"VCC\": [(0, 4500)], \"VCC_IN\": [(0, 2000)]}, partial(Constraint.implicit,",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON3_ATT\", name)) VMON4 = Input([(0, 5734)], \"monitor\", lambda node,",
"2500)]} def update(self, states): try: intersect(states[self.VOUT.name], [(500, 3040)]) self.is_default = True except State_Space_Error:",
"= True return [ \"init_device('%s', False)\" % (self.device) ] #EVAL 3 version of",
": [(1746, 1854)], mgtaux_r : [(1746, 1854)], mgtavcc : [(873, 927)], ok :",
"1650)], \"power\") VDD_2V5_DDR13 = Input([(0, 3300)], \"power\") #actually, the CPU names its ddr",
": [{0}]}, { \"POWERED_DOWN\": [ ({en2 : [{1}]}, \"\"), ({vdd33: [(3000, 4000)], vdd:",
"{}), #add bmc (\"5v_psup\", \"main_psu\", \"V5_PSU\", {(\"U35\", \"VMON2_ATT\"), (\"U20\", \"VMON2_ATT\"), (\"U26\", \"VCC\"), (\"U30\",",
"z3 class INA226(Node): BUS = Input([{0, 1}], \"bus\") VS = Input([(0, 6000)], \"power\")",
"= lambda _, threshold: {\"V_IN\" : [(0, (max(int(threshold * 1.06) - 1, 2699)))],",
"{\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(Main_PSU, self).__init__(name, bus_addr, Main_PSU) class PowerSupply(Node):",
"= states[self.V_OUT.name] self.is_default = True return except State_Space_Error: self.is_default = False try: intersect(states[self.V_PWR.name],",
"1 and loop 2 will have different addresses... VCC = Input([(0, 4000)], \"power\")",
"= True return [ \"init_device('isl6334d_ddr_v', False)\" ] class CPU_3(Stateful_Node): VDD33 = Input([(0, 4000)],",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], \"logical\") VOUT =",
"\"power\") VTT_DDR24 = Input([(0, 1400)], \"power\") VTT_DDR13 = Input([(0, 1400)], \"power\") VDD_IO33 =",
": [{0, 1}], \"EN_VTT\" : [(0, 830)]} VCC = Input([(0, 6000)], \"power\") EN_PWR",
"2600)], vttddr24 : [(700, 800)], vttddr13 : [(700, 800)]}, \"%s should have stabilized",
"\"\\n\".join(commands)) return fun def configure(self): if self.configured: return [] else: self.configured = True",
"\"V_PWR\"), (\"IC11\", \"V_PWR\"), (\"IC12\", \"V_PWR\"), (\"U43\", \"EN_VTT\"), (\"U47\", \"EN_VTT\"), (\"U51\", \"VDDH\"), (\"IC13\", \"V_PWR\")}),",
"1.06), 2700), 5500)], \"V_EN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" :",
"ok : [{0}] }, { \"POWERED_ON\" : [ ({mgtaux_l : [(0, 0)], mgtaux_r",
"Input([{0, 1}], \"bus\") VDDH = Input([(0, 23000)], \"power\") #slave input power VR_ON =",
"{(\"U44\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR24\"), (\"U40\", \"VRI\")}), (\"b_cdv_1v8\", \"bmc\", \"B_CDV_1V8\", {(\"U26\", \"VID\"), (\"U30\", \"VID\")}),",
"match the schematics, so I know which supplies to connect.. :') VDD_DDR24 =",
": [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ##################################### ({mgtavcc : [(873, 927)]}, \"\"),",
"vttddr24 : [(0, 0)], vttddr13 : [(0, 0)], io33 : [(0, 0)] },",
"(0, 0)], vcc : [(0, 0)], io : [(0, 0)], aux : [(0,",
"Input([{0, 1}], \"logical\") V_OUT = lambda default : Output([(0, 1520)], [Constraint([], {\"VR_ON\": [{1}],",
"Wire.clock_config) def __init__(self, name, bus_addr): super(Oscillator, self).__init__(name, bus_addr, Oscillator) class SI5395(Node): implicit_on =",
"Input([(0, 6000)], \"power\") SHDN = Input([{0, 1}], \"logical\") V_OUT = lambda _, default,",
"\"VR_ON\")}), (\"mgtavtt_fpga\", \"U41\", \"V_OUT\", {(\"fpga\", \"MGTAVTT\")}), (\"en_mgtavcc_fpga\", \"U35\", \"OUT10\", {(\"U37\", \"VR_ON\")}), (\"mgtavcc_fpga\", \"U37\",",
"(\"U51\", \"VDDH\"), (\"IC13\", \"V_PWR\")}), (\"en_vcc1v8_fpga\", \"U35\", \"OUT15\", {(\"IC12\", \"EN\")}), (\"vcc1v8_fpga\", \"IC12\", \"V_OUT\", {(\"U35\",",
"(\"vdd_ddrfpga24\", \"U47\", \"VOUT\", {(\"U44\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR24\"), (\"U40\", \"VRI\")}), (\"b_cdv_1v8\", \"bmc\", \"B_CDV_1V8\", {(\"U26\",",
"\"V_EN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"V_EN\"",
"0), (3300, 3300), (50, 50)], io33 : [(3140, 3460)]}, \"wait for %s to",
"return def configure(self): if self.configured: return [] else: self.configured = True return [",
"4000)], \"power\") EN = Input([{0, 1}], \"logical\") EN_2 = Input([{0, 1}], \"logical\") VIN",
": [(0, 0)], vtt_ddr24 : [(0, 0)], vtt : [(0, 0)], mgtaux_l :",
"\"POWERED_DOWN\" : PowerState({ ok : [{0}], rst : [{0}], clk : [(0, 0),",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON12\", name)) VCC_IN = Input([(0, 6000)], \"power\",",
"(\"IC11\", 0x12, MAX15301, [1800, \"max15301_vadj_1v8\"]), #VADJ_1V8 p.91 (\"IC13\", 0x15, MAX15301, [900, \"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA",
"0)]}, \"\"), ({vcco: [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 : [(0, 0)],",
"\"IC16\", \"V_OUT\", {(\"U20\", \"VMON8_ATT\"), (\"U25\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR24\")}), (\"vtt_ddrcpu24\", \"U25\", \"VREF\", {(\"U20\", \"VMON12\"),",
"[] else: self.configured = True return [ \"init_device('%s', False)\" % self.device, \"init_device('%s', False)\"",
"vcc_2v5_ddr13, vcc_2v5_ddr24, vcc_ddr13, vcc_ddr24, vtt_ddr13, vtt_ddr24, vtt, mgtaux_l, mgtaux_r, mgtavcc: { \"POWERED_DOWN\" :",
"bus_addr): super(PowerSupply, self).__init__(name, bus_addr, PowerSupply) class ISPPAC(Node): implicit_off = {\"VCC\": [(0, 2600)], \"VCC_IN\":",
"\"VRI\")}), (\"vdd_core_en\", \"U20\", \"OUT6\", {(\"U34\", \"EN\")}), (\"vdd_core\", \"U34\", \"VOUT\", {(\"U20\", \"VMON4\"), (\"cpu\", \"VDD\")}),",
"\"VADJ_1V8\", \"VCCO_2V5_DDR13\", \"VCCO_2V5_DDR24\", \"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self,",
"\"U20\", \"OUT6\", {(\"U34\", \"EN\")}), (\"vdd_core\", \"U34\", \"VOUT\", {(\"U20\", \"VMON4\"), (\"cpu\", \"VDD\")}), (\"vdd_oct_en_l2\", \"U20\",",
"partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"SHDN\" : [{0}]}, partial(Constraint.implicit,",
"FOR EVAL 3 ###################### ({ddr24 : [(1200, 1200)], ddr24_2v5 : [(2400, 2600)], ddr13",
"0)], mgtaux_l : [(0, 0)], mgtaux_r : [(0, 0)], mgtavcc : [(0, 0)],",
"(\"vdd_ddrcpu24\", \"U30\", \"VOUT\", {(\"U20\", \"VMON10\"), (\"U31\", \"VBUS\"), (\"cpu\", \"VDD_DDR24\"), (\"U25\", \"VRI\")}), (\"vdd_core_en\", \"U20\",",
"2600)], ddr13 : [(1200, 1200)], ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570, 630)],",
"except State_Space_Error: pass class NCP(Node): implicit_on = {\"VRI\" : [(868, 3600)], \"VCC\" :",
"before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"})]))) outputs.append(Constraint([(0, 0)], {\"VID\": [{0, 1}, {0, 1}, {0, 1},",
"mgtavcc : [(873, 927)], ok : [{1}] }, { \"POWERED_DOWN\" : [ ({clk:",
"(\"vdd_ddrcpu13\", \"U26\", \"VOUT\", {(\"U20\", \"VMON9\"), (\"U27\", \"VBUS\"), (\"cpu\", \"VDD_DDR13\"), (\"U24\", \"VRI\")}), (\"vdd_ddrcpu24\", \"U30\",",
"\"power\") VOUT = lambda _, thresh : Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\" :",
"lambda node: node.indep(\"OUT2\"))], \"power\") def __init__(self, name, bus_addr): super(PowerSupply, self).__init__(name, bus_addr, PowerSupply) class",
"= Input([{0, 1}], \"logical\") V_PWR = Input([(0, 14000)], \"power\") V_OUT = lambda default",
": [(0, 2374)]} VCC = Input([(0, 6000)], \"power\") VRI = Input([(0, 6000)], \"power\")",
"[{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def __init__(self, name, bus_addr, voltage): self.implicit_on =",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON2_ATT\", name)) VMON3_ATT = Input([(0, 5734)], \"monitor\", lambda node,",
"(3300, 3300), (0, 0)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50,",
"node.isppac_monitor(\"VMON7_ATT\", name)) VMON8 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8\", name)) VMON8_ATT",
"+ 2)}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"})]))) outputs.append(Constraint([(0, 0)], {\"VID\": [{0, 1}, {0, 1},",
"3300), (50, 50)], {\"VDD\": [(2600, 3600)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(2600, 3600)]})), Constraint([(0, 0),",
"\"max20751_vccint_fpga\"]), #VCCINT_FPGA p 97 (\"U43\", 0x0, ISL, []), #VDD_DDRFPGA13 p 93 (\"U47\", 0x0,",
"(\"en_util33\", \"U35\", \"OUT6\", {(\"IC10\", \"EN\")}), (\"util33\", \"IC10\", \"V_OUT\", {(\"U35\", \"VMON3_ATT\"), (\"U44\", \"VS\"), (\"U48\",",
"0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(2600, 3600)]})),",
"(\"U44\", \"VS\"), (\"U48\", \"VS\"), (\"U37\", \"VDD33\"), (\"U41\", \"VDD33\"), (\"IC5\", \"V_IN\"), (\"IC6\", \"V_IN\"), (\"IC7\",",
"\"CLK\", \"implicit_on\"), partial(Constraint.explicit, \"CLK\", {}, set())])), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\":",
"\" stabilized\"), ({en1 : [{1}]}, \"\"), ({vdd: [(2000, 2200)]}, \"\") ], \"POWERED_ON\": []})",
": [(0, 3630)], \"VIN\" : [(0, 13200)], \"EN\" : [{0}]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\")),",
"bus_addr, PowerSupply) class ISPPAC(Node): implicit_off = {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 2000)]} VMON1_ATT",
"\"logical\") states = (lambda vdd33, vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd33: [(0,",
"list({0} for i in range(8 - len(binary))) for i in binary: multidim.append({int(i)}) return",
"\"POWERED_DOWN\" : PowerState({vdd33: [(0, 0)], vdd : [(0, 0)], en1 : [{0}], en2",
"name, bus_addr): super(PowerSupply, self).__init__(name, bus_addr, PowerSupply) class ISPPAC(Node): implicit_off = {\"VCC\": [(0, 2600)],",
"], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd : [(2300, 2600)], en1 : [{1}], en2",
"node.isppac_monitor(\"VMON4_ATT\", name)) VMON5 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5\", name)) VMON5_ATT",
"[]), (\"psu_cpu1\", 0x0, PSU, []), (\"main_psu\", 0x0, Main_PSU, []), (\"U20\", 0x60, ISPPAC, [\"pac_cpu\"]),",
"z3.Or(x1 * 2 == x2, x1 * 2 == x2 + 1), ([(\"VREF\",",
"threshold, device, loop1, loop2, l1_addr, l2_addr): self.configured = False self.is_default = False self.threshold",
"before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"}), partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, before_complete= {\"VCC\",",
"ddr24: [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], #not imposed by the cpu, but",
"0x0, ISL, []), #VDD_DDRCPU13 p 73 (\"U30\", 0x0, ISL, []), #VDD_DDRCPU24 p 75",
": [(0, 0)], vtt_ddr24 : [(0, 0)], vadj: [(0, 0)]}, \"\"), ({aux :",
"\"OUT9\", {(\"IC15\", \"V_EN\")}), #to NCP (\"2v5_cpu13\", \"IC15\", \"V_OUT\", {(\"U20\", \"VMON7_ATT\"), (\"U24\", \"VCC\"), (\"cpu\",",
"node, name: node.isppac_monitor(\"VMON12\", name)) VCC_IN = Input([(0, 6000)], \"power\", lambda node, name: node.isppac_monitor(\"VCCINP\",",
"{\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]} implicit_off = {\"VDD\":",
"\"power_bus\", \"BUS\", {(\"IC10\", \"BUS\"), (\"IC12\", \"BUS\"), (\"IC13\", \"BUS\"), (\"IC4\", \"BUS\"), (\"IC11\", \"BUS\"), (\"U34\",",
"[(0, 830)]} VCC = Input([(0, 6000)], \"power\") EN_PWR = Input([{0, 1}], \"logical\") EN_VTT",
"vcc_ddr24 : [(0, 0)], vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0, 0)], vtt",
"1}], \"logical\") states = (lambda vdd33, vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd33:",
"lambda node, name: node.isppac_monitor(\"VCCA\", name)) def __init__(self, name, bus_addr, device): self.device = device",
"[]), (\"main_psu\", 0x0, Main_PSU, []), (\"U20\", 0x60, ISPPAC, [\"pac_cpu\"]), #cpu ISPPAC (\"U35\", 0x61,",
"not wire in req: req[wire] = state else: req[wire] = state_union(state, req[wire]) print(req)",
"(\"U39\", 0x0, NCP, []), (\"U40\", 0x0, NCP, []), (\"U57\", 0x0, SI5395, [\"clk_main\"]), (\"U11\",",
"{self.VIN.name : [(self.threshold, 13200)], self.VCC.name : [(2900, 3630)]} def bus_req_off(self): return {self.VIN.name :",
"[ ({vdd: [(2300, 2400)]}, \"wait until \" + vdd + \" stabilized\"), ({en1",
"1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], ##### REGULAR VALUES: #######",
"5250)]) self.current = states[self.V_OUT.name] self.is_default = True return except State_Space_Error: self.is_default = False",
"1600)]) self.is_default = True except State_Space_Error: self.is_default = False class IR(Node): implicit_off =",
"= state else: req[wire] = state_union(state, req[wire]) print(req) return req def construct_dependency(self, name,",
"MAX20751, [900, \"max20751_mgtavcc_fpga\"]), #MGTACC_FPGA p 85 (\"U41\", 0x73, MAX20751, [1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA p 87",
"\"\"), ({vtt : [(1164, 1236)]}, \"\"), ({mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746,",
"3300), (0, 60)], {}, [], lambda node: node.indep(\"CLK\"))], \"clock\", Wire.clock_config) def __init__(self, name,",
"\"power\") #this is sys_1v8.... VCCINT = Input([(0, 1000)], \"power\") MGTAVCC = Input([(0, 1000)],",
"[]), (\"cpu\", 0x0, ThunderX, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP, []),",
"def __init__(self, name, bus_addr): super(NCP, self).__init__(name, bus_addr, NCP) class MAX8869(Node): implicit_on = lambda",
"[(0, 0), (3300, 3300), (0, 0)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\"))], \"clock\", Wire.clock_config) def __init__(self,",
"(\"IC13\", \"V_PWR\")}), (\"en_vcc1v8_fpga\", \"U35\", \"OUT15\", {(\"IC12\", \"EN\")}), (\"vcc1v8_fpga\", \"IC12\", \"V_OUT\", {(\"U35\", \"VMON11_ATT\"), (\"fpga\",",
"{(\"U35\", \"VMON4_ATT\"), (\"U39\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR13\")}), #add NCP nodes (\"vtt_ddrfpga13\", \"U39\", \"VREF\", {(\"fpga\",",
"= Output([(0, 6000)], [Constraint([(435, 1800)], {\"VRI\" : [(868, 3600)], \"VCC\" : [(2375, 5500)]},",
"stabilized by now\" %vdd), ###################################### ({ok : [{1}]}, \"must have written pll_mul and",
"\"V_PWR\"), (\"U43\", \"EN_VTT\"), (\"U47\", \"EN_VTT\"), (\"U51\", \"VDDH\"), (\"IC13\", \"V_PWR\")}), (\"en_vcc1v8_fpga\", \"U35\", \"OUT15\", {(\"IC12\",",
"= Output([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1},",
"\"OUT\" + str(number) output = Output([{0, 1}], [Constraint([{1, 0}], {\"VCC\": [(2800, 3960)], \"VCC_IN\":",
"def generate_output(self, number): name = \"OUT\" + str(number) output = Output([{0, 1}], [Constraint([{1,",
"= default self.is_default = False self.current = [(default, default)] self.V_OUT = MAX20751.V_OUT(default) super(MAX20751,",
"13200)], \"EN_2\" : [{0}]}, partial(Constraint.implicit, \"VOUT_2\", \"implicit_off_2\")), ], \"power\", Wire.ir_set) def __init__(self, name,",
"2600)], \"VCC_IN\": [(0, 6000)]}, partial(Constraint.implicit, name, \"implicit_off\")), Constraint([{0}], {\"VCC\": [(0, 4500)], \"VCC_IN\": [(0,",
"[(1450, 1550)]}, \"\"), ({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], ddr13 :",
"[{0}]}))], \"power\") def __init__(self, name, bus_addr): super(PSU, self).__init__(name, bus_addr, PSU) class Main_PSU(Node): EN",
"{(\"U35\", \"VMON3_ATT\"), (\"U44\", \"VS\"), (\"U48\", \"VS\"), (\"U37\", \"VDD33\"), (\"U41\", \"VDD33\"), (\"IC5\", \"V_IN\"), (\"IC6\",",
"False try: intersect(states[self.V_PWR.name], [(0, 4400)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass",
"#vcc_ddr13 : [(1140, 3400)], #vcc_ddr24 : [(1140, 3400)], ##### VALUES FOR EVAL 3:",
"\"VRI\")}), #vcco (\"vdd_ddrfpga24\", \"U47\", \"VOUT\", {(\"U44\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR24\"), (\"U40\", \"VRI\")}), (\"b_cdv_1v8\", \"bmc\",",
"{(\"U43\", \"VID\"), (\"U47\", \"VID\")}), (\"c_reset_n\", \"bmc\", \"C_RESET_N\", {(\"cpu\", \"CHIP_RESET_L\")}), (\"pll_dc_ok\", \"bmc\", \"C_PLL_DC_OK\", {(\"cpu\",",
"(\"U37\", \"VDD33\"), (\"U41\", \"VDD33\"), (\"IC5\", \"V_IN\"), (\"IC6\", \"V_IN\"), (\"IC7\", \"V_IN\"), (\"IC8\", \"V_IN\"), (\"IC9\",",
"name, \"implicit_off\"))], \"logical\", Wire.pin_set) setattr(self, name, output) def isppac_monitor(self, pinname, wire_name, multiplier =",
"\"power\") VCCINT_IO = Input([(0, 1000)], \"power\") VCCAUX = Input([(0, 2000)], \"power\") states =",
"bus_addr): self.is_default = False super(ISL, self).__init__(name, bus_addr, ISL) def update(self, states): try: intersect(states[self.VOUT.name],",
": set(filter(lambda x: not empty_intersection(x, req, states), req.keys()))}) class MAX20751(Node): implicit_on = {\"VR_ON\":",
"def fun(value, _, node=self, wire_name=wire_name, pinname=pinname, multiplier=multiplier): commands = node.configure() if list(value[0])[0] ==",
"1}], {}, partial(Constraint.explicit, \"B_CDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) def __init__(self, name, bus_addr): super(BMC,",
"3630)], \"VDDH\":[(8500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"VDD33\",",
"(\"psu_cpu0\", 0x0, PSU, []), (\"psu_cpu1\", 0x0, PSU, []), (\"main_psu\", 0x0, Main_PSU, []), (\"U20\",",
"13200)], \"EN\" : [{1}], \"BUS\": [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT\", {\"VCC\",",
"Input([(0, 1400)], \"power\") VDD_IO33 = Input([(0, 3600)], \"power\") states = (lambda ok, rst,",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON11_ATT\", name)) VMON12 = Input([(0, 5734)], \"monitor\", lambda node,",
"[(1200, 1200)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]},",
": [(0, 0)], vttddr13 : [(0, 0)], io33 : [(0, 0)] }, {",
"partial import z3 class INA226(Node): BUS = Input([{0, 1}], \"bus\") VS = Input([(0,",
"(\"U30\", \"VCC\"), (\"U43\", \"VCC\"), (\"U47\", \"VCC\")}), (\"5vsb_psup\", \"main_psu\", \"V5SB_PSU\", {(\"U20\", \"VCC_IN\"), (\"U35\", \"VCC_IN\")}),",
"\"VR_ON\")}), (\"mgtavcc_fpga\", \"U37\", \"V_OUT\", {(\"U35\", \"VMON7\"), (\"fpga\", \"MGTAVCC\")}), (\"en_vccint_fpga\", \"U35\", \"OUT9\", {(\"U51\", \"VR_ON\")}),",
"3 ########### ddr24: [(1200, 1200)], ddr13: [(1200, 1200)], ################################# ddr24_2v5 : [(2400, 2600)],",
"\"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd : [(2300, 2600)], en1 : [{1}],",
"True return except State_Space_Error: self.is_default = False try: intersect(states[self.V_PWR.name], [(0, 4400)]) self.current =",
"{ \"POWERED_DOWN\": [ ({clk: [(0, 0), (3300, 3300), (50, 50)], io33 : [(3140,",
"= Input([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1},",
"{(\"U48\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR13\"), (\"U39\", \"VRI\")}), #vcco (\"vdd_ddrfpga24\", \"U47\", \"VOUT\", {(\"U44\", \"VBUS\"), (\"fpga\",",
"\"VOUT\", {\"VCC\", \"VIN\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\"",
"return [ \"init_device('isl6334d_ddr_v', False)\" ] class CPU_3(Stateful_Node): VDD33 = Input([(0, 4000)], \"power\") VDD",
"0), (3300, 3300), (0, 0)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\"))], \"clock\", Wire.clock_config) def __init__(self, name,",
"(\"2v5_cpu24\", \"IC16\", \"V_OUT\", {(\"U20\", \"VMON8_ATT\"), (\"U25\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR24\")}), (\"vtt_ddrcpu24\", \"U25\", \"VREF\", {(\"U20\",",
"self.default = default self.is_default = False self.current = [(default, default)] self.V_OUT = MAX20751.V_OUT(default)",
"return except State_Space_Error: self.is_default = False try: intersect(states[self.VDD33.name], [(0, 2800)]) self.current = [(self.default,",
"self).__init__(name, bus_addr, PSU) class Main_PSU(Node): EN = Input([{0, 1}], \"logical\") V33_PSU = Output([(0,",
"\"BMC_VCC_3V3\", {}))], \"power\") V5_PSU = Output([(0, 5000)], [ Constraint([(5000, 5000)], {\"EN\": [{1}]}, partial(Constraint.implicit,",
"PSU, []), (\"main_psu\", 0x0, Main_PSU, []), (\"U20\", 0x60, ISPPAC, [\"pac_cpu\"]), #cpu ISPPAC (\"U35\",",
"(\"vccint_fpga\", \"U51\", \"V_OUT\", {(\"U35\", \"VMON6\"), (\"fpga\", \"VCCINT\")}), (\"en_sys_1v8\", \"U35\", \"OUT16\", {(\"IC7\", \"V_EN\")}), (\"sys_1v8\",",
"except State_Space_Error: self.configured = False return def configure(self): if self.configured: return [] else:",
"super(INA226, self).__init__(name, bus_addr, INA226) self.configured = False def ina_monitor(self, wire_name): def fun(value, states,",
": [{1}], clk : [(0, 0), (3300, 3300), (50, 50)], vdd : [(940,",
"\"implicit_off\")), ], \"power\", Wire.ir_set) VOUT_2 = lambda _, thresh : Output([(0, 3040)], [Constraint([(500,",
"(0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\")), Constraint([(0, 0), (3300, 3300), (0,",
"bus_req(self): return {} def bus_req_off(self): return {} def update(self, states): try: intersect(states[self.V_OUT.name], [(500,",
"\"CLK\", {(\"fpga\", \"CLK\")}), (\"en_mgtvccaux_l\", \"U35\", \"OUT11\", {(\"IC5\", \"SHDN\")}), (\"en_mgtvccaux_r\", \"U35\", \"OUT12\", {(\"IC6\", \"SHDN\")}),",
"fun(value, _, node=self, wire_name=wire_name, pinname=pinname, multiplier=multiplier): commands = node.configure() if list(value[0])[0] == 0:",
"range(0, 20): self.generate_output(i) super(ISPPAC, self).__init__(name, bus_addr, ISPPAC) self.configured = False def generate_output(self, number):",
"#MGTVCCAUX_L, p 88 (\"IC6\", 0x0, MAX8869, [1800]), #MGTVCCAUX_R, p 88 (\"IC7\", 0x0, MAX15053,",
"50)]}, partial(Constraint.implicit, \"CLK\", \"implicit_on\"), dependency_update= (Constraint.is_configured, [partial(Constraint.implicit, \"CLK\", \"implicit_on\"), partial(Constraint.explicit, \"CLK\", {}, set())])),",
"[(2600, 3600)]) except State_Space_Error: self.configured = False def configure(self): if self.configured: return []",
"stabilized\"), ({en1 : [{1}]}, \"\"), ({en2 : [{1}], vdd: [(2000, 2600)]}, \"\") ],",
"node: node.indep(\"OUT1\"))], \"power\") OUT2 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node:",
"v_max=0.08, device='%s', monitor='%s')\" % ( wire_name, node.device, pinname )) else: commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f,",
"(\"fpga\", \"VCCO_1V8\")}), #where to connect at fpga? additional vcco thingy? (\"en_sys_2v5_13\", \"U35\", \"OUT7\",",
"[partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN_2\"}, after_set = {\"EN_2\"}), partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\",",
"{\"V_IN\" : [(max(thresh + 500, 2700), 5500)], \"SHDN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")),",
"= Input([(0, 1000)], \"power\") VCCAUX = Input([(0, 2000)], \"power\") states = (lambda clk,",
"mgtavcc: { \"POWERED_DOWN\" : PowerState({ clk: [(0, 0), (3300, 3300), (0, 0)], vcc",
"[{0}]}, partial(Constraint.implicit, \"VOUT_2\", \"implicit_off_2\")), ], \"power\", Wire.ir_set) def __init__(self, name, bus_addr, threshold, device,",
"[{1}], \"V_PWR\": [(5500, 14000)]} BUS = Input([{0, 1}], \"bus\") EN = Input([{0, 1}],",
"name)) VMON8_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8_ATT\", name)) VMON9 =",
"VCC = Input([(0, 4000)], \"power\") EN = Input([{0, 1}], \"logical\") EN_2 = Input([{0,",
"1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit,",
"ddr bank voltage DDR02 (not 24), #but I adjusted it to match the",
"node, name: node.isppac_monitor(\"VMON8_ATT\", name)) VMON9 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON9\",",
"{\"VCC\": [(0, 2600)], \"VCC_IN\": [(0, 6000)]}, partial(Constraint.implicit, name, \"implicit_off\")), Constraint([{0}], {\"VCC\": [(0, 4500)],",
"en2: { \"POWERED_DOWN\" : PowerState({vdd33: [(0, 0)], vdd : [(0, 0)], en1 :",
"= Input([(0, 3400)], \"power\") VCCO_VTT_DDR13 = Input([(0, 2000)], \"power\") #replace VREF VCCO_VTT_DDR24 =",
"50)], vdd : [(940, 980)], vdd09 : [(870, 930)], vdd15 : [(1450, 1550)],",
"{self.V_PWR.name: [(0, 4400)]} def update(self, states): try: intersect(states[self.V_OUT.name], [(600, 5250)]) self.current = states[self.V_OUT.name]",
"0)], vttddr24 : [(0, 0)], vttddr13 : [(0, 0)], io33 : [(0, 0)]",
"Input([{0, 1}], \"logical\") VCCO_2V5_DDR24 = Input([(0, 3400)], \"power\") VCCO_2V5_DDR13 = Input([(0, 3400)], \"power\")",
"{}))], \"power\") V5_PSU = Output([(0, 5000)], [ Constraint([(5000, 5000)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V5_PSU\",",
"Constraint([(5000, 5000)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]},",
"set(filter(lambda x: not empty_intersection(x, req, states), req.keys()))}) class MAX20751(Node): implicit_on = {\"VR_ON\": [{1}],",
"\"VDDH\"), (\"U41\", \"VDDH\"), (\"IC10\", \"V_PWR\"), (\"IC11\", \"V_PWR\"), (\"IC12\", \"V_PWR\"), (\"U43\", \"EN_VTT\"), (\"U47\", \"EN_VTT\"),",
"[(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.explicit, \"CLK\", {\"VDD\", \"CLK_IN\"}, set())), Constraint([(0, 0),",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8_ATT\", name)) VMON9 = Input([(0, 5734)], \"monitor\",",
"Constraint([(3300, 3300)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit,",
"\"V_OUT\", {(\"fpga\", \"MGTAVTT\")}), (\"en_mgtavcc_fpga\", \"U35\", \"OUT10\", {(\"U37\", \"VR_ON\")}), (\"mgtavcc_fpga\", \"U37\", \"V_OUT\", {(\"U35\", \"VMON7\"),",
"\"VDD\"), (\"U11\", \"VDD\"), (\"U16\", \"VDD\"), (\"oscillator\", \"VDD\")}), #(\"12v_psup\", \"main_psu\", \"V12_PSU\", {}), #add bmc",
"if self.configured: return [] else: self.configured = True return [ \"init_device('isl6334d_ddr_v', False)\" ]",
"[ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\":",
"input power VR_ON = Input([{0, 1}], \"logical\") V_OUT = lambda default : Output([(0,",
": [{0}]}, { \"POWERED_DOWN\": [ ({vdd: [(2300, 2400)]}, \"wait until \" + vdd",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON1_ATT\", name, 0.4125)) VMON2_ATT = Input([(0, 5734)], \"monitor\", lambda",
"lambda node: node.indep(\"OUT1\"))], \"power\") OUT2 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda",
"{\"VDD\": [(0, 2599)]}))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Oscillator, self).__init__(name, bus_addr, Oscillator)",
"Input([(0, 3600)], \"power\") CLK = Output([(0, 0), (3300, 3300), (0, 50)], [ Constraint([(0,",
"until \" + vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"), ({vdd: [(2000,",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON9\", name)) VMON10 = Input([(0, 5734)],",
": [(868, 3600)], \"VCC\" : [(2375, 5500)]} implicit_off = {\"VRI\" : [(0, 868)],",
"Input([{0, 1}], \"logical\") V_OUT = lambda _, default, threshold: Output([(0, default)], [Constraint([(default, default)],",
"Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr):",
"[(1746, 1854)]}, \"\"), ({ok: [{1}]}, \"\") ], \"POWERED_ON\" : []} )}, [\"CLK\", \"CLK_OK\",",
"[(0, 2599)], \"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]} VDD = Input([(0, 3600)],",
"node, name: node.isppac_monitor(\"VMON5\", name)) VMON5_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5_ATT\",",
"[(self.threshold, 13200)], self.VCC.name : [(2900, 3630)]} def bus_req_off(self): return {self.VIN.name : [(0, self.threshold-1)],",
"real names of MAX15301s (\"IC12\", 0x11, MAX15301, [1800, \"max15301_vcc1v8_fpga\"]), #VCCIV8_FPGA p 92 (\"IC10\",",
"), \"POWERED_ON\" : PowerState({ ok : [{1}], rst : [{1}], clk : [(0,",
"1854)], vadj : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)],",
"5000)], {}, partial(Constraint.implicit, \"V5SB_PSU\", {}))], \"power\") BMC_VCC_3V3 = Output([(3300, 3300)], [Constraint([(3300, 3300)], {},",
"VMON11_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11_ATT\", name)) VMON12 = Input([(0,",
"v_max=%.3f, device='%s', monitor='%s')\" % ( wire_name, 0.00095 * list(value[0])[0] * multiplier, 0.00105 *",
"\"VCC\" : [(2375, 5500)]} implicit_off = {\"VRI\" : [(0, 868)], \"VCC\" : [(0,",
"+ 2)}, after_set={\"EN_PWR\"}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"}), partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i +",
"1700)], vtt_ddr24 : [(550, 1700)], vtt : [(1164, 1236)], mgtaux_l : [(1746, 1854)],",
": [(2400, 2600)], #not imposed by the cpu, but the connected DIMM SPD",
"def fun(value, states, node=self, wire=wire_name): if states[node.VS.name][0][0] > 2700 and states[node.VS.name][0][0] < 5500:",
"3 ###################### ({ddr24 : [(1200, 1200)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1200,",
"3300), (50, 50)]}, partial(Constraint.explicit, \"CLK\", {\"VDD\", \"CLK_IN\"}, set())), Constraint([(0, 0), (3300, 3300), (0,",
"[(8500, 14000)]) self.current = [(self.default, self.default)] return except State_Space_Error: pass class Oscillator(Node): VDD",
"1854)]}, \"\"), ({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400,",
"name)) VMON9 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON9\", name)) VMON10 =",
"0x12, MAX15301, [1800, \"max15301_vadj_1v8\"]), #VADJ_1V8 p.91 (\"IC13\", 0x15, MAX15301, [900, \"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA p",
"\"VCC\")}), (\"5vsb_psup\", \"main_psu\", \"V5SB_PSU\", {(\"U20\", \"VCC_IN\"), (\"U35\", \"VCC_IN\")}), (\"bmc_vcc_3v3\", \"main_psu\", \"BMC_VCC_3V3\", {(\"U20\", \"VCC\"),",
"(0, 50)], \"clock\") CLK_OK = Input([{0, 1}], \"logical\") VCCO_2V5_DDR24 = Input([(0, 3400)], \"power\")",
"[{0}]}, { \"POWERED_DOWN\": [ ({vdd: [(2300, 2400)]}, \"wait until \" + vdd +",
"= Input([{0, 1}], \"logical\") states = (lambda vdd, en1, en2: { \"POWERED_DOWN\" :",
"name: node.isppac_monitor(\"VMON4_ATT\", name)) VMON5 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5\", name))",
"#(\"12v_psup\", \"main_psu\", \"V12_PSU\", {}), #add bmc (\"5v_psup\", \"main_psu\", \"V5_PSU\", {(\"U35\", \"VMON2_ATT\"), (\"U20\", \"VMON2_ATT\"),",
"MGTAVCC = Input([(0, 1000)], \"power\") MGTAVTT = Input([(0, 1300)], \"power\") VCCINT_IO = Input([(0,",
"14000)], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\"},",
"{}, partial(Constraint.explicit, \"B_CDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) def __init__(self, name, bus_addr): super(BMC, self).__init__(name,",
"\"logical\", Wire.vid_set) B_CDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
": binary_multidimensional(i + 2), \"VCC\" : [(4750, 5250)], \"EN_PWR\" : [{1}], \"EN_VTT\" :",
"class BMC(Node): B_CLOCK_FLOL = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_CLOCK_FLOL\", set(), set()))],",
"\"\"), ({io : [(0, 0)]}, \"\"), ({vcc : [(0, 0)]}, \"\"), ], \"POWERED_DOWN\"",
"default : Output([(0, 5250)], [Constraint([], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_on),",
"beforehand\"), ({rst : [{1}]}, \"\") ], \"POWERED_ON\" : [] }) }, [\"PLL_DC_OK\", \"CHIP_RESET_L\",",
"ISPPAC (\"U35\", 0x61, ISPPAC, [\"pac_fpga\"]), #fpga ISPPAC (\"U44\", 0x40, INA226, [\"ina226_ddr_fpga_24\"]), (\"U48\", 0x41,",
"self.implicit_on(int(voltage * 1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(int(voltage), int(voltage *",
"0x0, ISL, []), #DD_DDRFPGA24 p 95 (\"IC5\", 0x0, MAX8869, [1800]), #MGTVCCAUX_L, p 88",
"vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], ##### REGULAR VALUES: ####### #vcc_ddr13",
"{\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\": [{0}]}))], \"power\") V5SB_PSU",
"Output([(0, 1600)], isl_outputs(), \"power\") def __init__(self, name, bus_addr): self.is_default = False super(ISL, self).__init__(name,",
"[\"pac_cpu\"]), #cpu ISPPAC (\"U35\", 0x61, ISPPAC, [\"pac_fpga\"]), #fpga ISPPAC (\"U44\", 0x40, INA226, [\"ina226_ddr_fpga_24\"]),",
"\"V_OUT\", {(\"U20\", \"VMON8_ATT\"), (\"U25\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR24\")}), (\"vtt_ddrcpu24\", \"U25\", \"VREF\", {(\"U20\", \"VMON12\"), (\"cpu\",",
": [(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ({mgtavcc :",
"\"VCCINT_IO\")}), (\"en_vdd_ddrfpga13\", \"U35\", \"OUT18\", {(\"U43\", \"EN_PWR\")}), (\"en_vdd_ddrfpga24\", \"U35\", \"OUT19\", {(\"U47\", \"EN_PWR\")}), (\"vdd_ddrfpga13\", \"U43\",",
"self.is_default = False try: intersect(states[self.VIN.name], [(self.threshold, 13200)]) except State_Space_Error: self.configured = False return",
"p.91 (\"IC13\", 0x15, MAX15301, [900, \"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA p 99 (\"U34\", 0x0, IR, [4500,",
"in binary: multidim.append({int(i)}) return multidim def isl_outputs(): outputs = [] for i in",
"\"power\") states = (lambda ok, rst, clk, vdd, vdd09, vdd15, ddr24, ddr24_2v5, ddr13,",
"5500)], \"SHDN\" : [{1}]} implicit_off = lambda _, thresh: {\"V_IN\" : [(0, max(thresh",
"vdd15 : [(1450, 1550)]}, \"\"), #### REGULAR TRANSITION STEP ######### #({ddr24 : [(1425,",
"{\"VID\" : binary_multidimensional(i + 2)}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"})]))) outputs.append(Constraint([(0, 0)], {\"VID\": [{0,",
"(\"U35\", 0x61, ISPPAC, [\"pac_fpga\"]), #fpga ISPPAC (\"U44\", 0x40, INA226, [\"ina226_ddr_fpga_24\"]), (\"U48\", 0x41, INA226,",
"ddr24 : [(0, 0)], ddr24_2v5 : [(0, 0)], ddr13 : [(0, 0)], ddr13_2v5",
"+ 2), \"VCC\" : [(4750, 5250)], \"EN_PWR\" : [{1}], \"EN_VTT\" : [(870, 14000)]},",
"\"%s should have stabilized by now\" %vdd), #### FOR EVAL 3 ###################### ({ddr24",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8\", name)) VMON8_ATT = Input([(0, 5734)], \"monitor\", lambda",
"[(2900, 3630)]} def bus_req_off(self): return {self.VIN.name : [(0, self.threshold-1)], self.VCC.name : [(0, 2500)]}",
"class MAX20751(Node): implicit_on = {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]} implicit_off = {\"VR_ON\":",
"{(\"U26\", \"EN_PWR\")}), (\"en_vdd_ddrcpu24\", \"U20\", \"OUT12\", {(\"U30\", \"EN_PWR\")}), (\"vdd_ddrcpu13\", \"U26\", \"VOUT\", {(\"U20\", \"VMON9\"), (\"U27\",",
"implicit_on = lambda _, threshold: {\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\"",
"830)]} VCC = Input([(0, 6000)], \"power\") EN_PWR = Input([{0, 1}], \"logical\") EN_VTT =",
"list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return fun def configure(self): if self.configured: return",
"{\"EN\": [{0}], \"V_PWR\": [(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_off)) ], \"power\", Wire.voltage_set) def __init__(self,",
"\"ir3581\" bus = \"power\" BUS = Input([{0, 1}], \"bus\") #loop 1 and loop",
"device self.configured = False super(SI5395, self).__init__(name, bus_addr, SI5395) def update(self, states): try: intersect(states[self.VDD.name],",
": [{0}], en2 : [{1}]}, { \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ],",
"TRANSITION STEP ######### #({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], ddr13 :",
"super(PowerConsumer, self).__init__(name, bus_addr, PowerConsumer) class CPU2(Stateful_Node): VDD = Input([(0, 2600)], \"power\") EN1 =",
"\"U20\", \"OUT9\", {(\"IC15\", \"V_EN\")}), #to NCP (\"2v5_cpu13\", \"IC15\", \"V_OUT\", {(\"U20\", \"VMON7_ATT\"), (\"U24\", \"VCC\"),",
"wire_name, 0.00095 * list(value[0])[0] * multiplier, 0.00105 * list(value[0])[0] * multiplier, node.device, pinname",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4\", name)) VMON4_ATT = Input([(0, 5734)], \"monitor\",",
"[{1}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\":",
"[]), (\"U57\", 0x0, SI5395, [\"clk_main\"]), (\"U11\", 0x0, SI5395, [\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]),",
"lambda node: node.indep(\"OUT0\"))], \"power\") OUT1 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda",
"names its ddr bank voltage DDR02 (not 24), #but I adjusted it to",
"\"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"SHDN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\",",
"\"EN\"}, set())])), Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)], \"EN\"",
"[Constraint([(435, 1800)], {\"VRI\" : [(868, 3600)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_on\"),",
"\"V5SB_PSU\", {}))], \"power\") BMC_VCC_3V3 = Output([(3300, 3300)], [Constraint([(3300, 3300)], {}, partial(Constraint.implicit, \"BMC_VCC_3V3\", {}))],",
"\"V_OUT\", {(\"U20\", \"VMON7_ATT\"), (\"U24\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR13\")}), (\"vtt_ddrcpu13\", \"U24\", \"VREF\", {(\"U20\", \"VMON11\"), (\"cpu\",",
"\"VOUT\", {(\"U20\", \"VMON10\"), (\"U31\", \"VBUS\"), (\"cpu\", \"VDD_DDR24\"), (\"U25\", \"VRI\")}), (\"vdd_core_en\", \"U20\", \"OUT6\", {(\"U34\",",
"\"V_EN\")}), (\"sys_1v8\", \"IC7\", \"V_OUT\", {(\"U35\", \"VMON12\"), (\"fpga\", \"VCCO_1V8\")}), #where to connect at fpga?",
"({vdd33: [(3000, 4000)], vdd: [(2000, 2500)]}, \"wait until \" + vdd + \"",
"= (lambda vdd, en1, en2: { \"POWERED_DOWN\" : PowerState({vdd : [(0, 0)], en1",
"addresses... VCC = Input([(0, 4000)], \"power\") EN = Input([{0, 1}], \"logical\") EN_2 =",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4_ATT\", name)) VMON5 = Input([(0, 5734)], \"monitor\",",
": [(0, 0)], mgtaux_r : [(0, 0)], mgtavcc : [(0, 0)], ok :",
"1854)]}, \"\"), ({ok: [{1}]}, \"\") ], \"POWERED_ON\" : []} )}, [\"CLK\", \"CLK_OK\", \"VCCINT\",",
"= Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_PSUP_ON\", set(), set()))], \"logical\", Wire.gpio_set) C_RESET_N",
"VOUT_2 = lambda _, thresh : Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\" : [(2900,",
"lambda node, name, inputs: node.construct_dependency(name, node.construct_req(inputs))), Constraint([{0}], lambda node, inputs: {}, lambda node,",
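# --- illustrative sketch (not from the original source) ------------------
# What the isl_outputs() loop computes: step i sits roughly 6.25 mV below
# the previous setpoint starting at 1600 mV, and binary_multidimensional(i + 2)
# is the 8-bit VID pattern for that setpoint.  Names below are local to
# this sketch.
def _vid_table_preview():
    for i in (0, 1, 176):
        lo = math.floor(1600 - i * 6.25)
        hi = math.ceil(1600 - i * 6.25)
        print(i, (lo, hi), binary_multidimensional(i + 2))

if __name__ == "__main__":
    _vid_table_preview()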
"\"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.explicit, \"CLK\", {\"VDD\", \"CLK_IN\"}, set())), Constraint([(0,",
"\"\") ], \"POWERED_ON\" : [] }) }, [\"PLL_DC_OK\", \"CHIP_RESET_L\", \"PLL_REF_CLK\", \"VDD\", \"VDD_09\", \"VDD_15\",",
"[(700, 800)]}, \"%s should have stabilized by now\" %vdd), ({ok : [{1}]}, \"must",
"6000)], \"power\", lambda node, name: node.isppac_monitor(\"VCCINP\", name)) VCC = Input([(0, 4500)], \"power\", lambda",
"V_IN = Input([(0, 6000)], \"power\") SHDN = Input([{0, 1}], \"logical\") V_OUT = lambda",
"\"V_EN\")}), (\"sys_2v5_24\", \"IC9\", \"V_OUT\", {(\"U35\", \"VMON5_ATT\"), (\"U40\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR24\")}), #add NCP nodes",
"MGTVCCAUX_L = Input([(0, 1900)], \"power\") MGTVCCAUX_R = Input([(0, 1900)], \"power\") VCCO_1V8 = Input([(0,",
"vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1140, 3400)], vcc_ddr24",
"partial(Constraint.implicit, \"CLK\", \"implicit_on\"), dependency_update= (Constraint.is_configured, [partial(Constraint.implicit, \"CLK\", \"implicit_on\"), partial(Constraint.explicit, \"CLK\", {}, set())])), Constraint([(0,",
"{(\"U57\", \"CLK_IN\")}), (\"clk_main\", \"U57\", \"CLK\", {(\"U11\", \"CLK_IN\"), (\"U16\", \"CLK_IN\")}), (\"clock_flol\", \"bmc\", \"B_CLOCK_FLOL\", {(\"fpga\",",
"lambda node, inputs: node.construct_req(inputs), lambda node, name, inputs: node.construct_dependency(name, node.construct_req(inputs))), Constraint([{0}], lambda node,",
"bus_addr, Main_PSU) class PowerSupply(Node): OUT0 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda",
"pinname )) return (True, \"\\n\".join(commands)) return fun def configure(self): if self.configured: return []",
"{0, 1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_FDV_1V8\", set(), set()))], \"logical\", Wire.vid_set)",
"\"EN\" : [{1}], \"BUS\": [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\",",
"[(0, 5500)], \"V_EN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def __init__(self, name,",
"commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095 * list(value[0])[0], 0.00105 *",
"self).__init__(name, bus_addr, INA226) self.configured = False def ina_monitor(self, wire_name): def fun(value, states, node=self,",
"3300), (50, 50)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]},",
"3400)], #vcc_ddr24 : [(1140, 3400)], ##### VALUES FOR EVAL 3: ##### vcc_ddr13 :",
"[{1}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\":",
"6000)], \"power\") VRI = Input([(0, 6000)], \"power\") #reference input VREF = Output([(0, 6000)],",
"\"OUT10\", {(\"U37\", \"VR_ON\")}), (\"mgtavcc_fpga\", \"U37\", \"V_OUT\", {(\"U35\", \"VMON7\"), (\"fpga\", \"MGTAVCC\")}), (\"en_vccint_fpga\", \"U35\", \"OUT9\",",
"= Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_PLL_DC_OK\", set(), set()))], \"logical\", Wire.cpu_clk_ok) B_FDV_1V8",
"\"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"EN\": [{0}], \"V_PWR\": [(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_off))",
"(Constraint.is_default, \\ [partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, after_set={\"EN_PWR\"}, before_complete= {\"VCC\", \"EN_VTT\",",
"name)) VCC = Input([(0, 4500)], \"power\", lambda node, name: node.isppac_monitor(\"VCCA\", name)) def __init__(self,",
"0x62]), #VDD_CORE, VDD_OCT p 77 (\"U26\", 0x0, ISL, []), #VDD_DDRCPU13 p 73 (\"U30\",",
"def update(self, states): try: intersect(states[self.VOUT.name], [(500, 1600)]) self.is_default = True except State_Space_Error: self.is_default",
"[(0, 0)], en1 : [{0}], en2 : [{0}]}, { \"POWERED_ON\": [ ({en1 :",
"{\"VDD33\", \"VDDH\", \"BUS\"}, {\"VR_ON\"}, after_set = {\"VR_ON\"}), partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\", \"VR_ON\"},",
"MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA_EVAL3, []), (\"cpu\", 0x0, ThunderX_EVAL3, []),",
"[(550, 1700)], vtt_ddr24 : [(550, 1700)], vtt : [(1164, 1236)], mgtaux_l : [(1746,",
"[(2600, 3600)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(2600, 3600)]})), Constraint([(0, 0), (3300, 3300), (0, 0)],",
"{\"V_IN\" : [(0, 5500)], \"V_EN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def",
"EVAL 3: ##### vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], ############################# vtt_ddr13",
"= False def ina_monitor(self, wire_name): def fun(value, states, node=self, wire=wire_name): if states[node.VS.name][0][0] >",
"mgtaux_r : [(0, 0)]}, \"\"), ({vtt : [(0, 0)]}, \"\"), ({mgtavcc : [(0,",
"2500)], en1 : [{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({en2 : [{1}]},",
"(\"fpga\", \"MGTVCCAUX_L\")}), (\"mgtvccaux_r\", \"IC6\", \"V_OUT\", {(\"U35\", \"VMON9\"), (\"fpga\", \"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\", \"U35\", \"OUT17\", {(\"IC11\",",
"Output([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"NCP (\"2v5_cpu13\", \"IC15\", \"V_OUT\", {(\"U20\", \"VMON7_ATT\"), (\"U24\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR13\")}), (\"vtt_ddrcpu13\", \"U24\", \"VREF\",",
"######### #ddr13 : [(1425, 1575)], #ddr24: [(1425, 1575)], ####### FOR EVAL 3 ###########",
"\"V_OUT\", {(\"fpga\", \"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\", \"U35\", \"OUT13\", {(\"IC13\", \"EN\")}), (\"vccintio_bram_fpga\", \"IC13\", \"V_OUT\", {(\"U35\", \"VMON10\"),",
"\"max20751_mgtavcc_fpga\"]), #MGTACC_FPGA p 85 (\"U41\", 0x73, MAX20751, [1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA p 87 (\"U51\", 0x70,",
"inputs: unite_dict(req, node.bus_req()) print(req) return req def construct_req_off(self, inputs): req = {} for",
"(\"en_vdd_ddrcpu24\", \"U20\", \"OUT12\", {(\"U30\", \"EN_PWR\")}), (\"vdd_ddrcpu13\", \"U26\", \"VOUT\", {(\"U20\", \"VMON9\"), (\"U27\", \"VBUS\"), (\"cpu\",",
": [(2375, 5500)]} implicit_off = {\"VRI\" : [(0, 868)], \"VCC\" : [(0, 2374)]}",
"self.implicit_off_2 = self.implicit_off_2(threshold) self.VOUT = self.VOUT(threshold) self.VOUT_2 = self.VOUT_2(threshold) super(IR, self).__init__(name, bus_addr, IR)",
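# --- illustrative sketch (not from the original source) ------------------
# The regulator classes above bind their class-level threshold lambdas to
# concrete values per instance; a minimal standalone version of the pattern:
class _CurryDemo(object):
    implicit_off = staticmethod(lambda thresh: {"VIN": [(0, thresh - 1)]})

    def __init__(self, threshold):
        self.threshold = threshold
        self.implicit_off = _CurryDemo.implicit_off(threshold)

if __name__ == "__main__":
    print(_CurryDemo(4500).implicit_off)  # {'VIN': [(0, 4499)]}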
"2000)], \"power\") states = (lambda clk, ok, vcc, io, aux, vcco, vadj, vcc_2v5_ddr13,",
"changes classes of ThunderX and FPGA to EVAL 3 versions enzian_nodes_EVAL3 = [",
"partial(Constraint.implicit, \"CLK\", \"implicit_off\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr, device): self.device = device",
"\"OUT8\", { (\"IC9\", \"V_EN\")}), (\"sys_2v5_24\", \"IC9\", \"V_OUT\", {(\"U35\", \"VMON5_ATT\"), (\"U40\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR24\")}),",
"the schematics, so I know which supplies to connect.. :') VDD_DDR24 = Input([(0,",
"BUS = Output([{0, 1}], [ Constraint([{1}], lambda node, inputs: node.construct_req(inputs), lambda node, name,",
"state else: req[wire] = state_union(state, req[wire]) print(req) return req def construct_dependency(self, name, req):",
"+ vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"), ({vdd: [(2000, 2200)]}, \"\")",
"[2500]), #SYS_2V5_13 (\"IC9\", 0x0, MAX15053, [2500]), #SYS_2V5_24 (\"IC15\", 0x0, MAX15053, [2500]), #2V5_CPU13 p",
"\"VCC\"), (\"cpu\", \"VDD_2V5_DDR24\")}), (\"vtt_ddrcpu24\", \"U25\", \"VREF\", {(\"U20\", \"VMON12\"), (\"cpu\", \"VTT_DDR24\")}), (\"pll_ref_clk\", \"U11\", \"CLK\",",
"[(0, 14000)]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\"))) return outputs class ISL(Node): implicit_off = {\"VCC\" :",
": [(1200, 1200)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1200, 1200)], ddr13_2v5 :",
"return {self.V_PWR.name: [(0, 4400)]} def update(self, states): try: intersect(states[self.V_OUT.name], [(600, 5250)]) self.current =",
"Output([(0, 5000)], [ Constraint([(5000, 5000)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\" : [{1}]})), Constraint([(0,",
"\"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Clock, self).__init__(name, bus_addr, Clock) class PowerConsumer(Node): node_string",
"update(self, states): try: intersect(states[self.VDD.name], [(2600, 3600)]) except State_Space_Error: self.configured = False def configure(self):",
"the THUNDERX: Comments indicate changes class ThunderX_EVAL3(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L",
"{(\"U20\", \"VMON5\"), (\"cpu\", \"VDD_09\")}), (\"en_1v5_vdd_oct\", \"U20\", \"OUT8\", {(\"IC4\", \"EN\")}), (\"1v5_vdd_oct\", \"IC4\", \"V_OUT\", {(\"U20\",",
": [(700, 800)], vttddr13 : [(700, 800)], io33 : [(3140, 3460)], }, {",
"[]}) }, [\"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU2, self).__init__(name, bus_addr, \"POWERED_DOWN\",",
"{(\"U35\", \"VMON11_ATT\"), (\"fpga\", \"VCCAUX\")}), (\"en_util33\", \"U35\", \"OUT6\", {(\"IC10\", \"EN\")}), (\"util33\", \"IC10\", \"V_OUT\", {(\"U35\",",
"vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0, 0)], vtt : [(0, 0)], mgtaux_l",
"CLK = Output([(0, 0), (3300, 3300), (0, 50)], [ Constraint([(0, 0), (3300, 3300),",
"self.current = states[self.V_OUT.name] self.is_default = True return except State_Space_Error: self.is_default = False try:",
"inputs: node.construct_dependency(name, node.construct_req(inputs))), Constraint([{0}], lambda node, inputs: {}, lambda node, name, inputs: node.construct_dependency(name,",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5_ATT\", name)) VMON6 = Input([(0, 5734)], \"monitor\",",
"VDD_2V5_DDR24 = Input([(0, 3300)], \"power\") VTT_DDR24 = Input([(0, 1400)], \"power\") VTT_DDR13 = Input([(0,",
"name)) VMON6 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON6\", name)) VMON7 =",
"[\"ina226_ddr_cpu_24\"]), #TODO: add real names of MAX15301s (\"IC12\", 0x11, MAX15301, [1800, \"max15301_vcc1v8_fpga\"]), #VCCIV8_FPGA",
"{\"EN\" : [{1}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"OUT\",",
"0x72, MAX20751, [900, \"max20751_mgtavcc_fpga\"]), #MGTACC_FPGA p 85 (\"U41\", 0x73, MAX20751, [1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA p",
"1600)], isl_outputs(), \"power\") def __init__(self, name, bus_addr): self.is_default = False super(ISL, self).__init__(name, bus_addr,",
": [(1746, 1854)], vadj : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 :",
"False self.current = [(default, default)] self.V_OUT = MAX15301.V_OUT(default) super(MAX15301, self).__init__(name, bus_addr, MAX15301) def",
"states = (lambda ok, rst, clk, vdd, vdd09, vdd15, ddr24, ddr24_2v5, ddr13, ddr13_2v5,",
"of the FPGA, comments indicate changes class FPGA_EVAL3(Stateful_Node): CLK = Input([(0, 0), (3300,",
": [(870, 930)], vdd15 : [(1450, 1550)], ####### REGULAR VALUES ######### #ddr13 :",
"p 73 (\"U30\", 0x0, ISL, []), #VDD_DDRCPU24 p 75 (\"U37\", 0x72, MAX20751, [900,",
"\"EN_PWR\"}), partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"})]))) outputs.append(Constraint([(0,",
"\"\"), ({vdd33: [(3000, 4000)], vdd: [(2000, 2500)]}, \"wait until \" + vdd +",
"Wire.pin_set) setattr(self, name, output) def isppac_monitor(self, pinname, wire_name, multiplier = 1): def fun(value,",
"name: node.ina_monitor(name)) def __init__(self, name, bus_addr, device): self.device = device super(INA226, self).__init__(name, bus_addr,",
"name, bus_addr): super(BMC, self).__init__(name, bus_addr, BMC) self.configured = False def configure(self): if self.configured:",
"class MAX15053(Node): implicit_on = lambda _, threshold: {\"V_IN\" : [(max(int(threshold * 1.06), 2700),",
"lambda node, name, inputs: node.construct_dependency(name, node.construct_req_off(inputs))) ], \"bus\") def __init__(self, name, bus_addr): super(Bus,",
"VCCO_2V5_DDR24 = Input([(0, 3400)], \"power\") VCCO_2V5_DDR13 = Input([(0, 3400)], \"power\") VCCO_VCC_DDR24 = Input([(0,",
"Input([(0, 2500)], \"power\") EN1 = Input([{0, 1}], \"logical\") EN2 = Input([{0, 1}], \"logical\")",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON5\", name)) VMON5_ATT = Input([(0, 5734)], \"monitor\",",
"0x0, FPGA_EVAL3, []), (\"cpu\", 0x0, ThunderX_EVAL3, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0,",
"MAX20751.implicit_off)), ], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr, default, device): self.device = device",
"Stateful_Node, intersect, State_Space_Error, unite_dict, state_union, SET, empty_intersection import math from functools import partial",
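# --- illustrative sketch (not from the original source) ------------------
# The update() methods above decide "is the device at its default?" by
# intersecting the observed range of an output wire with its legal on-range.
# A standalone approximation of that interval intersection (the real
# intersect/State_Space_Error come from the framework):
class _StateSpaceError(Exception):
    pass

def _intersect_demo(ranges_a, ranges_b):
    out = [(max(a, c), min(b, d))
           for (a, b) in ranges_a for (c, d) in ranges_b
           if max(a, c) <= min(b, d)]
    if not out:
        raise _StateSpaceError()
    return out

if __name__ == "__main__":
    print(_intersect_demo([(600, 5250)], [(900, 900)]))  # -> [(900, 900)]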
"(\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_nodes = [ (\"power_bus\",",
"I know which supplies to connect.. :') VDD_DDR24 = Input([(0, 1650)], \"power\") VDD_2V5_DDR24",
"3400)], \"power\") VCCO_VCC_DDR24 = Input([(0, 3400)], \"power\") VCCO_VTT_DDR13 = Input([(0, 2000)], \"power\") #replace",
"(0, 0)], {\"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\"))], \"clock\",",
"[(1746, 1854)], mgtaux_r : [(1746, 1854)], mgtavcc : [(873, 927)], ok : [{1}]",
"Constraint( [(voltage_min, voltage_max)], \\ {\"VID\" : binary_multidimensional(i + 2), \"VCC\" : [(4750, 5250)],",
"Input([(0, 13900)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON1_ATT\", name, 0.4125)) VMON2_ATT = Input([(0, 5734)],",
"_, thresh: {\"V_IN\" : [(0, max(thresh + 499, 2699))], \"SHDN\" : [{0}]} V_IN",
"(\"pll_ref_clk\", \"U11\", \"CLK\", {(\"cpu\", \"PLL_REF_CLK\")}), (\"bus\", \"power_bus\", \"BUS\", {(\"IC10\", \"BUS\"), (\"IC12\", \"BUS\"), (\"IC13\",",
"ISPPAC, [\"pac_fpga\"]), #fpga ISPPAC (\"U44\", 0x40, INA226, [\"ina226_ddr_fpga_24\"]), (\"U48\", 0x41, INA226, [\"ina226_ddr_fpga_13\"]), (\"U27\",",
"(\"IC13\", \"BUS\"), (\"IC4\", \"BUS\"), (\"IC11\", \"BUS\"), (\"U34\", \"BUS\"), (\"U37\", \"BUS\"), (\"U41\", \"BUS\"), (\"U51\",",
": [(1746, 1854)], mgtavcc : [(873, 927)], ok : [{1}] }, { \"POWERED_DOWN\"",
"after_set = {\"EN_2\"}), partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\", \"EN_2\"}, set())])), Constraint([(0, 0)], {\"VCC\"",
"Input([(0, 2000)], \"power\") #this is sys_1v8.... VCCINT = Input([(0, 1000)], \"power\") MGTAVCC =",
"self).__init__(name, bus_addr, MAX20751) def bus_req(self): return {} def bus_req_off(self): return {} def update(self,",
"self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA) #EVAL 3 version of the FPGA, comments indicate changes",
"0)], ddr13 : [(0, 0)], ddr13_2v5 : [(0, 0)], vttddr24 : [(0, 0)],",
"node, name: node.isppac_monitor(\"VCCINP\", name)) VCC = Input([(0, 4500)], \"power\", lambda node, name: node.isppac_monitor(\"VCCA\",",
"\"VR_ON\"}, set())])), Constraint([(0, 0)], {\"VR_ON\": [{0}], \"VDD33\":[(0, 3630)], \"VDDH\":[(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_off)),",
"v_min=0, v_max=0.08, device='%s', monitor='%s')\" % ( wire_name, node.device, pinname )) else: commands.append(\"wait_for_voltage('%s', v_min=%.3f,",
"(\"en_mgtvccaux_l\", \"U35\", \"OUT11\", {(\"IC5\", \"SHDN\")}), (\"en_mgtvccaux_r\", \"U35\", \"OUT12\", {(\"IC6\", \"SHDN\")}), (\"mgtvccaux_l\", \"IC5\", \"V_OUT\",",
": [(0, 0)]}, \"\"), ({vcc : [(0, 0)]}, \"\"), ], \"POWERED_DOWN\" : []",
": [(thresh, 13200)], \"EN\" : [{1}], \"BUS\": [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit,",
": [(870, 930)], vdd15 : [(1450, 1550)]}, \"\"), ({ddr24 : [(1425, 1575)], ddr24_2v5",
"* list(value[0])[0], node.device )) return fun def configure(self): if self.configured: return [] else:",
"Node, Input, Output, Constraint, Wire, PowerState, Stateful_Node, intersect, State_Space_Error, unite_dict, state_union, SET, empty_intersection",
"Input([{0, 1}], \"logical\") EN_VTT = Input([(0, 12000)], \"power\") VID = Input([{0, 1}, {0,",
"have different addresses... VCC = Input([(0, 4000)], \"power\") EN = Input([{0, 1}], \"logical\")",
": [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN_2\"}, after_set",
"= \"ir3581\" bus = \"power\" BUS = Input([{0, 1}], \"bus\") #loop 1 and",
"super(ThunderX, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX) #EVAL 3 version of the THUNDERX: Comments indicate",
"\"VIN\" : [(0, 13200)], \"EN_2\" : [{0}]}, partial(Constraint.implicit, \"VOUT_2\", \"implicit_off_2\")), ], \"power\", Wire.ir_set)",
"State_Space_Error: self.is_default = False try: intersect(states[self.V_PWR.name], [(0, 4400)]) self.current = [(self.default, self.default)] return",
"V ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700, 800)], vttddr13 : [(700, 800)],",
"\"\"), ({io : [(873, 927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"), ##### REGULAR",
"12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]},",
"[(0, 2500)], \"VIN\" : [(0, thresh-1)], \"EN_2\" : [{0}]} device = \"ir3581\" bus",
"changes class FPGA_EVAL3(Stateful_Node): CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK_OK",
"\"power\") VDD_DDR13 = Input([(0, 1650)], \"power\") VDD_2V5_DDR13 = Input([(0, 3300)], \"power\") #actually, the",
"\"U35\", \"OUT15\", {(\"IC12\", \"EN\")}), (\"vcc1v8_fpga\", \"IC12\", \"V_OUT\", {(\"U35\", \"VMON11_ATT\"), (\"fpga\", \"VCCAUX\")}), (\"en_util33\", \"U35\",",
"partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_on), state_update = Constraint.default_state), Constraint([(600, 5250)], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)],",
"VDD_IO33 = Input([(0, 3600)], \"power\") states = (lambda ok, rst, clk, vdd, vdd09,",
"\"IC12\", \"V_OUT\", {(\"U35\", \"VMON11_ATT\"), (\"fpga\", \"VCCAUX\")}), (\"en_util33\", \"U35\", \"OUT6\", {(\"IC10\", \"EN\")}), (\"util33\", \"IC10\",",
"[(1450, 1550)], ####### REGULAR VALUES ######### #ddr13 : [(1425, 1575)], #ddr24: [(1425, 1575)],",
"{\"VCC\", \"EN_VTT\", \"EN_PWR\"}), partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, before_complete= {\"VCC\", \"EN_VTT\",",
"[([(0, 12000)], {}, [], lambda node: node.indep(\"OUT1\"))], \"power\") OUT2 = Output([(0, 12000)], [([(0,",
"(\"vcc1v8_fpga\", \"IC12\", \"V_OUT\", {(\"U35\", \"VMON11_ATT\"), (\"fpga\", \"VCCAUX\")}), (\"en_util33\", \"U35\", \"OUT6\", {(\"IC10\", \"EN\")}), (\"util33\",",
"({vcco: [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 : [(0, 0)], vcc_ddr13 :",
"\"POWERED_DOWN\", CPU_3) class PSU(Node): EN = Input([{0, 1}], \"logical\") OUT = Output([(0, 12000)],",
"connect at fpga? additional vcco thingy? (\"en_sys_2v5_13\", \"U35\", \"OUT7\", { (\"IC8\", \"V_EN\")}), (\"sys_2v5_13\",",
"PowerState({vdd33: [(0, 0)], vdd : [(0, 0)], en1 : [{0}], en2 : [{1}]},",
"[{0}]}))], \"power\") def __init__(self, name, bus_addr): super(Main_PSU, self).__init__(name, bus_addr, Main_PSU) class PowerSupply(Node): OUT0",
"return [] else: self.configured = True return [ \"init_device('%s', False)\" % (self.device) ]",
"\"OUT7\", { (\"IC8\", \"V_EN\")}), (\"sys_2v5_13\", \"IC8\", \"V_OUT\", {(\"U35\", \"VMON4_ATT\"), (\"U39\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR13\")}),",
"(\"IC8\", \"V_EN\")}), (\"sys_2v5_13\", \"IC8\", \"V_OUT\", {(\"U35\", \"VMON4_ATT\"), (\"U39\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR13\")}), #add NCP",
"\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095 * list(value[0])[0], 0.00105 *",
"1}], \"logical\") CHIP_RESET_L = Input([{0, 1}], \"logical\") PLL_REF_CLK = Input([(0, 0), (3300, 3300),",
"node.indep(\"OUT0\"))], \"power\") OUT1 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT1\"))],",
": Output([(0, 5250)], [Constraint([], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_on), state_update",
"\"VR_ON\")}), (\"vccint_fpga\", \"U51\", \"V_OUT\", {(\"U35\", \"VMON6\"), (\"fpga\", \"VCCINT\")}), (\"en_sys_1v8\", \"U35\", \"OUT16\", {(\"IC7\", \"V_EN\")}),",
"\"V_OUT\", {(\"U35\", \"VMON5_ATT\"), (\"U40\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR24\")}), #add NCP nodes (\"vtt_ddrfpga24\", \"U40\", \"VREF\",",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8\", name)) VMON8_ATT = Input([(0, 5734)], \"monitor\",",
"VMON4 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4\", name)) VMON4_ATT = Input([(0,",
"\"V_PWR\"), (\"IC12\", \"V_PWR\"), (\"U43\", \"EN_VTT\"), (\"U47\", \"EN_VTT\"), (\"U51\", \"VDDH\"), (\"IC13\", \"V_PWR\")}), (\"en_vcc1v8_fpga\", \"U35\",",
"V12_PSU = Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\"",
"(lambda clk, ok, vcc, io, aux, vcco, vadj, vcc_2v5_ddr13, vcc_2v5_ddr24, vcc_ddr13, vcc_ddr24, vtt_ddr13,",
"EN = Input([{0, 1}], \"logical\") V33_PSU = Output([(0, 3300)], [ Constraint([(3300, 3300)], {\"EN\":",
"{} for node, _ in inputs: unite_dict(req, node.bus_req()) print(req) return req def construct_req_off(self,",
"[{1}]}, \"must have written pll_mul and sys_pll_mul beforehand\"), ({rst : [{1}]}, \"\") ],",
"VDD_15 = Input([(0, 1650)], \"power\") VDD_DDR13 = Input([(0, 1650)], \"power\") VDD_2V5_DDR13 = Input([(0,",
"partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"})]))) outputs.append(Constraint([(0, 0)],",
"PowerConsumer(Node): node_string = \"<V_IN> V_IN\" V_IN = Input([(0, 12000)], \"power\") def __init__(self, name,",
"(self.device) ] #EVAL 3 version of the Enzian nodes, only changes classes of",
"nodes (\"vtt_ddrfpga13\", \"U39\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR13\")}), (\"en_sys_2v5_24\", \"U35\", \"OUT8\", { (\"IC9\", \"V_EN\")}), (\"sys_2v5_24\",",
": [(570, 630)], vttddr13 : [(570, 630)], io33 : [(3140, 3460)], }, {",
"inputs: node.construct_req(inputs), lambda node, name, inputs: node.construct_dependency(name, node.construct_req(inputs))), Constraint([{0}], lambda node, inputs: {},",
": [(1164, 1236)], mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746, 1854)], mgtavcc :",
"\"U35\", \"OUT18\", {(\"U43\", \"EN_PWR\")}), (\"en_vdd_ddrfpga24\", \"U35\", \"OUT19\", {(\"U47\", \"EN_PWR\")}), (\"vdd_ddrfpga13\", \"U43\", \"VOUT\", {(\"U48\",",
"0x0, PSU, []), (\"psu_cpu1\", 0x0, PSU, []), (\"main_psu\", 0x0, Main_PSU, []), (\"U20\", 0x60,",
"(\"U37\", 0x72, MAX20751, [900, \"max20751_mgtavcc_fpga\"]), #MGTACC_FPGA p 85 (\"U41\", 0x73, MAX20751, [1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA",
"dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN_2\"}, after_set = {\"EN_2\"}), partial(Constraint.explicit,",
"Output([{0, 1}], [Constraint([{1, 0}], {\"VCC\": [(2800, 3960)], \"VCC_IN\": [(2250, 5500)]}, partial(Constraint.explicit, name, {\"VCC\",",
"* list(value[0])[0] * multiplier, 0.00105 * list(value[0])[0] * multiplier, node.device, pinname )) return",
"0x0, BMC, []), (\"U24\", 0x0, NCP, []), (\"U25\", 0x0, NCP, []), (\"U39\", 0x0,",
"= [] for i in range(0, 177): voltage_min = math.floor(1600 - i *",
"construct_req_off(self, inputs): req = {} for node, _ in inputs: node_req = node.bus_req_off()",
"\"CLK\", \"implicit_off\")), Constraint([(0, 0), (3300, 3300), (0, 0)], {\"CLK_IN\": [(0, 0), (3300, 3300),",
"self.generate_output(i) super(ISPPAC, self).__init__(name, bus_addr, ISPPAC) self.configured = False def generate_output(self, number): name =",
"= Input([(0, 14000)], \"power\") V_OUT = lambda default : Output([(0, 5250)], [Constraint([], {\"EN\":",
"= device self.loop1 = loop1 self.loop2 = loop2 self.l1_addr = l1_addr self.l2_addr =",
"= Output([(0, 3300), (0, 60)], [([(0, 3300), (0, 60)], {}, [], lambda node:",
"71 (\"fpga\", 0x0, FPGA_EVAL3, []), (\"cpu\", 0x0, ThunderX_EVAL3, []), (\"bmc\", 0x0, BMC, []),",
"ISL) def update(self, states): try: intersect(states[self.VOUT.name], [(500, 1600)]) self.is_default = True except State_Space_Error:",
"\"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA)",
"\"power\") VDD_15 = Input([(0, 1650)], \"power\") VDD_DDR13 = Input([(0, 1650)], \"power\") VDD_2V5_DDR13 =",
"self.loop1, \"init_device('%s', False)\" % self.loop2 ] class FPGA(Stateful_Node): CLK = Input([(0, 0), (3300,",
"[]}), \"POWERED_ON\" : PowerState({vdd : [(2300, 2600)], en1 : [{1}], en2 : [{0}]},",
"thresh : Output([(0, 3040)], [Constraint([(500, 3040)], {\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh,",
"VID = Input([{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
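# --- illustrative sketch (not from the original source) ------------------
# construct_req_off() above merges per-node bus requirements wire by wire
# with state_union; a standalone interval-union to illustrate the merge:
def _state_union_demo(a, b):
    merged = sorted(a + b)
    out = [merged[0]]
    for lo, hi in merged[1:]:
        if lo <= out[-1][1]:
            out[-1] = (out[-1][0], max(out[-1][1], hi))
        else:
            out.append((lo, hi))
    return out

if __name__ == "__main__":
    print(_state_union_demo([(0, 2500)], [(0, 4400)]))  # -> [(0, 4400)]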
"\"bus\") EN = Input([{0, 1}], \"logical\") V_PWR = Input([(0, 14000)], \"power\") V_OUT =",
"(max(int(threshold * 1.06) - 1, 2699)))], \"V_EN\" : [{0}]} V_IN = Input([(0, 6000)],",
"#slave input power VR_ON = Input([{0, 1}], \"logical\") V_OUT = lambda default :",
"self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU_3) class PSU(Node): EN = Input([{0, 1}], \"logical\") OUT =",
"IR(Node): implicit_off = lambda _, thresh : {\"VCC\" : [(0, 2500)], \"VIN\" :",
"node.device )) return fun def configure(self): if self.configured: return [] else: self.configured =",
"[(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ##### FOR EVAL 3: ################### ({vcco :",
"def bus_req_off(self): return {self.V_PWR.name: [(0, 4400)]} def update(self, states): try: intersect(states[self.V_OUT.name], [(600, 5250)])",
"5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_on\"), complex_constraints= [(lambda x1, x2: z3.Or(x1 * 2 == x2,",
"VCCO_1V8 = Input([(0, 2000)], \"power\") #this is sys_1v8.... VCCINT = Input([(0, 1000)], \"power\")",
"Input([{0, 1}], \"logical\") V_OUT = lambda _, default, thresh: Output([(0, thresh)], [Constraint([(default, default)],",
"2700), 5500)], \"V_EN\" : [{1}]} implicit_off = lambda _, threshold: {\"V_IN\" : [(0,",
"(\"U47\", \"VCC\")}), (\"5vsb_psup\", \"main_psu\", \"V5SB_PSU\", {(\"U20\", \"VCC_IN\"), (\"U35\", \"VCC_IN\")}), (\"bmc_vcc_3v3\", \"main_psu\", \"BMC_VCC_3V3\", {(\"U20\",",
"(\"sys_2v5_24\", \"IC9\", \"V_OUT\", {(\"U35\", \"VMON5_ATT\"), (\"U40\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR24\")}), #add NCP nodes (\"vtt_ddrfpga24\",",
"0)], {\"EN\": [{0}], \"V_PWR\": [(0, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_off)) ], \"power\", Wire.voltage_set) def",
"vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1200, 1200)], vcc_ddr24",
"\"B_CLOCK_FLOL\", set(), set()))], \"logical\", Wire.fpga_clk_ok) B_PSUP_ON = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit,",
"inputs): req = {} for node, _ in inputs: node_req = node.bus_req_off() for",
"VADJ_1V8 = Input([(0, 2000)], \"power\") #not found in fpga boot sequ; filled in",
"OUT2 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT2\"))], \"power\") def",
"* 1.06) - 1, 2699)))], \"V_EN\" : [{0}]} V_IN = Input([(0, 6000)], \"power\")",
"1}], \"logical\") VOUT = Output([(0, 1600)], isl_outputs(), \"power\") def __init__(self, name, bus_addr): self.is_default",
"voltages MGTVCCAUX_L = Input([(0, 1900)], \"power\") MGTVCCAUX_R = Input([(0, 1900)], \"power\") VCCO_1V8 =",
"partial(Constraint.implicit, \"V5_PSU\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(Main_PSU, self).__init__(name, bus_addr, Main_PSU)",
"version of the Enzian nodes, only changes classes of ThunderX and FPGA to",
"def isppac_monitor(self, pinname, wire_name, multiplier = 1): def fun(value, _, node=self, wire_name=wire_name, pinname=pinname,",
"in range(0, 177): voltage_min = math.floor(1600 - i * 6.25) voltage_max = math.ceil(1600",
"class FPGA(Stateful_Node): CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK_OK =",
"return [ \"init_device('%s', False)\" % (self.device) ] class MAX15301(Node): implicit_off = {\"EN\": [{0}],",
"have stabilized by now\" %vdd), ###################################### ({ok : [{1}]}, \"must have written pll_mul",
"5250)], \"EN_PWR\" : [{1}], \"EN_VTT\" : [(870, 14000)]}, {}, \\ dependency_update= (Constraint.is_default, \\",
"\"VCC_IN\": [(0, 2000)]} VMON1_ATT = Input([(0, 13900)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON1_ATT\", name,",
": [{1}], \"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\",",
"FPGA_EVAL3, []), (\"cpu\", 0x0, ThunderX_EVAL3, []), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP,",
": [(0, 0)], en1 : [{0}], en2 : [{1}]}, { \"POWERED_ON\": [ ({en1",
"] class Clock(Node): CLK = Output([(0, 3300), (0, 60)], [([(0, 3300), (0, 60)],",
"bus_addr): super(BMC, self).__init__(name, bus_addr, BMC) self.configured = False def configure(self): if self.configured: return",
"VDD_09 = Input([(0, 945)], \"power\") VDD_15 = Input([(0, 1650)], \"power\") VDD_DDR13 = Input([(0,",
"node, inputs: {}, lambda node, name, inputs: node.construct_dependency(name, node.construct_req_off(inputs))) ], \"bus\") def __init__(self,",
"############################# vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vtt : [(1164, 1236)],",
"{(\"U41\", \"VR_ON\")}), (\"mgtavtt_fpga\", \"U41\", \"V_OUT\", {(\"fpga\", \"MGTAVTT\")}), (\"en_mgtavcc_fpga\", \"U35\", \"OUT10\", {(\"U37\", \"VR_ON\")}), (\"mgtavcc_fpga\",",
"\"psu_cpu1\", \"OUT\", {(\"U35\", \"VMON1_ATT\"), (\"U37\", \"VDDH\"), (\"U41\", \"VDDH\"), (\"IC10\", \"V_PWR\"), (\"IC11\", \"V_PWR\"), (\"IC12\",",
"Input([(0, 1000)], \"power\") VCCAUX = Input([(0, 2000)], \"power\") states = (lambda clk, ok,",
"PLL_REF_CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") VDD = Input([(0, 1210)],",
"\"CLK\", {(\"cpu\", \"PLL_REF_CLK\")}), (\"bus\", \"power_bus\", \"BUS\", {(\"IC10\", \"BUS\"), (\"IC12\", \"BUS\"), (\"IC13\", \"BUS\"), (\"IC4\",",
"2600)], vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], vtt_ddr13 : [(550, 1700)],",
"= lambda default : Output([(0, 5250)], [Constraint([], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]}, partial(Constraint.implicit,",
"930)], vdd15 : [(1450, 1550)], ####### REGULAR VALUES ######### #ddr13 : [(1425, 1575)],",
"1}], \"EN_VTT\" : [(0, 830)]} VCC = Input([(0, 6000)], \"power\") EN_PWR = Input([{0,",
"Constraint([(0, 0)], {\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)], \"EN_2\" : [{0}]},",
"super(ISL, self).__init__(name, bus_addr, ISL) def update(self, states): try: intersect(states[self.VOUT.name], [(500, 1600)]) self.is_default =",
"MAX15053) def binary_multidimensional(decimal): binary = bin(decimal)[2:] #remove 0b prefix multidim = list({0} for",
"Constraint([(0, 0), (3300, 3300), (0, 0)], {\"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]},",
"pass class NCP(Node): implicit_on = {\"VRI\" : [(868, 3600)], \"VCC\" : [(2375, 5500)]}",
"[{0}], en2 : [{0}]}, { \"POWERED_ON\": [ ({en1 : [{0}]}, \"\") ], \"POWERED_DOWN\":",
"in range(0, 20): self.generate_output(i) super(ISPPAC, self).__init__(name, bus_addr, ISPPAC) self.configured = False def generate_output(self,",
": [(3140, 3460)]}, \"wait for %s to stabilize\" %(io33)), ({vdd : [(940, 980)]},",
"[\"clk_main\"]), (\"U11\", 0x0, SI5395, [\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []),",
"\"IC11\", \"V_OUT\", {(\"fpga\", \"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\", \"U35\", \"OUT13\", {(\"IC13\", \"EN\")}), (\"vccintio_bram_fpga\", \"IC13\", \"V_OUT\", {(\"U35\",",
"FPGA) #EVAL 3 version of the FPGA, comments indicate changes class FPGA_EVAL3(Stateful_Node): CLK",
"4000)], vdd : [(2000, 2500)], en1 : [{1}], en2 : [{0}]}, { \"POWERED_DOWN\":",
"], \"POWERED_DOWN\" : [] }), \"POWERED_ON\" : PowerState({ clk: [(0, 0), (3300, 3300),",
"{}, dependency_update=(Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\",",
"MAX15301, [1500, \"max15301_15_vdd_oct\"]), #1V5_VDD_OCT p.70 (\"IC11\", 0x12, MAX15301, [1800, \"max15301_vadj_1v8\"]), #VADJ_1V8 p.91 (\"IC13\",",
"(\"U24\", \"VRI\")}), (\"vdd_ddrcpu24\", \"U30\", \"VOUT\", {(\"U20\", \"VMON10\"), (\"U31\", \"VBUS\"), (\"cpu\", \"VDD_DDR24\"), (\"U25\", \"VRI\")}),",
"({vdd: [(2300, 2400)]}, \"wait until \" + vdd + \" stabilized\"), ({en1 :",
"vcco thingy? (\"en_sys_2v5_13\", \"U35\", \"OUT7\", { (\"IC8\", \"V_EN\")}), (\"sys_2v5_13\", \"IC8\", \"V_OUT\", {(\"U35\", \"VMON4_ATT\"),",
"class ThunderX_EVAL3(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0, 1}], \"logical\") PLL_REF_CLK",
"(\"fpga\", \"VCCO_VCC_DDR13\"), (\"U39\", \"VRI\")}), #vcco (\"vdd_ddrfpga24\", \"U47\", \"VOUT\", {(\"U44\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR24\"), (\"U40\",",
"[(0, 0)], vdd09 : [(0, 0)], vdd15 : [(0, 0)], ddr24 : [(0,",
"{}, lambda node, name, inputs: node.construct_dependency(name, node.construct_req_off(inputs))) ], \"bus\") def __init__(self, name, bus_addr):",
"12000)], \"power\") def __init__(self, name, bus_addr): super(PowerConsumer, self).__init__(name, bus_addr, PowerConsumer) class CPU2(Stateful_Node): VDD",
"\"VOUT\", {(\"U20\", \"VMON9\"), (\"U27\", \"VBUS\"), (\"cpu\", \"VDD_DDR13\"), (\"U24\", \"VRI\")}), (\"vdd_ddrcpu24\", \"U30\", \"VOUT\", {(\"U20\",",
"2 == x2 + 1), ([(\"VREF\", 0), (\"VRI\", 0)]))]), Constraint([(0, 0)], {\"VRI\" :",
"vcco : [(0, 0)], vadj : [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24",
": [(0, 0)], mgtaux_l : [(0, 0)], mgtaux_r : [(0, 0)], mgtavcc :",
"( wire, 0.00095 * list(value[0])[0], 0.00105 * list(value[0])[0], node.device )) return (True, \"\\n\".join(commands))",
"self.loop2 = loop2 self.l1_addr = l1_addr self.l2_addr = l2_addr self.implicit_off = self.implicit_off(threshold) self.implicit_off_2",
"[{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\"}, {\"VR_ON\"}, after_set =",
"{}, partial(Constraint.explicit, \"C_PLL_DC_OK\", set(), set()))], \"logical\", Wire.cpu_clk_ok) B_FDV_1V8 = Output([{0, 1}, {0, 1},",
"ISL, []), #VDD_DDRCPU13 p 73 (\"U30\", 0x0, ISL, []), #VDD_DDRCPU24 p 75 (\"U37\",",
"], \"power\", Wire.ir_set) def __init__(self, name, bus_addr, threshold, device, loop1, loop2, l1_addr, l2_addr):",
"self).__init__(name, bus_addr, Bus) def construct_req(self, inputs): req = {} for node, _ in",
"\"VDD33\": [0, 2800], \"VDDH\": [(0, 8499)]} VDD33 = Input([(0, 4000)], \"power\") BUS =",
"else: self.configured = True return [ \"init_device('%s', False)\" % (self.device) ] class Clock(Node):",
"1}], [Constraint([{1, 0}], {\"VCC\": [(2800, 3960)], \"VCC_IN\": [(2250, 5500)]}, partial(Constraint.explicit, name, {\"VCC\", \"VCC_IN\"},",
"[]), (\"U20\", 0x60, ISPPAC, [\"pac_cpu\"]), #cpu ISPPAC (\"U35\", 0x61, ISPPAC, [\"pac_fpga\"]), #fpga ISPPAC",
"(\"fpga\", 0x0, FPGA, []), (\"cpu\", 0x0, ThunderX, []), (\"bmc\", 0x0, BMC, []), (\"U24\",",
"630)], vttddr13 : [(570, 630)]}, \"%s should have stabilized by now\" %vdd), ######################################",
"[(0, 0)], ddr13_2v5 : [(0, 0)], vttddr24 : [(0, 0)], vttddr13 : [(0,",
": [(3140, 3460)], }, { \"POWERED_DOWN\": [ ({clk: [(0, 0), (3300, 3300), (50,",
"\"VMON9\"), (\"U27\", \"VBUS\"), (\"cpu\", \"VDD_DDR13\"), (\"U24\", \"VRI\")}), (\"vdd_ddrcpu24\", \"U30\", \"VOUT\", {(\"U20\", \"VMON10\"), (\"U31\",",
"= Input([(0, 3300)], \"power\") VTT_DDR24 = Input([(0, 1400)], \"power\") VTT_DDR13 = Input([(0, 1400)],",
"(self.device) ] class MAX15301(Node): implicit_off = {\"EN\": [{0}], \"V_PWR\": [(0, 4400)]} implicit_on =",
"{ \"VRI\" : [(0, 3600)], \"VCC\" : [(0, 2374)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\"))], \"power\")",
"[(0, 3630)], \"VIN\" : [(0, 13200)], \"EN_2\" : [{0}]}, partial(Constraint.implicit, \"VOUT_2\", \"implicit_off_2\")), ],",
"#actually, the CPU names its ddr bank voltage DDR02 (not 24), #but I",
"1900)], \"power\") MGTVCCAUX_R = Input([(0, 1900)], \"power\") VCCO_1V8 = Input([(0, 2000)], \"power\") #this",
"[Constraint([], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_on), state_update= Constraint.default_state), Constraint([(500,",
"+ \" stabilized\"), ({en1 : [{1}]}, \"\"), ({en2 : [{1}], vdd: [(2000, 2600)]},",
"\"POWERED_ON\" : PowerState({vdd33: [(3000, 4000)], vdd : [(2000, 2500)], en1 : [{1}], en2",
"vadj : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], #####",
"(\"en_sys_2v5_13\", \"U35\", \"OUT7\", { (\"IC8\", \"V_EN\")}), (\"sys_2v5_13\", \"IC8\", \"V_OUT\", {(\"U35\", \"VMON4_ATT\"), (\"U39\", \"VCC\"),",
"1854)], mgtavcc : [(873, 927)], ok : [{1}] }, { \"POWERED_DOWN\" : [",
"0)], vttddr13 : [(0, 0)], io33 : [(0, 0)] }, { \"POWERED_ON\" :",
"for i in range(8 - len(binary))) for i in binary: multidim.append({int(i)}) return multidim",
"name)) VMON12 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON12\", name)) VCC_IN =",
"OUT0 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT0\"))], \"power\") OUT1",
"0x11, MAX15301, [1800, \"max15301_vcc1v8_fpga\"]), #VCCIV8_FPGA p 92 (\"IC10\", 0x1B, MAX15301, [3300, \"max15301_util_3v3\"]), #UTIL_3V3",
"_, threshold: {\"V_IN\" : [(0, (max(int(threshold * 1.06) - 1, 2699)))], \"V_EN\" :",
"0), (3300, 3300), (50, 50)]}, \"\"), ({vcc : [(873, 927)]}, \"\"), ({io :",
"(\"U26\", \"VCC\"), (\"U30\", \"VCC\"), (\"U43\", \"VCC\"), (\"U47\", \"VCC\")}), (\"5vsb_psup\", \"main_psu\", \"V5SB_PSU\", {(\"U20\", \"VCC_IN\"),",
"[(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ({mgtavcc : [(873,",
"\"V_PWR\": [(5500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update=(Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\"}, {\"EN\"},",
"#not found in fpga boot sequ; filled in like VCCO_VCC_DDR voltages MGTVCCAUX_L =",
": [{0}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_off\")), ], \"power\") def __init__(self, name, bus_addr, voltage): self.implicit_on",
"0)]}, \"\"), ({aux : [(0, 0)]}, \"\"), ({io : [(0, 0)]}, \"\"), ({vcc",
"800)]}, \"%s should have stabilized by now\" %vdd), ({ok : [{1}]}, \"must have",
"\"logical\") V33_PSU = Output([(0, 3300)], [ Constraint([(3300, 3300)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\":",
": [(2400, 2600)], vttddr24 : [(570, 630)], vttddr13 : [(570, 630)]}, \"%s should",
"{}))], \"power\") BMC_VCC_3V3 = Output([(3300, 3300)], [Constraint([(3300, 3300)], {}, partial(Constraint.implicit, \"BMC_VCC_3V3\", {}))], \"power\")",
"0}], {\"VCC\": [(2800, 3960)], \"VCC_IN\": [(2250, 5500)]}, partial(Constraint.explicit, name, {\"VCC\", \"VCC_IN\"}, set())), Constraint([{0}],",
"Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON10\", name)) VMON11 = Input([(0, 5734)], \"monitor\",",
"x2 + 1), ([(\"VREF\", 0), (\"VRI\", 0)]))]), Constraint([(0, 0)], {\"VRI\" : [(0, 868)],",
"#\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX) #EVAL 3",
"{}, [], lambda node: node.indep(\"OUT2\"))], \"power\") def __init__(self, name, bus_addr): super(PowerSupply, self).__init__(name, bus_addr,",
"VDD = Input([(0, 2600)], \"power\") EN1 = Input([{0, 1}], \"logical\") EN2 = Input([{0,",
"[(1200, 1200)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1200, 1200)], ddr13_2v5 : [(2400,",
"[(0, 0)], mgtaux_l : [(0, 0)], mgtaux_r : [(0, 0)], mgtavcc : [(0,",
"1}], \"VCC\" : [(0, 5250)], \"EN_PWR\" : [{0}], \"EN_VTT\" : [(0, 14000)]}, partial(Constraint.implicit,",
"\"EN_VTT\"), (\"U51\", \"VDDH\"), (\"IC13\", \"V_PWR\")}), (\"en_vcc1v8_fpga\", \"U35\", \"OUT15\", {(\"IC12\", \"EN\")}), (\"vcc1v8_fpga\", \"IC12\", \"V_OUT\",",
"req: req[wire] = state else: req[wire] = state_union(state, req[wire]) print(req) return req def",
"set()))], \"logical\", Wire.cpu_clk_ok) B_FDV_1V8 = Output([{0, 1}, {0, 1}, {0, 1}, {0, 1},",
"\"OUT17\", {(\"IC11\", \"EN\")}), (\"vadj_1v8_fpga\", \"IC11\", \"V_OUT\", {(\"fpga\", \"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\", \"U35\", \"OUT13\", {(\"IC13\", \"EN\")}),",
"3 version of the FPGA, comments indicate changes class FPGA_EVAL3(Stateful_Node): CLK = Input([(0,",
"{}, [], lambda node: node.indep(\"OUT1\"))], \"power\") OUT2 = Output([(0, 12000)], [([(0, 12000)], {},",
"Input([(0, 13200)], \"power\") VOUT = lambda _, thresh : Output([(0, 3040)], [Constraint([(500, 3040)],",
"{(\"fpga\", \"CLK_OK\")}), (\"fpga_clk\", \"U16\", \"CLK\", {(\"fpga\", \"CLK\")}), (\"en_mgtvccaux_l\", \"U35\", \"OUT11\", {(\"IC5\", \"SHDN\")}), (\"en_mgtvccaux_r\",",
"[900, \"max20751_vccint_fpga\"]), #VCCINT_FPGA p 97 (\"U43\", 0x0, ISL, []), #VDD_DDRFPGA13 p 93 (\"U47\",",
"VCCO_VTT_DDR13 = Input([(0, 2000)], \"power\") #replace VREF VCCO_VTT_DDR24 = Input([(0, 2000)], \"power\") VCCO_VCC_DDR13",
"1000)], \"power\") MGTAVCC = Input([(0, 1000)], \"power\") MGTAVTT = Input([(0, 1300)], \"power\") VCCINT_IO",
": [(870, 14000)]}, {}, \\ dependency_update= (Constraint.is_default, \\ [partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i",
"5250)], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update=(Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\",",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON9\", name)) VMON10 = Input([(0, 5734)], \"monitor\", lambda node,",
"{(\"U34\", \"EN\")}), (\"vdd_core\", \"U34\", \"VOUT\", {(\"U20\", \"VMON4\"), (\"cpu\", \"VDD\")}), (\"vdd_oct_en_l2\", \"U20\", \"OUT7\", {(\"U34\",",
"1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ##### FOR EVAL 3:",
"800)], vttddr13 : [(700, 800)]}, \"%s should have stabilized by now\" %vdd), ####",
"INA226, [\"ina226_ddr_fpga_13\"]), (\"U27\", 0x44, INA226, [\"ina226_ddr_cpu_13\"]), (\"U31\", 0x45, INA226, [\"ina226_ddr_cpu_24\"]), #TODO: add real",
": [{1}], \"EN_VTT\" : [(870, 14000)]}, {}, \\ dependency_update= (Constraint.is_default, \\ [partial(Constraint.implicit, \"VOUT\",",
": PowerState({ ok : [{1}], rst : [{1}], clk : [(0, 0), (3300,",
"bus_addr, device): self.device = device for i in range(0, 20): self.generate_output(i) super(ISPPAC, self).__init__(name,",
"50)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.explicit, \"CLK\",",
"self.device = device super(INA226, self).__init__(name, bus_addr, INA226) self.configured = False def ina_monitor(self, wire_name):",
"868)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\")), Constraint([(0, 0)], { \"VRI\" :",
": { \"POWERED_DOWN\" : PowerState({ ok : [{0}], rst : [{0}], clk :",
"3300), (50, 50)], io33 : [(3140, 3460)]}, \"wait for %s to stabilize\" %(io33)),",
"{ \"POWERED_DOWN\" : PowerState({vdd : [(0, 0)], en1 : [{0}], en2 : [{0}]},",
"bus_addr, default, device): self.device = device self.default = default self.is_default = False self.current",
"\"BUS\": [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN\"}, after_set",
"[]), (\"bmc\", 0x0, BMC, []), (\"U24\", 0x0, NCP, []), (\"U25\", 0x0, NCP, []),",
"self.configured: return [] else: self.configured = True return [ \"init_device('isl6334d_ddr_v', False)\" ] class",
": { #pll_vdd, pll_ddr2, pll_ddr13, sys_pll_ddr : { \"POWERED_DOWN\" : PowerState({ ok :",
"Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{1}]})), Constraint([(0,",
"multiplier=multiplier): commands = node.configure() if list(value[0])[0] == 0: commands.append(\"wait_for_voltage('%s', v_min=0, v_max=0.08, device='%s', monitor='%s')\"",
"800)]}, \"%s should have stabilized by now\" %vdd), #### FOR EVAL 3 ######################",
"[]), #VDD_DDRCPU24 p 75 (\"U37\", 0x72, MAX20751, [900, \"max20751_mgtavcc_fpga\"]), #MGTACC_FPGA p 85 (\"U41\",",
"def bus_req_off(self): return {self.VIN.name : [(0, self.threshold-1)], self.VCC.name : [(0, 2500)]} def update(self,",
"{(\"U37\", \"VR_ON\")}), (\"mgtavcc_fpga\", \"U37\", \"V_OUT\", {(\"U35\", \"VMON7\"), (\"fpga\", \"MGTAVCC\")}), (\"en_vccint_fpga\", \"U35\", \"OUT9\", {(\"U51\",",
"node.isppac_monitor(\"VMON2_ATT\", name)) VMON3_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON3_ATT\", name)) VMON4",
"\"VOUT_2\", {(\"U20\", \"VMON5\"), (\"cpu\", \"VDD_09\")}), (\"en_1v5_vdd_oct\", \"U20\", \"OUT8\", {(\"IC4\", \"EN\")}), (\"1v5_vdd_oct\", \"IC4\", \"V_OUT\",",
"name, bus_addr): self.is_default = False super(ISL, self).__init__(name, bus_addr, ISL) def update(self, states): try:",
"[{1}] }, { \"POWERED_DOWN\" : [ ({clk: [(0, 0), (3300, 3300), (50, 50)]},",
"\"logical\") V_OUT = lambda _, default, thresh: Output([(0, thresh)], [Constraint([(default, default)], {\"V_IN\" :",
"927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"), ({vcco : [(1746, 1854)], vcc_2v5_ddr13 :",
"2599)]}))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Oscillator, self).__init__(name, bus_addr, Oscillator) class SI5395(Node):",
"= False super(SI5395, self).__init__(name, bus_addr, SI5395) def update(self, states): try: intersect(states[self.VDD.name], [(2600, 3600)])",
"1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_CLOCK_FLOL\", set(), set()))], \"logical\", Wire.fpga_clk_ok) B_PSUP_ON = Output([{0,",
"\"U51\", \"V_OUT\", {(\"U35\", \"VMON6\"), (\"fpga\", \"VCCINT\")}), (\"en_sys_1v8\", \"U35\", \"OUT16\", {(\"IC7\", \"V_EN\")}), (\"sys_1v8\", \"IC7\",",
"state_update= Constraint.default_state), Constraint([(500, 1520)], {\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)], \"BUS\" : [{1}]},",
"\"MGTVCCAUX_L\")}), (\"mgtvccaux_r\", \"IC6\", \"V_OUT\", {(\"U35\", \"VMON9\"), (\"fpga\", \"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\", \"U35\", \"OUT17\", {(\"IC11\", \"EN\")}),",
"= (lambda clk, ok, vcc, io, aux, vcco, vadj, vcc_2v5_ddr13, vcc_2v5_ddr24, vcc_ddr13, vcc_ddr24,",
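# --- illustrative sketch (not from the original source) ------------------
# Assumed shape of the `states` tuples used by the Stateful_Node classes:
# a lambda over wire names paired with the input attribute names the
# framework binds to those parameters.
if __name__ == "__main__":
    fn, wires = (lambda clk, ok: {"POWERED_DOWN": {clk: [(0, 0)], ok: [{0}]}},
                 ["CLK", "CLK_OK"])
    print(fn("clk_wire", "ok_wire"))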
"{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], \"VCC\"",
"name)) VMON7_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7_ATT\", name)) VMON8 =",
"- i * 6.25) voltage_max = math.ceil(1600 - i * 6.25) outputs.append( Constraint(",
"% ( wire_name, 0.00095 * list(value[0])[0] * multiplier, 0.00105 * list(value[0])[0] * multiplier,",
"0x70, MAX20751, [900, \"max20751_vccint_fpga\"]), #VCCINT_FPGA p 97 (\"U43\", 0x0, ISL, []), #VDD_DDRFPGA13 p",
"\"VMON2_ATT\"), (\"U20\", \"VMON2_ATT\"), (\"U26\", \"VCC\"), (\"U30\", \"VCC\"), (\"U43\", \"VCC\"), (\"U47\", \"VCC\")}), (\"5vsb_psup\", \"main_psu\",",
"name, inputs: node.construct_dependency(name, node.construct_req(inputs))), Constraint([{0}], lambda node, inputs: {}, lambda node, name, inputs:",
"self.V_OUT = self.V_OUT(voltage, int(voltage * 1.01)) super(MAX15053, self).__init__(name, bus_addr, MAX15053) def binary_multidimensional(decimal): binary",
"\\ [partial(Constraint.implicit, \"VOUT\", {\"VID\" : binary_multidimensional(i + 2)}, after_set={\"EN_PWR\"}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"}),",
"self).__init__(name, bus_addr, SI5395) def update(self, states): try: intersect(states[self.VDD.name], [(2600, 3600)]) except State_Space_Error: self.configured",
"node.indep(\"OUT1\"))], \"power\") OUT2 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT2\"))],",
"binary_multidimensional(decimal): binary = bin(decimal)[2:] #remove 0b prefix multidim = list({0} for i in",
"name, bus_addr): super(PSU, self).__init__(name, bus_addr, PSU) class Main_PSU(Node): EN = Input([{0, 1}], \"logical\")",
"[\"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU2, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU2) class",
"node.isppac_monitor(\"VMON8\", name)) VMON8_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON8_ATT\", name)) VMON9",
"the cpu, but the connected DIMM SPD needs 2.5 V ddr13_2v5 : [(2400,",
"additional vcco thingy? (\"en_sys_2v5_13\", \"U35\", \"OUT7\", { (\"IC8\", \"V_EN\")}), (\"sys_2v5_13\", \"IC8\", \"V_OUT\", {(\"U35\",",
": [(873, 927)], io : [(873, 927)], aux : [(1746, 1854)], vcco :",
"partial(Constraint.implicit, \"VREF\", \"implicit_on\"), complex_constraints= [(lambda x1, x2: z3.Or(x1 * 2 == x2, x1",
"1200)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1200, 1200)], ddr13_2v5 : [(2400, 2600)],",
"\"init_device('%s', False)\" % self.device, \"init_device('%s', False)\" % self.loop1, \"init_device('%s', False)\" % self.loop2 ]",
"Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_CLOCK_FLOL\", set(), set()))], \"logical\", Wire.fpga_clk_ok) B_PSUP_ON =",
"= Input([(0, 3600)], \"power\") states = (lambda ok, rst, clk, vdd, vdd09, vdd15,",
"{\"VCC\" : [(0, 3630)], \"VIN\" : [(0, 13200)], \"EN_2\" : [{0}]}, partial(Constraint.implicit, \"VOUT_2\",",
"= Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT0\"))], \"power\") OUT1 =",
": []} )}, [\"CLK\", \"CLK_OK\", \"VCCINT\", \"VCCINT_IO\", \"VCCAUX\", \"VCCO_1V8\", \"VADJ_1V8\", \"VCCO_2V5_DDR13\", \"VCCO_2V5_DDR24\", \"VCCO_VCC_DDR13\",",
"2600)], \"VCC_IN\": [(0, 2000)]} VMON1_ATT = Input([(0, 13900)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON1_ATT\",",
"* multiplier, node.device, pinname )) return (True, \"\\n\".join(commands)) return fun def configure(self): if",
"99 (\"U34\", 0x0, IR, [4500, \"ir3581\", \"ir3581_loop_vdd_core\", \"ir3581_loop_0v9_vdd_oct\", 0x60, 0x62]), #VDD_CORE, VDD_OCT p",
"for i in range(0, 177): voltage_min = math.floor(1600 - i * 6.25) voltage_max",
"should have stabilized by now\" %vdd), #### FOR EVAL 3 ###################### ({ddr24 :",
"outputs.append( Constraint( [(voltage_min, voltage_max)], \\ {\"VID\" : binary_multidimensional(i + 2), \"VCC\" : [(4750,",
"\"power\") V_EN = Input([{0, 1}], \"logical\") V_OUT = lambda _, default, threshold: Output([(0,",
"\"VDD\")}), (\"vdd_oct_en_l2\", \"U20\", \"OUT7\", {(\"U34\", \"EN_2\")}), (\"0v9_vdd_oct\", \"U34\", \"VOUT_2\", {(\"U20\", \"VMON5\"), (\"cpu\", \"VDD_09\")}),",
"[partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\", \"EN\"},",
"req = {} for node, _ in inputs: node_req = node.bus_req_off() for wire,",
"1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_FDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) B_CDV_1V8",
"{(\"fpga\", \"MGTAVTT\")}), (\"en_mgtavcc_fpga\", \"U35\", \"OUT10\", {(\"U37\", \"VR_ON\")}), (\"mgtavcc_fpga\", \"U37\", \"V_OUT\", {(\"U35\", \"VMON7\"), (\"fpga\",",
"\"\"), ({mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746, 1854)]}, \"\"), ({ok: [{1}]}, \"\")",
"({ddr24 : [(1200, 1200)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1200, 1200)], ddr13_2v5",
"req def construct_dependency(self, name, req): return (SET.Implicit, [set(), set(), set(), set()], lambda states,",
"(\"U26\", \"EN_VTT\"), (\"U30\", \"EN_VTT\"), (\"U34\", \"VIN\")}), (\"12v_cpu1_psup\", \"psu_cpu1\", \"OUT\", {(\"U35\", \"VMON1_ATT\"), (\"U37\", \"VDDH\"),",
"#1V5_VDD_OCT p.70 (\"IC11\", 0x12, MAX15301, [1800, \"max15301_vadj_1v8\"]), #VADJ_1V8 p.91 (\"IC13\", 0x15, MAX15301, [900,",
"after_set = {\"VR_ON\"}), partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\", \"VR_ON\"}, set())])), Constraint([(0, 0)], {\"VR_ON\":",
"name)) VMON4_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4_ATT\", name)) VMON5 =",
"= Input([(0, 3400)], \"power\") VCCO_2V5_DDR13 = Input([(0, 3400)], \"power\") VCCO_VCC_DDR24 = Input([(0, 3400)],",
": [(0, 14000)]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\"))) return outputs class ISL(Node): implicit_off = {\"VCC\"",
"= Input([{0, 1}], \"logical\") V33_PSU = Output([(0, 3300)], [ Constraint([(3300, 3300)], {\"EN\": [{1}]},",
"3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.implicit, \"CLK\", \"implicit_on\"), dependency_update= (Constraint.is_configured,",
"\"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR13\"), (\"U39\", \"VRI\")}), #vcco (\"vdd_ddrfpga24\", \"U47\", \"VOUT\", {(\"U44\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR24\"),",
"[(0, thresh-1)], \"EN\" : [{0}]} implicit_off_2 = lambda _, thresh : {\"VCC\" :",
"= False def generate_output(self, number): name = \"OUT\" + str(number) output = Output([{0,",
"CPU_3(Stateful_Node): VDD33 = Input([(0, 4000)], \"power\") VDD = Input([(0, 2500)], \"power\") EN1 =",
"node.isppac_monitor(\"VMON9\", name)) VMON10 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON10\", name)) VMON11",
"\"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA_EVAL3) class",
"\"CLK_IN\"), (\"U16\", \"CLK_IN\")}), (\"clock_flol\", \"bmc\", \"B_CLOCK_FLOL\", {(\"fpga\", \"CLK_OK\")}), (\"fpga_clk\", \"U16\", \"CLK\", {(\"fpga\", \"CLK\")}),",
"(\"vtt_ddrfpga24\", \"U40\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR24\")}), (\"clk_sig\", \"oscillator\", \"CLK\", {(\"U57\", \"CLK_IN\")}), (\"clk_main\", \"U57\", \"CLK\",",
"MAX15301(Node): implicit_off = {\"EN\": [{0}], \"V_PWR\": [(0, 4400)]} implicit_on = {\"EN\": [{1}], \"V_PWR\":",
"\"power\") #not found in fpga boot sequ; filled in like VCCO_VCC_DDR voltages MGTVCCAUX_L",
"\"CLK_IN\")}), (\"clk_main\", \"U57\", \"CLK\", {(\"U11\", \"CLK_IN\"), (\"U16\", \"CLK_IN\")}), (\"clock_flol\", \"bmc\", \"B_CLOCK_FLOL\", {(\"fpga\", \"CLK_OK\")}),",
"0)], en1 : [{0}], en2 : [{0}]}, { \"POWERED_ON\": [ ({en1 : [{0}]},",
"lambda node, name: node.isppac_monitor(\"VMON6\", name)) VMON7 = Input([(0, 5734)], \"monitor\", lambda node, name:",
": [(0, 2500)]} def update(self, states): try: intersect(states[self.VOUT.name], [(500, 3040)]) self.is_default = True",
"[(0, 0)], vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0, 0)], vtt : [(0,",
"Input([(0, 14000)], \"power\") V_OUT = lambda default : Output([(0, 5250)], [Constraint([], {\"EN\": [{1}],",
"(\"en_vdd_ddrcpu13\", \"U20\", \"OUT11\", {(\"U26\", \"EN_PWR\")}), (\"en_vdd_ddrcpu24\", \"U20\", \"OUT12\", {(\"U30\", \"EN_PWR\")}), (\"vdd_ddrcpu13\", \"U26\", \"VOUT\",",
"[{0, 1}], \"EN_VTT\" : [(0, 830)]} VCC = Input([(0, 6000)], \"power\") EN_PWR =",
"\"logical\") EN2 = Input([{0, 1}], \"logical\") states = (lambda vdd33, vdd, en1, en2:",
"= loop1 self.loop2 = loop2 self.l1_addr = l1_addr self.l2_addr = l2_addr self.implicit_off =",
"[(2400, 2600)], #not imposed by the cpu, but the connected DIMM SPD needs",
"p 75 (\"U37\", 0x72, MAX20751, [900, \"max20751_mgtavcc_fpga\"]), #MGTACC_FPGA p 85 (\"U41\", 0x73, MAX20751,",
"= Input([(0, 6000)], \"power\") SHDN = Input([{0, 1}], \"logical\") V_OUT = lambda _,",
"0)], vcc_ddr24 : [(0, 0)], vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0, 0)],",
"\"VMON6\"), (\"cpu\", \"VDD_15\")}), (\"en_2v5_cpu13\", \"U20\", \"OUT9\", {(\"IC15\", \"V_EN\")}), #to NCP (\"2v5_cpu13\", \"IC15\", \"V_OUT\",",
"\"VREF\", \"implicit_off\"))], \"power\") def __init__(self, name, bus_addr): super(NCP, self).__init__(name, bus_addr, NCP) class MAX8869(Node):",
"boot sequ; filled in like VCCO_VCC_DDR voltages MGTVCCAUX_L = Input([(0, 1900)], \"power\") MGTVCCAUX_R",
"2), \"VCC\" : [(4750, 5250)], \"EN_PWR\" : [{1}], \"EN_VTT\" : [(870, 14000)]}, {},",
"MAX15301, [1800, \"max15301_vcc1v8_fpga\"]), #VCCIV8_FPGA p 92 (\"IC10\", 0x1B, MAX15301, [3300, \"max15301_util_3v3\"]), #UTIL_3V3 p.90",
"lambda node, name: node.isppac_monitor(\"VMON7_ATT\", name)) VMON8 = Input([(0, 5734)], \"monitor\", lambda node, name:",
"[(700, 800)], vttddr13 : [(700, 800)]}, \"%s should have stabilized by now\" %vdd),",
"[(0, 0)], aux : [(0, 0)], vcco : [(0, 0)], vadj : [(0,",
"[]), (\"U39\", 0x0, NCP, []), (\"U40\", 0x0, NCP, []), (\"U57\", 0x0, SI5395, [\"clk_main\"]),",
"[(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], ##### REGULAR VALUES: ####### #vcc_ddr13 : [(1140,",
"states): try: intersect(states[self.V_OUT.name], [(600, 5250)]) self.current = states[self.V_OUT.name] self.is_default = True return except",
"name, 0.4125)) VMON2_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON2_ATT\", name)) VMON3_ATT",
"\"POWERED_DOWN\", FPGA) #EVAL 3 version of the FPGA, comments indicate changes class FPGA_EVAL3(Stateful_Node):",
"{\"VR_ON\": [{1}], \"VDD33\":[(2970, 3630)], \"VDDH\":[(8500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_on), state_update= Constraint.default_state), Constraint([(500, 1520)],",
"EN_2 = Input([{0, 1}], \"logical\") VIN = Input([(0, 13200)], \"power\") VOUT = lambda",
"3: ################### ({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400,",
"(\"IC6\", 0x0, MAX8869, [1800]), #MGTVCCAUX_R, p 88 (\"IC7\", 0x0, MAX15053, [1800]), #SYS_1V8, p",
"Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_RESET_N\", set(), set()))], \"logical\", Wire.gpio_set) C_PLL_DC_OK =",
"\"IC7\", \"V_OUT\", {(\"U35\", \"VMON12\"), (\"fpga\", \"VCCO_1V8\")}), #where to connect at fpga? additional vcco",
"node.isppac_monitor(\"VMON7\", name)) VMON7_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7_ATT\", name)) VMON8",
"(\"U30\", \"VID\")}), (\"b_fdv_1v8\", \"bmc\", \"B_FDV_1V8\", {(\"U43\", \"VID\"), (\"U47\", \"VID\")}), (\"c_reset_n\", \"bmc\", \"C_RESET_N\", {(\"cpu\",",
"3630)], \"VIN\" : [(0, 13200)], \"EN_2\" : [{0}]}, partial(Constraint.implicit, \"VOUT_2\", \"implicit_off_2\")), ], \"power\",",
": [{1}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"OUT\", {\"EN\":",
"(\"IC8\", 0x0, MAX15053, [2500]), #SYS_2V5_13 (\"IC9\", 0x0, MAX15053, [2500]), #SYS_2V5_24 (\"IC15\", 0x0, MAX15053,",
"0)], ddr24_2v5 : [(0, 0)], ddr13 : [(0, 0)], ddr13_2v5 : [(0, 0)],",
"\"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX_EVAL3) class",
"[(2300, 2400)]}, \"wait until \" + vdd + \" stabilized\"), ({en1 : [{1}]},",
"partial(Constraint.implicit, \"OUT\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(PSU, self).__init__(name, bus_addr, PSU)",
"node, name: node.isppac_monitor(\"VMON10\", name)) VMON11 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11\",",
"clk: [(0, 0), (3300, 3300), (50, 50)], vcc : [(873, 927)], io :",
"(Constraint.is_default, [partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"VOUT\", {\"VCC\",",
"[\"VDD33\", \"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU_3, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU_3)",
"\"VMON5_ATT\"), (\"U40\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR24\")}), #add NCP nodes (\"vtt_ddrfpga24\", \"U40\", \"VREF\", {(\"fpga\", \"VCCO_VTT_DDR24\")}),",
"#2V5_CPU13 p 71 (\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA,",
"wire_name): def fun(value, states, node=self, wire=wire_name): if states[node.VS.name][0][0] > 2700 and states[node.VS.name][0][0] <",
"930)], vdd15 : [(1450, 1550)]}, \"\"), ({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400,",
"Main_PSU) class PowerSupply(Node): OUT0 = Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node:",
"\"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX) #EVAL 3 version",
"{(\"IC12\", \"EN\")}), (\"vcc1v8_fpga\", \"IC12\", \"V_OUT\", {(\"U35\", \"VMON11_ATT\"), (\"fpga\", \"VCCAUX\")}), (\"en_util33\", \"U35\", \"OUT6\", {(\"IC10\",",
"set()))], \"logical\", Wire.gpio_set) C_RESET_N = Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"C_RESET_N\", set(),",
"1}], \"logical\") V_OUT = lambda _, default, threshold: Output([(0, default)], [Constraint([(default, default)], {\"V_IN\"",
"2)}, before_complete= {\"VCC\", \"EN_VTT\", \"EN_PWR\"})]))) outputs.append(Constraint([(0, 0)], {\"VID\": [{0, 1}, {0, 1}, {0,",
"\"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA_EVAL3) class ThunderX(Stateful_Node):",
"= lambda _, default, thresh: Output([(0, thresh)], [Constraint([(default, default)], {\"V_IN\" : [(max(thresh +",
"commands = node.configure() commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" % ( wire, 0.00095 *",
"14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX20751.implicit_off)), ], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr, default, device):",
"[(2400, 2600)], ddr13 : [(1425, 1575)], ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700,",
"0)], { \"VRI\" : [(0, 3600)], \"VCC\" : [(0, 2374)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\"))],",
"[(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.explicit, \"CLK\", {\"VDD\", \"CLK_IN\"},",
"def __init__(self, name, bus_addr): super(FPGA, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA) #EVAL 3 version of",
"pll_ddr2, pll_ddr13, sys_pll_ddr : { \"POWERED_DOWN\" : PowerState({ ok : [{0}], rst :",
"Wire.voltage_set) def __init__(self, name, bus_addr, default, device): self.device = device self.default = default",
"except State_Space_Error: self.is_default = False try: intersect(states[self.VIN.name], [(self.threshold, 13200)]) except State_Space_Error: self.configured =",
"], \"power\", Wire.ir_set) VOUT_2 = lambda _, thresh : Output([(0, 3040)], [Constraint([(500, 3040)],",
": [{0}]}, \"\") ], \"POWERED_DOWN\": []}), \"POWERED_ON\" : PowerState({vdd33: [(3000, 4000)], vdd :",
"[{0}]}, partial(Constraint.implicit, \"VOUT\", \"implicit_off\")), ], \"power\", Wire.ir_set) VOUT_2 = lambda _, thresh :",
"self.is_default = True return except State_Space_Error: self.is_default = False try: intersect(states[self.VDD33.name], [(0, 2800)])",
"\"ir3581_loop_0v9_vdd_oct\", 0x60, 0x62]), #VDD_CORE, VDD_OCT p 77 (\"U26\", 0x0, ISL, []), #VDD_DDRCPU13 p",
"[{0}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\": [{0}]}))], \"power\") V5SB_PSU = Output([(5000, 5000)], [Constraint([(5000, 5000)], {},",
"name: node.isppac_monitor(\"VMON11\", name)) VMON11_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11_ATT\", name))",
"##### REGULAR VALUES: ####### #vcc_ddr13 : [(1140, 3400)], #vcc_ddr24 : [(1140, 3400)], #####",
"Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0, 1}], \"logical\") PLL_REF_CLK = Input([(0, 0), (3300,",
"it to match the schematics, so I know which supplies to connect.. :')",
"[(5500, 14000)]} def bus_req_off(self): return {self.V_PWR.name: [(0, 4400)]} def update(self, states): try: intersect(states[self.V_OUT.name],",
"FOR EVAL 3: ##### vcc_ddr13 : [(1200, 1200)], vcc_ddr24 : [(1200, 1200)], #############################",
"self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(int(voltage), int(voltage * 1.01)) super(MAX8869, self).__init__(name, bus_addr, MAX8869)",
"[(0, 0)], vdd15 : [(0, 0)], ddr24 : [(0, 0)], ddr24_2v5 : [(0,",
"\"VMON10\"), (\"U31\", \"VBUS\"), (\"cpu\", \"VDD_DDR24\"), (\"U25\", \"VRI\")}), (\"vdd_core_en\", \"U20\", \"OUT6\", {(\"U34\", \"EN\")}), (\"vdd_core\",",
"+ 499, 2699))], \"SHDN\" : [{0}]} V_IN = Input([(0, 6000)], \"power\") SHDN =",
"\"EN\"), (\"main_psu\", \"EN\")}), (\"3v3_psup\", \"main_psu\", \"V33_PSU\", {(\"U20\", \"VMON3_ATT\"), (\"U27\", \"VS\"), (\"U31\", \"VS\"), (\"IC15\",",
"(3300, 3300), (0, 0)], vdd : [(0, 0)], vdd09 : [(0, 0)], vdd15",
"VDD33 = Input([(0, 4000)], \"power\") VDD = Input([(0, 2500)], \"power\") EN1 = Input([{0,",
"vcco, vadj, vcc_2v5_ddr13, vcc_2v5_ddr24, vcc_ddr13, vcc_ddr24, vtt_ddr13, vtt_ddr24, vtt, mgtaux_l, mgtaux_r, mgtavcc: {",
"False super(ISL, self).__init__(name, bus_addr, ISL) def update(self, states): try: intersect(states[self.VOUT.name], [(500, 1600)]) self.is_default",
"False return try: intersect(states[self.VCC.name], [(2900, 3630)]) except State_Space_Error: self.configured = False return def",
"en2 : [{0}]}, { \"POWERED_DOWN\": [ ({vdd: [(2300, 2400)]}, \"wait until \" +",
"node, name: node.isppac_monitor(\"VMON4\", name)) VMON4_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4_ATT\",",
"MAX8869, [1800]), #MGTVCCAUX_R, p 88 (\"IC7\", 0x0, MAX15053, [1800]), #SYS_1V8, p 89 (\"IC8\",",
"import Node, Input, Output, Constraint, Wire, PowerState, Stateful_Node, intersect, State_Space_Error, unite_dict, state_union, SET,",
"[{1}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"OUT\", {\"EN\": [{0}]}))],",
"(\"IC4\", 0x10, MAX15301, [1500, \"max15301_15_vdd_oct\"]), #1V5_VDD_OCT p.70 (\"IC11\", 0x12, MAX15301, [1800, \"max15301_vadj_1v8\"]), #VADJ_1V8",
"device = \"ir3581\" bus = \"power\" BUS = Input([{0, 1}], \"bus\") #loop 1",
"device self.default = default self.is_default = False self.current = [(default, default)] self.V_OUT =",
"partial(Constraint.implicit, \"VREF\", \"implicit_off\")), Constraint([(0, 0)], { \"VRI\" : [(0, 3600)], \"VCC\" : [(0,",
"EN_VTT = Input([(0, 12000)], \"power\") VID = Input([{0, 1}, {0, 1}, {0, 1},",
"\" + vdd + \" stabilized\"), ({en1 : [{1}]}, \"\"), ({en2 : [{1}],",
"{0, 1}], {}, partial(Constraint.explicit, \"B_CDV_1V8\", set(), set()))], \"logical\", Wire.vid_set) def __init__(self, name, bus_addr):",
"default)] self.V_OUT = MAX15301.V_OUT(default) super(MAX15301, self).__init__(name, bus_addr, MAX15301) def bus_req(self): return {self.V_PWR.name: [(5500,",
"return {self.VIN.name : [(self.threshold, 13200)], self.VCC.name : [(2900, 3630)]} def bus_req_off(self): return {self.VIN.name",
": [(0, 0)]}, \"\"), ({vcco: [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 :",
"self.configured = False return try: intersect(states[self.VCC.name], [(2900, 3630)]) except State_Space_Error: self.configured = False",
"\"power\") VCCO_VTT_DDR13 = Input([(0, 2000)], \"power\") #replace VREF VCCO_VTT_DDR24 = Input([(0, 2000)], \"power\")",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7\", name)) VMON7_ATT = Input([(0, 5734)], \"monitor\", lambda",
"= Input([{0, 1}], \"logical\") VCCO_2V5_DDR24 = Input([(0, 3400)], \"power\") VCCO_2V5_DDR13 = Input([(0, 3400)],",
"(0, 0)], vdd : [(0, 0)], vdd09 : [(0, 0)], vdd15 : [(0,",
"inputs: node_req = node.bus_req_off() for wire, state in node_req.items(): if not wire in",
"3300), (0, 50)], \"clock\") CLK = Output([(0, 0), (3300, 3300), (0, 50)], [",
"#but I adjusted it to match the schematics, so I know which supplies",
": [(1746, 1854)], mgtaux_r : [(1746, 1854)]}, \"\"), ({ok: [{1}]}, \"\") ], \"POWERED_ON\"",
"INA226, [\"ina226_ddr_fpga_24\"]), (\"U48\", 0x41, INA226, [\"ina226_ddr_fpga_13\"]), (\"U27\", 0x44, INA226, [\"ina226_ddr_cpu_13\"]), (\"U31\", 0x45, INA226,",
"bus_addr, MAX15301) def bus_req(self): return {self.V_PWR.name: [(5500, 14000)]} def bus_req_off(self): return {self.V_PWR.name: [(0,",
"i in range(8 - len(binary))) for i in binary: multidim.append({int(i)}) return multidim def",
"__init__(self, name, bus_addr): self.is_default = False super(ISL, self).__init__(name, bus_addr, ISL) def update(self, states):",
"\"VCCO_1V8\", \"VADJ_1V8\", \"VCCO_2V5_DDR13\", \"VCCO_2V5_DDR24\", \"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\", \"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def",
"= {\"EN\"}), partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\", \"EN\"}, set())])), Constraint([(0, 0)], {\"EN\": [{0}], \"V_PWR\":",
"(\"main_psu\", 0x0, Main_PSU, []), (\"U20\", 0x60, ISPPAC, [\"pac_cpu\"]), #cpu ISPPAC (\"U35\", 0x61, ISPPAC,",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}], [ Constraint([{0, 1}, {0, 1},",
"name, bus_addr, threshold, device, loop1, loop2, l1_addr, l2_addr): self.configured = False self.is_default =",
"SET, empty_intersection import math from functools import partial import z3 class INA226(Node): BUS",
"4400)]} def update(self, states): try: intersect(states[self.V_OUT.name], [(600, 5250)]) self.current = states[self.V_OUT.name] self.is_default =",
"vdd15 : [(1450, 1550)], ddr13 : [(1425, 1575)], ddr24: [(1425, 1575)], ddr24_2v5 :",
"0)], vdd : [(0, 0)], en1 : [{0}], en2 : [{1}]}, { \"POWERED_ON\":",
"- i * 6.25) outputs.append( Constraint( [(voltage_min, voltage_max)], \\ {\"VID\" : binary_multidimensional(i +",
"\"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]} VDD = Input([(0, 3600)], \"power\") CLK_IN",
"2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1140, 3400)], vcc_ddr24 : [(1140, 3400)],",
"###################################### ({ok : [{1}]}, \"must have written pll_mul and sys_pll_mul beforehand\"), ({rst :",
"Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(2600,",
"1, 2699)))], \"V_EN\" : [{0}]} V_IN = Input([(0, 6000)], \"power\") V_EN = Input([{0,",
"[(700, 800)], io33 : [(3140, 3460)], }, { \"POWERED_DOWN\": [ ({clk: [(0, 0),",
"in inputs: node_req = node.bus_req_off() for wire, state in node_req.items(): if not wire",
"Output([{0, 1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_PSUP_ON\", set(), set()))], \"logical\", Wire.gpio_set) C_RESET_N =",
"[(3000, 4000)], vdd: [(2000, 2500)]}, \"wait until \" + vdd + \" stabilized\"),",
"85 (\"U41\", 0x73, MAX20751, [1200,\"max20751_mgtavtt_fpga\"]), #MGTAVTT_FPGA p 87 (\"U51\", 0x70, MAX20751, [900, \"max20751_vccint_fpga\"]),",
"\"U24\", \"VREF\", {(\"U20\", \"VMON11\"), (\"cpu\", \"VTT_DDR13\")}), (\"en_2v5_cpu24\", \"U20\", \"OUT10\", {(\"IC16\", \"V_EN\")}), #to NCP",
"\"BUS\"}, {\"VR_ON\"}, after_set = {\"VR_ON\"}), partial(Constraint.explicit, \"V_OUT\", {\"VDD33\", \"VDDH\", \"BUS\", \"VR_ON\"}, set())])), Constraint([(0,",
"INA226(Node): BUS = Input([{0, 1}], \"bus\") VS = Input([(0, 6000)], \"power\") VBUS =",
"+ 500, 2700), 5500)], \"SHDN\" : [{1}]} implicit_off = lambda _, thresh: {\"V_IN\"",
"[(0, 0)]}, \"\"), ({vtt : [(0, 0)]}, \"\"), ({mgtavcc : [(0, 0)]}, \"\"),",
"True except State_Space_Error: self.is_default = False class IR(Node): implicit_off = lambda _, thresh",
"(\"cpu\", \"VDD_2V5_DDR13\")}), (\"vtt_ddrcpu13\", \"U24\", \"VREF\", {(\"U20\", \"VMON11\"), (\"cpu\", \"VTT_DDR13\")}), (\"en_2v5_cpu24\", \"U20\", \"OUT10\", {(\"IC16\",",
"######### #({ddr24 : [(1425, 1575)], ddr24_2v5 : [(2400, 2600)], ddr13 : [(1425, 1575)],",
"0)], {\"VRI\" : [(0, 868)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\")), Constraint([(0,",
"#2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA_EVAL3, []), (\"cpu\", 0x0, ThunderX_EVAL3, []), (\"bmc\", 0x0,",
"[(0, 0)]}, \"\"), ({vcco: [(0, 0)], vcc_2v5_ddr13 : [(0, 0)], vcc_2v5_ddr24 : [(0,",
"MAX15301.implicit_off)) ], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr, default, device): self.device = device",
"\"VMON3_ATT\"), (\"U27\", \"VS\"), (\"U31\", \"VS\"), (\"IC15\", \"V_IN\"), (\"IC16\", \"V_IN\"), (\"cpu\", \"VDD_IO33\"), (\"U34\", \"VCC\"),",
"\"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU_3, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU_3) class PSU(Node):",
"\"OUT14\", {(\"U41\", \"VR_ON\")}), (\"mgtavtt_fpga\", \"U41\", \"V_OUT\", {(\"fpga\", \"MGTAVTT\")}), (\"en_mgtavcc_fpga\", \"U35\", \"OUT10\", {(\"U37\", \"VR_ON\")}),",
"[([(0, 12000)], {}, [], lambda node: node.indep(\"OUT0\"))], \"power\") OUT1 = Output([(0, 12000)], [([(0,",
"{(\"U30\", \"EN_PWR\")}), (\"vdd_ddrcpu13\", \"U26\", \"VOUT\", {(\"U20\", \"VMON9\"), (\"U27\", \"VBUS\"), (\"cpu\", \"VDD_DDR13\"), (\"U24\", \"VRI\")}),",
"3400)], vcc_ddr24 : [(1140, 3400)], vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)],",
"\"U35\", \"OUT10\", {(\"U37\", \"VR_ON\")}), (\"mgtavcc_fpga\", \"U37\", \"V_OUT\", {(\"U35\", \"VMON7\"), (\"fpga\", \"MGTAVCC\")}), (\"en_vccint_fpga\", \"U35\",",
"(\"VRI\", 0)]))]), Constraint([(0, 0)], {\"VRI\" : [(0, 868)], \"VCC\" : [(2375, 5500)]}, partial(Constraint.implicit,",
"\"init_device('isl6334d_ddr_v', False)\" ] class CPU_3(Stateful_Node): VDD33 = Input([(0, 4000)], \"power\") VDD = Input([(0,",
"Input([{0, 1}], \"logical\") VIN = Input([(0, 13200)], \"power\") VOUT = lambda _, thresh",
"[{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}, {0,",
"(\"0v9_vdd_oct\", \"U34\", \"VOUT_2\", {(\"U20\", \"VMON5\"), (\"cpu\", \"VDD_09\")}), (\"en_1v5_vdd_oct\", \"U20\", \"OUT8\", {(\"IC4\", \"EN\")}), (\"1v5_vdd_oct\",",
"[(self.default, self.default)] return except State_Space_Error: pass class Oscillator(Node): VDD = Input([(0, 3600)], \"power\")",
"\"must have written pll_mul and sys_pll_mul beforehand\"), ({rst : [{1}]}, \"\") ], \"POWERED_ON\"",
"\"power\") CLK = Output([(0, 0), (3300, 3300), (0, 50)], [ Constraint([(0, 0), (3300,",
"\"power\") def __init__(self, name, bus_addr): super(PSU, self).__init__(name, bus_addr, PSU) class Main_PSU(Node): EN =",
"{\"EN\": [{0}]}))], \"power\") V12_PSU = Output([(0, 12000)], [ Constraint([(12000, 12000)], {\"EN\" : [{1}]},",
"\"\"), ({aux : [(1746, 1854)]}, \"\"), ##### REGULAR TRANSITION STEP ###### #({vcco :",
"{\"VCC\" : [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN_2\" : [{1}], \"BUS\" :",
"vcc_ddr24 : [(0, 0)], vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0, 0)], vadj:",
"EVAL 3 ###################### ({ddr24 : [(1200, 1200)], ddr24_2v5 : [(2400, 2600)], ddr13 :",
"(0, 0)]} VDD = Input([(0, 3600)], \"power\") CLK_IN = Input([(0, 0), (3300, 3300),",
"{\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}, partial(Constraint.implicit, \"CLK\", \"implicit_on\"),",
"\"power\") def __init__(self, name, bus_addr): super(PowerSupply, self).__init__(name, bus_addr, PowerSupply) class ISPPAC(Node): implicit_off =",
"[1800, \"max15301_vadj_1v8\"]), #VADJ_1V8 p.91 (\"IC13\", 0x15, MAX15301, [900, \"max15301_vccintio_bram_fpga\"]), #VCCINTIO_FPGA p 99 (\"U34\",",
"update(self, states): try: intersect(states[self.V_OUT.name], [(600, 5250)]) self.current = states[self.V_OUT.name] self.is_default = True return",
"node.isppac_monitor(\"VCCINP\", name)) VCC = Input([(0, 4500)], \"power\", lambda node, name: node.isppac_monitor(\"VCCA\", name)) def",
"1854)]}, \"\"), ##### REGULAR TRANSITION STEP ###### #({vcco : [(1746, 1854)], vcc_2v5_ddr13 :",
"#replace VREF VCCO_VTT_DDR24 = Input([(0, 2000)], \"power\") VCCO_VCC_DDR13 = Input([(0, 3400)], \"power\") VADJ_1V8",
"3300)], [ Constraint([(3300, 3300)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\":",
"5500)], \"V_EN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)],",
"\"logical\") CHIP_RESET_L = Input([{0, 1}], \"logical\") PLL_REF_CLK = Input([(0, 0), (3300, 3300), (0,",
")) else: commands.append(\"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='%s')\" % ( wire_name, 0.00095 * list(value[0])[0]",
"= Output([(0, 3300)], [ Constraint([(3300, 3300)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{1}]})), Constraint([(0,",
"\"MGTAVTT\", \"MGTVCCAUX_L\", \"MGTVCCAUX_R\", \"MGTAVCC\"]) def __init__(self, name, bus_addr): super(FPGA_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA_EVAL3)",
"vdd09 : [(870, 930)], vdd15 : [(1450, 1550)], ddr13 : [(1425, 1575)], ddr24:",
"\"clock\", Wire.clock_config) def __init__(self, name, bus_addr): super(Oscillator, self).__init__(name, bus_addr, Oscillator) class SI5395(Node): implicit_on",
"(\"IC12\", 0x11, MAX15301, [1800, \"max15301_vcc1v8_fpga\"]), #VCCIV8_FPGA p 92 (\"IC10\", 0x1B, MAX15301, [3300, \"max15301_util_3v3\"]),",
"self.implicit_off(threshold) self.implicit_off_2 = self.implicit_off_2(threshold) self.VOUT = self.VOUT(threshold) self.VOUT_2 = self.VOUT_2(threshold) super(IR, self).__init__(name, bus_addr,",
"1236)], mgtaux_l : [(1746, 1854)], mgtaux_r : [(1746, 1854)], mgtavcc : [(873, 927)],",
"self.VCC.name : [(2900, 3630)]} def bus_req_off(self): return {self.VIN.name : [(0, self.threshold-1)], self.VCC.name :",
"self.current = [(self.default, self.default)] return except State_Space_Error: pass class Oscillator(Node): VDD = Input([(0,",
"(\"pll_dc_ok\", \"bmc\", \"C_PLL_DC_OK\", {(\"cpu\", \"PLL_DC_OK\")}), (\"en_vdd_ddrcpu13\", \"U20\", \"OUT11\", {(\"U26\", \"EN_PWR\")}), (\"en_vdd_ddrcpu24\", \"U20\", \"OUT12\",",
"0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\": [{0}]}))], \"power\") V5SB_PSU = Output([(5000, 5000)], [Constraint([(5000,",
"(50, 50)]}, partial(Constraint.explicit, \"CLK\", {\"VDD\", \"CLK_IN\"}, set())), Constraint([(0, 0), (3300, 3300), (0, 0)],",
"should have stabilized by now\" %vdd), ###################################### ({ok : [{1}]}, \"must have written",
"[\"ina226_ddr_fpga_13\"]), (\"U27\", 0x44, INA226, [\"ina226_ddr_cpu_13\"]), (\"U31\", 0x45, INA226, [\"ina226_ddr_cpu_24\"]), #TODO: add real names",
"\"VCCO_VTT_DDR13\")}), (\"en_sys_2v5_24\", \"U35\", \"OUT8\", { (\"IC9\", \"V_EN\")}), (\"sys_2v5_24\", \"IC9\", \"V_OUT\", {(\"U35\", \"VMON5_ATT\"), (\"U40\",",
"[(2250, 5500)]}, partial(Constraint.explicit, name, {\"VCC\", \"VCC_IN\"}, set())), Constraint([{0}], {\"VCC\": [(0, 2600)], \"VCC_IN\": [(0,",
"(\"U20\", \"VMON2_ATT\"), (\"U26\", \"VCC\"), (\"U30\", \"VCC\"), (\"U43\", \"VCC\"), (\"U47\", \"VCC\")}), (\"5vsb_psup\", \"main_psu\", \"V5SB_PSU\",",
"Wire.vid_set) def __init__(self, name, bus_addr): super(BMC, self).__init__(name, bus_addr, BMC) self.configured = False def",
"name: node.isppac_monitor(\"VMON1_ATT\", name, 0.4125)) VMON2_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON2_ATT\",",
"vdd : [(940, 980)], vdd09 : [(870, 930)], vdd15 : [(1450, 1550)], #######",
"= \"<V_IN> V_IN\" V_IN = Input([(0, 12000)], \"power\") def __init__(self, name, bus_addr): super(PowerConsumer,",
"and FPGA to EVAL 3 versions enzian_nodes_EVAL3 = [ (\"power_bus\", 0x0, Bus, []),",
"[(0, 4500)], \"VCC_IN\": [(0, 2000)]}, partial(Constraint.implicit, name, \"implicit_off\"))], \"logical\", Wire.pin_set) setattr(self, name, output)",
"__init__(self, name, bus_addr): super(ThunderX_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX_EVAL3) class Bus(Node): BUS = Output([{0,",
"vtt_ddr13 : [(550, 1700)], vtt_ddr24 : [(550, 1700)], vtt : [(1164, 1236)], mgtaux_l",
"def __init__(self, name, bus_addr): super(Oscillator, self).__init__(name, bus_addr, Oscillator) class SI5395(Node): implicit_on = {\"VDD\":",
"Bus) def construct_req(self, inputs): req = {} for node, _ in inputs: unite_dict(req,",
"93 (\"U47\", 0x0, ISL, []), #DD_DDRFPGA24 p 95 (\"IC5\", 0x0, MAX8869, [1800]), #MGTVCCAUX_L,",
"5500)]}, partial(Constraint.implicit, \"VREF\", \"implicit_off\")), Constraint([(0, 0)], { \"VRI\" : [(0, 3600)], \"VCC\" :",
": [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN_2\" : [{1}], \"BUS\" : [{1}]},",
"0x0, Bus, []), (\"psu_cpu0\", 0x0, PSU, []), (\"psu_cpu1\", 0x0, PSU, []), (\"main_psu\", 0x0,",
"(\"U34\", \"VIN\")}), (\"12v_cpu1_psup\", \"psu_cpu1\", \"OUT\", {(\"U35\", \"VMON1_ATT\"), (\"U37\", \"VDDH\"), (\"U41\", \"VDDH\"), (\"IC10\", \"V_PWR\"),",
"self).__init__(name, bus_addr, ISPPAC) self.configured = False def generate_output(self, number): name = \"OUT\" +",
"\"VDD_2V5_DDR24\", \"VDD_DDR13\", \"VDD_2V5_DDR13\", \"VTT_DDR24\", \"VTT_DDR13\", \"VDD_IO33\"]) #\"VDD_IO25\", \"VDD_IO33\"]) def __init__(self, name, bus_addr): super(ThunderX_EVAL3,",
"NCP (\"2v5_cpu24\", \"IC16\", \"V_OUT\", {(\"U20\", \"VMON8_ATT\"), (\"U25\", \"VCC\"), (\"cpu\", \"VDD_2V5_DDR24\")}), (\"vtt_ddrcpu24\", \"U25\", \"VREF\",",
"[ ({en2 : [{1}]}, \"\"), ({vdd33: [(3000, 4000)], vdd: [(2000, 2500)]}, \"wait until",
"= True return except State_Space_Error: self.is_default = False try: intersect(states[self.V_PWR.name], [(0, 4400)]) self.current",
"vcc : [(0, 0)], io : [(0, 0)], aux : [(0, 0)], vcco",
"node, name: node.isppac_monitor(\"VMON7\", name)) VMON7_ATT = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7_ATT\",",
"PowerState, Stateful_Node, intersect, State_Space_Error, unite_dict, state_union, SET, empty_intersection import math from functools import",
"class NCP(Node): implicit_on = {\"VRI\" : [(868, 3600)], \"VCC\" : [(2375, 5500)]} implicit_off",
"node.isppac_monitor(\"VMON6\", name)) VMON7 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7\", name)) VMON7_ATT",
"device): self.device = device self.configured = False super(SI5395, self).__init__(name, bus_addr, SI5395) def update(self,",
"[(0, 2000)]} VMON1_ATT = Input([(0, 13900)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON1_ATT\", name, 0.4125))",
"927)], ok : [{1}] }, { \"POWERED_DOWN\" : [ ({clk: [(0, 0), (3300,",
"name, \"implicit_off\")), Constraint([{0}], {\"VCC\": [(0, 4500)], \"VCC_IN\": [(0, 2000)]}, partial(Constraint.implicit, name, \"implicit_off\"))], \"logical\",",
"device super(INA226, self).__init__(name, bus_addr, INA226) self.configured = False def ina_monitor(self, wire_name): def fun(value,",
"(\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_wires = [ (\"b_psup_on\",",
"(\"U47\", \"EN_VTT\"), (\"U51\", \"VDDH\"), (\"IC13\", \"V_PWR\")}), (\"en_vcc1v8_fpga\", \"U35\", \"OUT15\", {(\"IC12\", \"EN\")}), (\"vcc1v8_fpga\", \"IC12\",",
"#this is sys_1v8.... VCCINT = Input([(0, 1000)], \"power\") MGTAVCC = Input([(0, 1000)], \"power\")",
"[(2400, 2600)], ##### REGULAR VALUES: ####### #vcc_ddr13 : [(1140, 3400)], #vcc_ddr24 : [(1140,",
": [] }), \"POWERED_ON\" : PowerState({ clk: [(0, 0), (3300, 3300), (50, 50)],",
"(3300, 3300), (0, 0)], {\"VDD\": [(0, 2599)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\")), Constraint([(0, 0), (3300,",
"Constraint([(0, 0), (3300, 3300), (0, 0)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300,",
"node.construct_dependency(name, node.construct_req_off(inputs))) ], \"bus\") def __init__(self, name, bus_addr): super(Bus, self).__init__(name, bus_addr, Bus) def",
"node.configure() if list(value[0])[0] == 0: commands.append(\"wait_for_voltage('%s', v_min=0, v_max=0.08, device='%s', monitor='%s')\" % ( wire_name,",
"], \"POWERED_ON\": []}) }, [\"VDD33\", \"VDD\", \"EN1\", \"EN2\"]) def __init__(self, name, bus_addr): super(CPU_3,",
"def __init__(self, name, bus_addr): super(BMC, self).__init__(name, bus_addr, BMC) self.configured = False def configure(self):",
"1), ([(\"VREF\", 0), (\"VRI\", 0)]))]), Constraint([(0, 0)], {\"VRI\" : [(0, 868)], \"VCC\" :",
"[(0, 2600)], \"VCC_IN\": [(0, 2000)]} VMON1_ATT = Input([(0, 13900)], \"monitor\", lambda node, name:",
"self.implicit_off_2(threshold) self.VOUT = self.VOUT(threshold) self.VOUT_2 = self.VOUT_2(threshold) super(IR, self).__init__(name, bus_addr, IR) def bus_req(self):",
"req[wire]) print(req) return req def construct_dependency(self, name, req): return (SET.Implicit, [set(), set(), set(),",
"{\"VDD\": [(0, 2599)], \"CLK_IN\": [(0, 0), (3300, 3300), (0, 0)]} VDD = Input([(0,",
"50)], [ Constraint([(0, 0), (3300, 3300), (50, 50)], {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0,",
"return {self.V_PWR.name: [(5500, 14000)]} def bus_req_off(self): return {self.V_PWR.name: [(0, 4400)]} def update(self, states):",
"name, req): return (SET.Implicit, [set(), set(), set(), set()], lambda states, req = req:",
"bus_addr): super(FPGA_EVAL3, self).__init__(name, bus_addr, \"POWERED_DOWN\", FPGA_EVAL3) class ThunderX(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\")",
"[{1}], \"V_PWR\": [(5500, 14000)]}, partial(Constraint.implicit, \"V_OUT\", MAX15301.implicit_on), state_update = Constraint.default_state), Constraint([(600, 5250)], {\"EN\":",
"\"VMON12\"), (\"fpga\", \"VCCO_1V8\")}), #where to connect at fpga? additional vcco thingy? (\"en_sys_2v5_13\", \"U35\",",
"927)], io : [(873, 927)], aux : [(1746, 1854)], vcco : [(1746, 1854)],",
"(\"sys_2v5_13\", \"IC8\", \"V_OUT\", {(\"U35\", \"VMON4_ATT\"), (\"U39\", \"VCC\"), (\"fpga\", \"VCCO_2V5_DDR13\")}), #add NCP nodes (\"vtt_ddrfpga13\",",
"bus_addr): super(CPU2, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU2) class BMC(Node): B_CLOCK_FLOL = Output([{0, 1}], [Constraint([{0,",
"\"VTT_DDR13\")}), (\"en_2v5_cpu24\", \"U20\", \"OUT10\", {(\"IC16\", \"V_EN\")}), #to NCP (\"2v5_cpu24\", \"IC16\", \"V_OUT\", {(\"U20\", \"VMON8_ATT\"),",
"6.25) outputs.append( Constraint( [(voltage_min, voltage_max)], \\ {\"VID\" : binary_multidimensional(i + 2), \"VCC\" :",
"VBUS = Input([(0, 40000)], \"monitor\", lambda node, name: node.ina_monitor(name)) def __init__(self, name, bus_addr,",
"Input([(0, 6000)], \"power\") #reference input VREF = Output([(0, 6000)], [Constraint([(435, 1800)], {\"VRI\" :",
"1854)], vcc_2v5_ddr13 : [(2400, 2600)], vcc_2v5_ddr24 : [(2400, 2600)], vcc_ddr13 : [(1140, 3400)],",
"({en2 : [{1}], vdd: [(2000, 2600)]}, \"\") ], \"POWERED_ON\": []}) }, [\"VDD\", \"EN1\",",
"\"EN2\"]) def __init__(self, name, bus_addr): super(CPU_3, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU_3) class PSU(Node): EN",
": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V12_PSU\", {\"EN\": [{0}]}))], \"power\") V5SB_PSU =",
"[\"CLK\", \"CLK_OK\", \"VCCINT\", \"VCCINT_IO\", \"VCCAUX\", \"VCCO_1V8\", \"VADJ_1V8\", \"VCCO_2V5_DDR13\", \"VCCO_2V5_DDR24\", \"VCCO_VCC_DDR13\", \"VCCO_VCC_DDR24\", \"VCCO_VTT_DDR13\", \"VCCO_VTT_DDR24\",",
"by now\" %vdd), #### FOR EVAL 3 ###################### ({ddr24 : [(1200, 1200)], ddr24_2v5",
"#EVAL 3 version of the Enzian nodes, only changes classes of ThunderX and",
"1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(voltage, int(voltage * 1.01)) super(MAX15053,",
"[{0}], \"VDD33\": [0, 2800], \"VDDH\": [(0, 8499)]} VDD33 = Input([(0, 4000)], \"power\") BUS",
"{\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\": [{0}]}))], \"power\") def",
"p 97 (\"U43\", 0x0, ISL, []), #VDD_DDRFPGA13 p 93 (\"U47\", 0x0, ISL, []),",
"Constraint([(0, 0)], { \"VRI\" : [(0, 3600)], \"VCC\" : [(0, 2374)]}, partial(Constraint.implicit, \"VREF\",",
"\"monitor\", lambda node, name: node.ina_monitor(name)) def __init__(self, name, bus_addr, device): self.device = device",
"vtt_ddr13, vtt_ddr24, vtt, mgtaux_l, mgtaux_r, mgtavcc: { \"POWERED_DOWN\" : PowerState({ clk: [(0, 0),",
"True return [ \"init_device('%s', False)\" % self.device, \"init_device('%s', False)\" % self.loop1, \"init_device('%s', False)\"",
"VALUES: ####### #vcc_ddr13 : [(1140, 3400)], #vcc_ddr24 : [(1140, 3400)], ##### VALUES FOR",
"##### REGULAR TRANSITION STEP ###### #({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400, 2600)],",
"0)], vcc_ddr13 : [(0, 0)], vcc_ddr24 : [(0, 0)], vtt_ddr13 : [(0, 0)],",
"MAX20751.V_OUT(default) super(MAX20751, self).__init__(name, bus_addr, MAX20751) def bus_req(self): return {} def bus_req_off(self): return {}",
"name, bus_addr): super(Main_PSU, self).__init__(name, bus_addr, Main_PSU) class PowerSupply(Node): OUT0 = Output([(0, 12000)], [([(0,",
"(3300, 3300), (0, 0)]}, partial(Constraint.implicit, \"CLK\", \"implicit_off\"))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr,",
": [ ({clk: [(0, 0), (3300, 3300), (50, 50)]}, \"\"), ({vcc : [(873,",
"[(873, 927)]}, \"\"), ({io : [(873, 927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"),",
"node.indep(\"OUT2\"))], \"power\") def __init__(self, name, bus_addr): super(PowerSupply, self).__init__(name, bus_addr, PowerSupply) class ISPPAC(Node): implicit_off",
"partial(Constraint.explicit, \"B_PSUP_ON\", set(), set()))], \"logical\", Wire.gpio_set) C_RESET_N = Output([{0, 1}], [Constraint([{0, 1}], {},",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON10\", name)) VMON11 = Input([(0, 5734)],",
"#VDD_CORE, VDD_OCT p 77 (\"U26\", 0x0, ISL, []), #VDD_DDRCPU13 p 73 (\"U30\", 0x0,",
"(\"vadj_1v8_fpga\", \"IC11\", \"V_OUT\", {(\"fpga\", \"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\", \"U35\", \"OUT13\", {(\"IC13\", \"EN\")}), (\"vccintio_bram_fpga\", \"IC13\", \"V_OUT\",",
"\"logical\") VIN = Input([(0, 13200)], \"power\") VOUT = lambda _, thresh : Output([(0,",
"\"\"), ##### FOR EVAL 3: ################### ({vcco : [(1746, 1854)], vcc_2v5_ddr13 : [(2400,",
"[(0, 2599)]}, partial(Constraint.implicit, \"CLK\", {\"VDD\": [(0, 2599)]}))], \"clock\", Wire.clock_config) def __init__(self, name, bus_addr):",
"name = \"OUT\" + str(number) output = Output([{0, 1}], [Constraint([{1, 0}], {\"VCC\": [(2800,",
"\"POWERED_DOWN\" : [] } ), \"POWERED_ON\" : PowerState({ ok : [{1}], rst :",
"\"V_OUT\", \"implicit_off\")), ], \"power\") def __init__(self, name, bus_addr, voltage): self.implicit_on = self.implicit_on(int(voltage *",
"1}, {0, 1}, {0, 1}, {0, 1}], [ Constraint([{0, 1}, {0, 1}, {0,",
"implicit_off = {\"VCC\" : [(0, 4300)], \"EN_PWR\" : [{0, 1}], \"EN_VTT\" : [(0,",
"[(700, 800)]}, \"%s should have stabilized by now\" %vdd), #### FOR EVAL 3",
"\"BUS\" : [{1}]}, {}, dependency_update = (Constraint.is_default, [partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN_2\"},",
"\"OUT10\", {(\"IC16\", \"V_EN\")}), #to NCP (\"2v5_cpu24\", \"IC16\", \"V_OUT\", {(\"U20\", \"VMON8_ATT\"), (\"U25\", \"VCC\"), (\"cpu\",",
"2700), 5500)], \"SHDN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0,",
"VDD = Input([(0, 3600)], \"power\") CLK_IN = Input([(0, 0), (3300, 3300), (0, 50)],",
"(\"b_fdv_1v8\", \"bmc\", \"B_FDV_1V8\", {(\"U43\", \"VID\"), (\"U47\", \"VID\")}), (\"c_reset_n\", \"bmc\", \"C_RESET_N\", {(\"cpu\", \"CHIP_RESET_L\")}), (\"pll_dc_ok\",",
"return outputs class ISL(Node): implicit_off = {\"VCC\" : [(0, 4300)], \"EN_PWR\" : [{0,",
"return req def construct_req_off(self, inputs): req = {} for node, _ in inputs:",
"40000)], \"monitor\", lambda node, name: node.ina_monitor(name)) def __init__(self, name, bus_addr, device): self.device =",
": [(2900, 3630)], \"VIN\" : [(thresh, 13200)], \"EN\" : [{1}], \"BUS\": [{1}]}, {},",
"return fun def configure(self): if self.configured: return [] else: self.configured = True return",
"vcc_ddr13 : [(1140, 3400)], vcc_ddr24 : [(1140, 3400)], vtt_ddr13 : [(550, 1700)], vtt_ddr24",
"clk : [(0, 0), (3300, 3300), (0, 0)], vdd : [(0, 0)], vdd09",
"(\"U20\", 0x60, ISPPAC, [\"pac_cpu\"]), #cpu ISPPAC (\"U35\", 0x61, ISPPAC, [\"pac_fpga\"]), #fpga ISPPAC (\"U44\",",
"[1800]), #MGTVCCAUX_L, p 88 (\"IC6\", 0x0, MAX8869, [1800]), #MGTVCCAUX_R, p 88 (\"IC7\", 0x0,",
"0), (3300, 3300), (0, 0)]} VDD = Input([(0, 3600)], \"power\") CLK_IN = Input([(0,",
"0)], vtt_ddr13 : [(0, 0)], vtt_ddr24 : [(0, 0)], vadj: [(0, 0)]}, \"\"),",
"2.5 V ddr13_2v5 : [(2400, 2600)], vttddr24 : [(700, 800)], vttddr13 : [(700,",
"False)\" % (self.device) ] #EVAL 3 version of the Enzian nodes, only changes",
"[]), (\"U24\", 0x0, NCP, []), (\"U25\", 0x0, NCP, []), (\"U39\", 0x0, NCP, []),",
"0x0, SI5395, [\"clk_cpu\"]), (\"U16\", 0x0, SI5395, [\"clk_fpga\"]), (\"oscillator\", 0x0, Oscillator, []), ] enzian_nodes",
"= self.implicit_on(int(voltage * 1.01)) self.implicit_off = self.implicit_off(int(voltage * 1.01)) self.V_OUT = self.V_OUT(voltage, int(voltage",
"class FPGA_EVAL3(Stateful_Node): CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\") CLK_OK =",
"VDD = Input([(0, 1210)], \"power\") VDD_09 = Input([(0, 945)], \"power\") VDD_15 = Input([(0,",
"bus_addr, ISPPAC) self.configured = False def generate_output(self, number): name = \"OUT\" + str(number)",
"[(0, 0), (3300, 3300), (50, 50)], io33 : [(3140, 3460)]}, \"wait for %s",
"\"POWERED_ON\" : [] }) }, [\"PLL_DC_OK\", \"CHIP_RESET_L\", \"PLL_REF_CLK\", \"VDD\", \"VDD_09\", \"VDD_15\", \"VDD_DDR24\", \"VDD_2V5_DDR24\",",
"(\"U43\", \"EN_VTT\"), (\"U47\", \"EN_VTT\"), (\"U51\", \"VDDH\"), (\"IC13\", \"V_PWR\")}), (\"en_vcc1v8_fpga\", \"U35\", \"OUT15\", {(\"IC12\", \"EN\")}),",
"_, node=self, wire_name=wire_name, pinname=pinname, multiplier=multiplier): commands = node.configure() if list(value[0])[0] == 0: commands.append(\"wait_for_voltage('%s',",
"return [ \"init_device('%s', False)\" % (self.device) ] class Clock(Node): CLK = Output([(0, 3300),",
"self.default = default self.is_default = False self.current = [(default, default)] self.V_OUT = MAX15301.V_OUT(default)",
"0), (3300, 3300), (0, 50)], \"clock\") CLK_OK = Input([{0, 1}], \"logical\") VCCO_2V5_DDR24 =",
"else: self.configured = True return [ \"init_device('isl6334d_ddr_v', False)\" ] class CPU_3(Stateful_Node): VDD33 =",
"isl_outputs(), \"power\") def __init__(self, name, bus_addr): self.is_default = False super(ISL, self).__init__(name, bus_addr, ISL)",
"if not wire in req: req[wire] = state else: req[wire] = state_union(state, req[wire])",
"\"EN_VTT\", \"EN_PWR\"})]))) outputs.append(Constraint([(0, 0)], {\"VID\": [{0, 1}, {0, 1}, {0, 1}, {0, 1},",
"req def construct_req_off(self, inputs): req = {} for node, _ in inputs: node_req",
"1}], [Constraint([{0, 1}], {}, partial(Constraint.explicit, \"B_PSUP_ON\", set(), set()))], \"logical\", Wire.gpio_set) C_RESET_N = Output([{0,",
"], \"power\", Wire.voltage_set) def __init__(self, name, bus_addr, default, device): self.device = device self.default",
"[Constraint([(default, default)], {\"V_IN\" : [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" : [{1}]}, partial(Constraint.implicit,",
"\"VIN\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit, \"VOUT\", {\"VCC\", \"VIN\", \"BUS\", \"EN\"}, set())])),",
"\"V33_PSU\", {(\"U20\", \"VMON3_ATT\"), (\"U27\", \"VS\"), (\"U31\", \"VS\"), (\"IC15\", \"V_IN\"), (\"IC16\", \"V_IN\"), (\"cpu\", \"VDD_IO33\"),",
"\"VBUS\"), (\"cpu\", \"VDD_DDR24\"), (\"U25\", \"VRI\")}), (\"vdd_core_en\", \"U20\", \"OUT6\", {(\"U34\", \"EN\")}), (\"vdd_core\", \"U34\", \"VOUT\",",
"indicate changes class FPGA_EVAL3(Stateful_Node): CLK = Input([(0, 0), (3300, 3300), (0, 50)], \"clock\")",
"MAX15053, [1800]), #SYS_1V8, p 89 (\"IC8\", 0x0, MAX15053, [2500]), #SYS_2V5_13 (\"IC9\", 0x0, MAX15053,",
"class Bus(Node): BUS = Output([{0, 1}], [ Constraint([{1}], lambda node, inputs: node.construct_req(inputs), lambda",
"{(\"U35\", \"VMON9\"), (\"fpga\", \"MGTVCCAUX_R\")}), (\"en_vadj_1v8_fpga\", \"U35\", \"OUT17\", {(\"IC11\", \"EN\")}), (\"vadj_1v8_fpga\", \"IC11\", \"V_OUT\", {(\"fpga\",",
"[(0, 0)]}, \"\"), ({io : [(0, 0)]}, \"\"), ({vcc : [(0, 0)]}, \"\"),",
"\"IC13\", \"V_OUT\", {(\"U35\", \"VMON10\"), (\"fpga\", \"VCCINT_IO\")}), (\"en_vdd_ddrfpga13\", \"U35\", \"OUT18\", {(\"U43\", \"EN_PWR\")}), (\"en_vdd_ddrfpga24\", \"U35\",",
")) return (True, \"\\n\".join(commands)) else: return (False, \"wait_for_voltage('%s', v_min=%.3f, v_max=%.3f, device='%s', monitor='VOLTAGE')\" %",
"x: not empty_intersection(x, req, states), req.keys()))}) class MAX20751(Node): implicit_on = {\"VR_ON\": [{1}], \"VDD33\":[(2970,",
"return except State_Space_Error: pass class Oscillator(Node): VDD = Input([(0, 3600)], \"power\") CLK =",
"node, name, inputs: node.construct_dependency(name, node.construct_req_off(inputs))) ], \"bus\") def __init__(self, name, bus_addr): super(Bus, self).__init__(name,",
"(\"vtt_ddrcpu13\", \"U24\", \"VREF\", {(\"U20\", \"VMON11\"), (\"cpu\", \"VTT_DDR13\")}), (\"en_2v5_cpu24\", \"U20\", \"OUT10\", {(\"IC16\", \"V_EN\")}), #to",
"1}, {0, 1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_CDV_1V8\", set(), set()))], \"logical\",",
"V33_PSU = Output([(0, 3300)], [ Constraint([(3300, 3300)], {\"EN\": [{1}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{1}]})),",
"name, bus_addr): super(CPU2, self).__init__(name, bus_addr, \"POWERED_DOWN\", CPU2) class BMC(Node): B_CLOCK_FLOL = Output([{0, 1}],",
"] enzian_wires = [ (\"b_psup_on\", \"bmc\", \"B_PSUP_ON\", {(\"psu_cpu0\", \"EN\"), (\"psu_cpu1\", \"EN\"), (\"main_psu\", \"EN\")}),",
"\"%s should have stabilized by now\" %vdd), ###################################### ({ok : [{1}]}, \"must have",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON11_ATT\", name)) VMON12 = Input([(0, 5734)],",
"\"monitor\", lambda node, name: node.isppac_monitor(\"VMON7\", name)) VMON7_ATT = Input([(0, 5734)], \"monitor\", lambda node,",
"V ddr13_2v5 : [(2400, 2600)], vttddr24 : [(570, 630)], vttddr13 : [(570, 630)],",
"\"VREF\", {(\"fpga\", \"VCCO_VTT_DDR24\")}), (\"clk_sig\", \"oscillator\", \"CLK\", {(\"U57\", \"CLK_IN\")}), (\"clk_main\", \"U57\", \"CLK\", {(\"U11\", \"CLK_IN\"),",
"(\"oscillator\", 0x0, Oscillator, []), ] enzian_nodes = [ (\"power_bus\", 0x0, Bus, []), (\"psu_cpu0\",",
"(\"fpga\", \"VCCAUX\")}), (\"en_util33\", \"U35\", \"OUT6\", {(\"IC10\", \"EN\")}), (\"util33\", \"IC10\", \"V_OUT\", {(\"U35\", \"VMON3_ATT\"), (\"U44\",",
"by now\" %vdd), ({ok : [{1}]}, \"must have written pll_mul and sys_pll_mul beforehand\"),",
"SI5395(Node): implicit_on = {\"VDD\": [(2600, 3600)], \"CLK_IN\": [(0, 0), (3300, 3300), (50, 50)]}",
"True return [ \"init_device('%s', False)\" % (self.device) ] class Clock(Node): CLK = Output([(0,",
"{\"EN\": [{0}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\": [{0}]}))], \"power\") def __init__(self, name, bus_addr): super(Main_PSU, self).__init__(name,",
"{0, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}], {}, partial(Constraint.explicit, \"B_FDV_1V8\",",
"node, name: node.isppac_monitor(\"VMON3_ATT\", name)) VMON4 = Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON4\",",
"5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON10\", name)) VMON11 = Input([(0, 5734)], \"monitor\", lambda",
"927)]}, \"\"), ({aux : [(1746, 1854)]}, \"\"), ##### REGULAR TRANSITION STEP ###### #({vcco",
"(\"fpga\", \"VCCINT\")}), (\"en_sys_1v8\", \"U35\", \"OUT16\", {(\"IC7\", \"V_EN\")}), (\"sys_1v8\", \"IC7\", \"V_OUT\", {(\"U35\", \"VMON12\"), (\"fpga\",",
"{(\"IC11\", \"EN\")}), (\"vadj_1v8_fpga\", \"IC11\", \"V_OUT\", {(\"fpga\", \"VADJ_1V8\")}), (\"en_vccintio_bram_fpga\", \"U35\", \"OUT13\", {(\"IC13\", \"EN\")}), (\"vccintio_bram_fpga\",",
"####### REGULAR VALUES ######### #ddr13 : [(1425, 1575)], #ddr24: [(1425, 1575)], ####### FOR",
"77 (\"U26\", 0x0, ISL, []), #VDD_DDRCPU13 p 73 (\"U30\", 0x0, ISL, []), #VDD_DDRCPU24",
"(\"IC16\", 0x0, MAX15053, [2500]), #2V5_CPU24 p 71 (\"fpga\", 0x0, FPGA, []), (\"cpu\", 0x0,",
"Input([(0, 2000)], \"power\") states = (lambda clk, ok, vcc, io, aux, vcco, vadj,",
": [(0, 0), (3300, 3300), (0, 0)], vdd : [(0, 0)], vdd09 :",
"\"VIN\" : [(thresh, 13200)], \"EN\" : [{1}], \"BUS\": [{1}]}, {}, dependency_update = (Constraint.is_default,",
"= Output([(0, 12000)], [([(0, 12000)], {}, [], lambda node: node.indep(\"OUT2\"))], \"power\") def __init__(self,",
"= (Constraint.is_default, [partial(Constraint.explicit, \"VOUT_2\", {\"VCC\", \"VIN\", \"BUS\"}, {\"EN_2\"}, after_set = {\"EN_2\"}), partial(Constraint.explicit, \"VOUT_2\",",
"__init__(self, name, bus_addr): super(ThunderX, self).__init__(name, bus_addr, \"POWERED_DOWN\", ThunderX) #EVAL 3 version of the",
"mgtaux_r, mgtavcc: { \"POWERED_DOWN\" : PowerState({ clk: [(0, 0), (3300, 3300), (0, 0)],",
"{\"EN\": [{1}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\": [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V33_PSU\", {\"EN\":",
"\"VOUT\", {(\"U44\", \"VBUS\"), (\"fpga\", \"VCCO_VCC_DDR24\"), (\"U40\", \"VRI\")}), (\"b_cdv_1v8\", \"bmc\", \"B_CDV_1V8\", {(\"U26\", \"VID\"), (\"U30\",",
"V_OUT = lambda _, default, thresh: Output([(0, thresh)], [Constraint([(default, default)], {\"V_IN\" : [(max(thresh",
"#not imposed by the cpu, but the connected DIMM SPD needs 2.5 V",
"\"V_PWR\": [(0, 4400)]} implicit_on = {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)]} BUS = Input([{0,",
": [{0}]} device = \"ir3581\" bus = \"power\" BUS = Input([{0, 1}], \"bus\")",
": [{1}]}, {}, dependency_update=(Constraint.is_default, [partial(Constraint.explicit, \"V_OUT\", {\"V_PWR\", \"BUS\"}, {\"EN\"}, after_set = {\"EN\"}), partial(Constraint.explicit,",
": [{1}]} implicit_off = lambda _, thresh: {\"V_IN\" : [(0, max(thresh + 499,",
"\"\"), ({aux : [(0, 0)]}, \"\"), ({io : [(0, 0)]}, \"\"), ({vcc :",
"indicate changes class ThunderX_EVAL3(Stateful_Node): PLL_DC_OK = Input([{0, 1}], \"logical\") CHIP_RESET_L = Input([{0, 1}],",
"= Input([(0, 5734)], \"monitor\", lambda node, name: node.isppac_monitor(\"VMON7\", name)) VMON7_ATT = Input([(0, 5734)],",
"\"V_OUT\", \"implicit_on\")), Constraint([(0, 0)], {\"V_IN\" : [(0, 5500)], \"V_EN\" : [{0}]}, partial(Constraint.implicit, \"V_OUT\",",
": [(max(int(threshold * 1.06), 2700), 5500)], \"V_EN\" : [{1}]}, partial(Constraint.implicit, \"V_OUT\", \"implicit_on\")), Constraint([(0,",
"(\"IC11\", \"V_PWR\"), (\"IC12\", \"V_PWR\"), (\"U43\", \"EN_VTT\"), (\"U47\", \"EN_VTT\"), (\"U51\", \"VDDH\"), (\"IC13\", \"V_PWR\")}), (\"en_vcc1v8_fpga\",",
"= bin(decimal)[2:] #remove 0b prefix multidim = list({0} for i in range(8 -",
"[{1}], en2 : [{0}]}, { \"POWERED_DOWN\": [ ({en2 : [{1}]}, \"\"), ({vdd33: [(3000,",
"Constraint([(600, 5250)], {\"EN\": [{1}], \"V_PWR\": [(5500, 14000)], \"BUS\" : [{1}]}, {}, dependency_update=(Constraint.is_default, [partial(Constraint.explicit,",
"vdd, vdd09, vdd15, ddr24, ddr24_2v5, ddr13, ddr13_2v5, vttddr24, vttddr13, io33 : { #pll_vdd,",
"partial(Constraint.implicit, \"V5_PSU\", {\"EN\" : [{1}]})), Constraint([(0, 0)], {\"EN\": [{0}]}, partial(Constraint.implicit, \"V5_PSU\", {\"EN\": [{0}]}))],",
"[(550, 1700)], vtt_ddr24 : [(550, 1700)], vadj: [(1746, 1845)]}, \"\"), ##### FOR EVAL",
": [(0, 0)]}, \"\"), ({io : [(0, 0)]}, \"\"), ({vcc : [(0, 0)]},",
"= Input([(0, 6000)], \"power\") V_EN = Input([{0, 1}], \"logical\") V_OUT = lambda _,",
"__init__(self, name, bus_addr): super(PowerConsumer, self).__init__(name, bus_addr, PowerConsumer) class CPU2(Stateful_Node): VDD = Input([(0, 2600)],"
] |
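The shards in the row above slice a constraint-based power-sequencing model (nodes such as MAX15301, ISL, NCP, ISPPAC, FPGA, ThunderX). One helper is recoverable in full from adjacent shards: binary_multidimensional, which the ISL VID constraints call as binary_multidimensional(i + 2). It expands an integer into the 8-dimensional VID pin pattern, one singleton set per bit, left-padded with {0}. A minimal runnable Python sketch, reassembled from the shards (the def line is inferred from the call sites; the body is as the fragments show it):

    def binary_multidimensional(decimal):
        binary = bin(decimal)[2:]  # remove 0b prefix
        # left-pad with {0} so the result always has 8 bit-dimensions
        multidim = list({0} for i in range(8 - len(binary)))
        for i in binary:
            multidim.append({int(i)})  # one singleton set per bit
        return multidim

    # e.g. binary_multidimensional(5) -> [{0}, {0}, {0}, {0}, {0}, {1}, {0}, {1}]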
[
"self.graph nx_graph = nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph) # this is a dict sorted_scores",
"source == 'all_filters': source = self.words_all_filters else: source = self.words_no_stop_words sim_func = self._get_similarity_standard",
"delimiters='?!;?!。;…\\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters)",
"vector1, vector2 def get_key_sentences(self, num = 6, sentence_min_len = 6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。",
"指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences = [] (self.sentences, self.words_no_filter, self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter);",
"xrange(x, sentences_num): similarity = sim_func(source[x], source[y]) self.graph[x, y] = similarity self.graph[y, x] =",
"0.: return 0. return co_occur_num / denominator def _gen_vectors(self, word_list1, word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。",
"codecs.open('../text/03.txt', 'r', 'utf-8').read() text = \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s = TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True, lower=True, source",
"def _get_similarity_standard(self, word_list1, word_list2): ''' 默认的用于计算两个句子相似度的函数。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' vector1, vector2 =self._gen_vectors(word_list1,",
"分别代表两个句子,都是由单词组成的列表 ''' words = list(set(word_list1 + word_list2)) vector1 = [float(word_list1.count(word)) for word in",
"=self._gen_vectors(word_list1, word_list2) # print vector1, vector2 vector3 = [vector1[x]*vector2[x] for x in xrange(len(vector1))]",
"self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences = None self.words_no_filter = None # 2维列表 self.words_no_stop_words",
"'[', ', \\''.join(wl), ']' print for wl in tr4s.words_no_stop_words: print '[', ', \\''.join(wl),",
"= list(set(word_list1 + word_list2)) vector1 = [float(word_list1.count(word)) for word in words] vector2 =",
"is a dict sorted_scores = sorted(scores.items(), key = lambda item: item[1], reverse=True) #",
"''' self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences = None self.words_no_filter = None # 2维列表",
"''' import networkx as nx from Segmentation import Segmentation import numpy as np",
"''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 ''' result = [] count = 0 for sentence in",
">= sentence_min_len: result.append(sentence) count += 1 return result if __name__ == '__main__': import",
"wl in tr4s.words_no_filter: print '[', ', \\''.join(wl), ']' print for wl in tr4s.words_no_stop_words:",
"/ denominator def _gen_vectors(self, word_list1, word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' words",
"== 'all_filters': source = self.words_all_filters else: source = self.words_no_stop_words sim_func = self._get_similarity_standard sentences_num",
"in tr4s.words_no_filter: print '[', ', \\''.join(wl), ']' print for wl in tr4s.words_no_stop_words: print",
"sentence in self.key_sentences: if count >= num: break if len(sentence) >= sentence_min_len: result.append(sentence)",
"speech_tag_filter=speech_tag_filter); # - # print self.sentences if source == 'no_filter': source = self.words_no_filter",
"in self.key_sentences: if count >= num: break if len(sentence) >= sentence_min_len: result.append(sentence) count",
"= nx.pagerank(nx_graph) # this is a dict sorted_scores = sorted(scores.items(), key = lambda",
"''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。",
"> 0: # self.graph[x, :] = self.graph[x, :] / row_sum # print self.graph",
"TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True, lower=True, source = 'all_filters') print '\\n'.join(tr4s.get_key_sentences(num=1)) print '\\n'.join(tr4s.sentences) for wl",
"默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences = [] (self.sentences, self.words_no_filter, self.words_no_stop_words, self.words_all_filters)",
"xrange(sentences_num): # row_sum = np.sum(self.graph[x, :]) # if row_sum > 0: # self.graph[x,",
"if count >= num: break if len(sentence) >= sentence_min_len: result.append(sentence) count += 1",
"= [] count = 0 for sentence in self.key_sentences: if count >= num:",
"nx from Segmentation import Segmentation import numpy as np import math class TextRank4Sentence(object):",
"\"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s = TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True, lower=True, source = 'all_filters') print '\\n'.join(tr4s.get_key_sentences(num=1)) print",
"# print self.graph nx_graph = nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph) # this is a",
"print '\\n'.join(self.key_sentences) def _get_similarity_standard(self, word_list1, word_list2): ''' 默认的用于计算两个句子相似度的函数。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' vector1,",
"TextRank4Sentence(object): def __init__(self, stop_words_file = None, delimiters='?!;?!。;…\\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。",
"result = [] count = 0 for sentence in self.key_sentences: if count >=",
"scores = nx.pagerank(nx_graph) # this is a dict sorted_scores = sorted(scores.items(), key =",
"denominator def _gen_vectors(self, word_list1, word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' words =",
"denominator = math.log(float(len(word_list1))) + math.log(float(len(word_list2))) # 分母 if denominator == 0.: return 0.",
"text = codecs.open('../text/03.txt', 'r', 'utf-8').read() text = \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s = TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True,",
"item: item[1], reverse=True) # print sorted_scores for index, _ in sorted_scores: self.key_sentences.append(self.sentences[index]) #",
"None self.words_no_filter = None # 2维列表 self.words_no_stop_words = None self.words_all_filters = None self.graph",
"num: break if len(sentence) >= sentence_min_len: result.append(sentence) count += 1 return result if",
"sentences_num = len(source) self.graph = np.zeros((sentences_num, sentences_num)) for x in xrange(sentences_num): for y",
"+ word_list2)) vector1 = [float(word_list1.count(word)) for word in words] vector2 = [float(word_list2.count(word)) for",
"None # 2维列表 self.words_no_stop_words = None self.words_all_filters = None self.graph = None self.key_sentences",
"= None, delimiters='?!;?!。;…\\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg =",
"= self._get_similarity_standard sentences_num = len(source) self.graph = np.zeros((sentences_num, sentences_num)) for x in xrange(sentences_num):",
"self.words_all_filters) = self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter); # - # print self.sentences if source ==",
"if denominator == 0.: return 0. return co_occur_num / denominator def _gen_vectors(self, word_list1,",
"-*- ''' Created on Dec 1, 2014 @author: letian ''' import networkx as",
"= nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph) # this is a dict sorted_scores = sorted(scores.items(),",
"2014 @author: letian ''' import networkx as nx from Segmentation import Segmentation import",
"`sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences = [] (self.sentences, self.words_no_filter, self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text, lower=lower,",
"''' vector1, vector2 =self._gen_vectors(word_list1, word_list2) # print vector1, vector2 vector3 = [vector1[x]*vector2[x] for",
"''' self.key_sentences = [] (self.sentences, self.words_no_filter, self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter); #",
"'all_filters') print '\\n'.join(tr4s.get_key_sentences(num=1)) print '\\n'.join(tr4s.sentences) for wl in tr4s.words_no_filter: print '[', ', \\''.join(wl),",
"self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences = None self.words_no_filter",
"默认的用于计算两个句子相似度的函数。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' vector1, vector2 =self._gen_vectors(word_list1, word_list2) # print vector1, vector2",
"co_occur_num / denominator def _gen_vectors(self, word_list1, word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 '''",
"for word in words] return vector1, vector2 def get_key_sentences(self, num = 6, sentence_min_len",
"None def train(self, text, lower = False, speech_tag_filter=True, source = 'no_stop_words', sim_func =",
"lower = False, speech_tag_filter=True, source = 'no_stop_words', sim_func = 'standard'): ''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。",
"= None self.graph = None self.key_sentences = None def train(self, text, lower =",
"0. denominator = math.log(float(len(word_list1))) + math.log(float(len(word_list2))) # 分母 if denominator == 0.: return",
"= [float(word_list1.count(word)) for word in words] vector2 = [float(word_list2.count(word)) for word in words]",
"import math class TextRank4Sentence(object): def __init__(self, stop_words_file = None, delimiters='?!;?!。;…\\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。",
"train(self, text, lower = False, speech_tag_filter=True, source = 'no_stop_words', sim_func = 'standard'): '''",
"else: source = self.words_no_stop_words sim_func = self._get_similarity_standard sentences_num = len(source) self.graph = np.zeros((sentences_num,",
"stop_words_file = None, delimiters='?!;?!。;…\\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg",
"lower=lower, speech_tag_filter=speech_tag_filter); # - # print self.sentences if source == 'no_filter': source =",
"for y in xrange(x, sentences_num): similarity = sim_func(source[x], source[y]) self.graph[x, y] = similarity",
"def train(self, text, lower = False, speech_tag_filter=True, source = 'no_stop_words', sim_func = 'standard'):",
"[] (self.sentences, self.words_no_filter, self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter); # - # print",
"word in words] vector2 = [float(word_list2.count(word)) for word in words] return vector1, vector2",
"'\\n'.join(self.key_sentences) def _get_similarity_standard(self, word_list1, word_list2): ''' 默认的用于计算两个句子相似度的函数。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' vector1, vector2",
"self.words_all_filters else: source = self.words_no_stop_words sim_func = self._get_similarity_standard sentences_num = len(source) self.graph =",
"__name__ == '__main__': import codecs # text = codecs.open('../text/03.txt', 'r', 'utf-8').read() text =",
"in xrange(sentences_num): # row_sum = np.sum(self.graph[x, :]) # if row_sum > 0: #",
"in tr4s.words_no_stop_words: print '[', ', \\''.join(wl), ']' print for wl in tr4s.words_all_filters: print",
"self.words_no_stop_words sim_func = self._get_similarity_standard sentences_num = len(source) self.graph = np.zeros((sentences_num, sentences_num)) for x",
"if __name__ == '__main__': import codecs # text = codecs.open('../text/03.txt', 'r', 'utf-8').read() text",
"`stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences =",
"vector1, vector2 =self._gen_vectors(word_list1, word_list2) # print vector1, vector2 vector3 = [vector1[x]*vector2[x] for x",
"for num in vector3 if num > 0.] co_occur_num = sum(vector4) # print",
"row_sum = np.sum(self.graph[x, :]) # if row_sum > 0: # self.graph[x, :] =",
"0. return co_occur_num / denominator def _gen_vectors(self, word_list1, word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2:",
"index, _ in sorted_scores: self.key_sentences.append(self.sentences[index]) # print '\\n'.join(self.key_sentences) def _get_similarity_standard(self, word_list1, word_list2): '''",
"for index, _ in sorted_scores: self.key_sentences.append(self.sentences[index]) # print '\\n'.join(self.key_sentences) def _get_similarity_standard(self, word_list1, word_list2):",
"= [] (self.sentences, self.words_no_filter, self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter); # - #",
"''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences",
"if row_sum > 0: # self.graph[x, :] = self.graph[x, :] / row_sum #",
"1 return result if __name__ == '__main__': import codecs # text = codecs.open('../text/03.txt',",
"row_sum > 0: # self.graph[x, :] = self.graph[x, :] / row_sum # print",
"self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter); # - # print self.sentences if source == 'no_filter': source",
"for x in xrange(sentences_num): # row_sum = np.sum(self.graph[x, :]) # if row_sum >",
"source == 'no_filter': source = self.words_no_filter elif source == 'all_filters': source = self.words_all_filters",
"== 'no_filter': source = self.words_no_filter elif source == 'all_filters': source = self.words_all_filters else:",
"return vector1, vector2 def get_key_sentences(self, num = 6, sentence_min_len = 6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。",
"- # print self.sentences if source == 'no_filter': source = self.words_no_filter elif source",
"letian ''' import networkx as nx from Segmentation import Segmentation import numpy as",
":] / row_sum # print self.graph nx_graph = nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph) #",
"_get_similarity_standard(self, word_list1, word_list2): ''' 默认的用于计算两个句子相似度的函数。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' vector1, vector2 =self._gen_vectors(word_list1, word_list2)",
"# if row_sum > 0: # self.graph[x, :] = self.graph[x, :] / row_sum",
"= None self.words_no_filter = None # 2维列表 self.words_no_stop_words = None self.words_all_filters = None",
"self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences = None self.words_no_filter = None",
"= None self.words_all_filters = None self.graph = None self.key_sentences = None def train(self,",
"math class TextRank4Sentence(object): def __init__(self, stop_words_file = None, delimiters='?!;?!。;…\\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。",
"# print sorted_scores for index, _ in sorted_scores: self.key_sentences.append(self.sentences[index]) # print '\\n'.join(self.key_sentences) def",
"lower=True, source = 'all_filters') print '\\n'.join(tr4s.get_key_sentences(num=1)) print '\\n'.join(tr4s.sentences) for wl in tr4s.words_no_filter: print",
"None self.words_all_filters = None self.graph = None self.key_sentences = None def train(self, text,",
"reverse=True) # print sorted_scores for index, _ in sorted_scores: self.key_sentences.append(self.sentences[index]) # print '\\n'.join(self.key_sentences)",
"tr4s.words_no_stop_words: print '[', ', \\''.join(wl), ']' print for wl in tr4s.words_all_filters: print '[',",
"None self.graph = None self.key_sentences = None def train(self, text, lower = False,",
"# for x in xrange(sentences_num): # row_sum = np.sum(self.graph[x, :]) # if row_sum",
"import Segmentation import numpy as np import math class TextRank4Sentence(object): def __init__(self, stop_words_file",
"= self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter); # - # print self.sentences if source == 'no_filter':",
"in vector3 if num > 0.] co_occur_num = sum(vector4) # print co_occur_num if",
"= 6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 ''' result = [] count = 0 for",
"# - # print self.sentences if source == 'no_filter': source = self.words_no_filter elif",
"speech_tag_filter=True, source = 'no_stop_words', sim_func = 'standard'): ''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter,",
"result if __name__ == '__main__': import codecs # text = codecs.open('../text/03.txt', 'r', 'utf-8').read()",
"> 0.] co_occur_num = sum(vector4) # print co_occur_num if co_occur_num == 0.: return",
"/ row_sum # print self.graph nx_graph = nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph) # this",
"''' 默认的用于计算两个句子相似度的函数。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' vector1, vector2 =self._gen_vectors(word_list1, word_list2) # print vector1,",
"= len(source) self.graph = np.zeros((sentences_num, sentences_num)) for x in xrange(sentences_num): for y in",
"source[y]) self.graph[x, y] = similarity self.graph[y, x] = similarity # for x in",
"self.words_no_filter elif source == 'all_filters': source = self.words_all_filters else: source = self.words_no_stop_words sim_func",
"= self.words_all_filters else: source = self.words_no_stop_words sim_func = self._get_similarity_standard sentences_num = len(source) self.graph",
"wl in tr4s.words_no_stop_words: print '[', ', \\''.join(wl), ']' print for wl in tr4s.words_all_filters:",
"'no_filter': source = self.words_no_filter elif source == 'all_filters': source = self.words_all_filters else: source",
"1, 2014 @author: letian ''' import networkx as nx from Segmentation import Segmentation",
"elif source == 'all_filters': source = self.words_all_filters else: source = self.words_no_stop_words sim_func =",
"words] vector2 = [float(word_list2.count(word)) for word in words] return vector1, vector2 def get_key_sentences(self,",
"class TextRank4Sentence(object): def __init__(self, stop_words_file = None, delimiters='?!;?!。;…\\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。",
"6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 ''' result = [] count = 0 for sentence",
"= [vector1[x]*vector2[x] for x in xrange(len(vector1))] vector4 = [1 for num in vector3",
"self.words_all_filters = None self.graph = None self.key_sentences = None def train(self, text, lower",
"self.graph = None self.key_sentences = None def train(self, text, lower = False, speech_tag_filter=True,",
"from Segmentation import Segmentation import numpy as np import math class TextRank4Sentence(object): def",
"word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' vector1, vector2 =self._gen_vectors(word_list1, word_list2) # print vector1, vector2 vector3",
"+ math.log(float(len(word_list2))) # 分母 if denominator == 0.: return 0. return co_occur_num /",
"Dec 1, 2014 @author: letian ''' import networkx as nx from Segmentation import",
"len(sentence) >= sentence_min_len: result.append(sentence) count += 1 return result if __name__ == '__main__':",
"y] = similarity self.graph[y, x] = similarity # for x in xrange(sentences_num): #",
"word_list1, word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' words = list(set(word_list1 + word_list2))",
"def _gen_vectors(self, word_list1, word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' words = list(set(word_list1",
"self.graph[x, :] / row_sum # print self.graph nx_graph = nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph)",
"'all_filters': source = self.words_all_filters else: source = self.words_no_stop_words sim_func = self._get_similarity_standard sentences_num =",
"self.key_sentences = [] (self.sentences, self.words_no_filter, self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter); # -",
"vector1, vector2 vector3 = [vector1[x]*vector2[x] for x in xrange(len(vector1))] vector4 = [1 for",
"= math.log(float(len(word_list1))) + math.log(float(len(word_list2))) # 分母 if denominator == 0.: return 0. return",
"# this is a dict sorted_scores = sorted(scores.items(), key = lambda item: item[1],",
"vector3 if num > 0.] co_occur_num = sum(vector4) # print co_occur_num if co_occur_num",
"words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences = [] (self.sentences, self.words_no_filter,",
"= 0 for sentence in self.key_sentences: if count >= num: break if len(sentence)",
"''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' words = list(set(word_list1 + word_list2)) vector1 =",
"if co_occur_num == 0.: return 0. denominator = math.log(float(len(word_list1))) + math.log(float(len(word_list2))) # 分母",
"None, delimiters='?!;?!。;…\\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg = Segmentation(stop_words_file=stop_words_file,",
"'no_stop_words', 'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences = [] (self.sentences, self.words_no_filter, self.words_no_stop_words, self.words_all_filters) =",
"vector1 = [float(word_list1.count(word)) for word in words] vector2 = [float(word_list2.count(word)) for word in",
"num = 6, sentence_min_len = 6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 ''' result = []",
"sentence_min_len: result.append(sentence) count += 1 return result if __name__ == '__main__': import codecs",
"source = self.words_no_filter elif source == 'all_filters': source = self.words_all_filters else: source =",
"self.words_no_filter = None # 2维列表 self.words_no_stop_words = None self.words_all_filters = None self.graph =",
"self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter); # - # print self.sentences if source",
"sim_func = 'standard'): ''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words',",
"'standard'): ''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。 `sim_func`:",
"6, sentence_min_len = 6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 ''' result = [] count =",
"== '__main__': import codecs # text = codecs.open('../text/03.txt', 'r', 'utf-8').read() text = \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\"",
"dict sorted_scores = sorted(scores.items(), key = lambda item: item[1], reverse=True) # print sorted_scores",
"in xrange(x, sentences_num): similarity = sim_func(source[x], source[y]) self.graph[x, y] = similarity self.graph[y, x]",
"word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' words = list(set(word_list1 + word_list2)) vector1 = [float(word_list1.count(word)) for",
"num > 0.] co_occur_num = sum(vector4) # print co_occur_num if co_occur_num == 0.:",
"分别代表两个句子,都是由单词组成的列表 ''' vector1, vector2 =self._gen_vectors(word_list1, word_list2) # print vector1, vector2 vector3 = [vector1[x]*vector2[x]",
"text, lower = False, speech_tag_filter=True, source = 'no_stop_words', sim_func = 'standard'): ''' `text`:文本内容,字符串。",
"= 'no_stop_words', sim_func = 'standard'): ''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。",
"= \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s = TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True, lower=True, source = 'all_filters') print '\\n'.join(tr4s.get_key_sentences(num=1))",
"= TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True, lower=True, source = 'all_filters') print '\\n'.join(tr4s.get_key_sentences(num=1)) print '\\n'.join(tr4s.sentences) for",
"获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 ''' result = [] count = 0 for sentence in self.key_sentences:",
"self.words_no_filter, self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter); # - # print self.sentences if",
"this is a dict sorted_scores = sorted(scores.items(), key = lambda item: item[1], reverse=True)",
"'\\n'.join(tr4s.sentences) for wl in tr4s.words_no_filter: print '[', ', \\''.join(wl), ']' print for wl",
"两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' words = list(set(word_list1 + word_list2)) vector1 = [float(word_list1.count(word))",
"None self.key_sentences = None def train(self, text, lower = False, speech_tag_filter=True, source =",
"# print self.sentences if source == 'no_filter': source = self.words_no_filter elif source ==",
"== 0.: return 0. return co_occur_num / denominator def _gen_vectors(self, word_list1, word_list2): '''",
"word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' words = list(set(word_list1 + word_list2)) vector1",
"= False, speech_tag_filter=True, source = 'no_stop_words', sim_func = 'standard'): ''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。",
"in words] return vector1, vector2 def get_key_sentences(self, num = 6, sentence_min_len = 6):",
"[] count = 0 for sentence in self.key_sentences: if count >= num: break",
"'__main__': import codecs # text = codecs.open('../text/03.txt', 'r', 'utf-8').read() text = \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s",
"print '\\n'.join(tr4s.sentences) for wl in tr4s.words_no_filter: print '[', ', \\''.join(wl), ']' print for",
"return 0. denominator = math.log(float(len(word_list1))) + math.log(float(len(word_list2))) # 分母 if denominator == 0.:",
"= codecs.open('../text/03.txt', 'r', 'utf-8').read() text = \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s = TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True, lower=True,",
"as np import math class TextRank4Sentence(object): def __init__(self, stop_words_file = None, delimiters='?!;?!。;…\\n'): '''",
"Segmentation import Segmentation import numpy as np import math class TextRank4Sentence(object): def __init__(self,",
"self.words_no_stop_words = None self.words_all_filters = None self.graph = None self.key_sentences = None def",
"= None # 2维列表 self.words_no_stop_words = None self.words_all_filters = None self.graph = None",
"if len(sentence) >= sentence_min_len: result.append(sentence) count += 1 return result if __name__ ==",
"= self.words_no_stop_words sim_func = self._get_similarity_standard sentences_num = len(source) self.graph = np.zeros((sentences_num, sentences_num)) for",
">= num: break if len(sentence) >= sentence_min_len: result.append(sentence) count += 1 return result",
"= sorted(scores.items(), key = lambda item: item[1], reverse=True) # print sorted_scores for index,",
"Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences = None self.words_no_filter = None # 2维列表 self.words_no_stop_words = None",
"sorted_scores = sorted(scores.items(), key = lambda item: item[1], reverse=True) # print sorted_scores for",
"vector2 =self._gen_vectors(word_list1, word_list2) # print vector1, vector2 vector3 = [vector1[x]*vector2[x] for x in",
"= [float(word_list2.count(word)) for word in words] return vector1, vector2 def get_key_sentences(self, num =",
"denominator == 0.: return 0. return co_occur_num / denominator def _gen_vectors(self, word_list1, word_list2):",
"''' Created on Dec 1, 2014 @author: letian ''' import networkx as nx",
"sorted_scores for index, _ in sorted_scores: self.key_sentences.append(self.sentences[index]) # print '\\n'.join(self.key_sentences) def _get_similarity_standard(self, word_list1,",
"sim_func(source[x], source[y]) self.graph[x, y] = similarity self.graph[y, x] = similarity # for x",
"co_occur_num == 0.: return 0. denominator = math.log(float(len(word_list1))) + math.log(float(len(word_list2))) # 分母 if",
"networkx as nx from Segmentation import Segmentation import numpy as np import math",
"print co_occur_num if co_occur_num == 0.: return 0. denominator = math.log(float(len(word_list1))) + math.log(float(len(word_list2)))",
"= similarity self.graph[y, x] = similarity # for x in xrange(sentences_num): # row_sum",
":]) # if row_sum > 0: # self.graph[x, :] = self.graph[x, :] /",
"similarity # for x in xrange(sentences_num): # row_sum = np.sum(self.graph[x, :]) # if",
"x in xrange(sentences_num): # row_sum = np.sum(self.graph[x, :]) # if row_sum > 0:",
"xrange(sentences_num): for y in xrange(x, sentences_num): similarity = sim_func(source[x], source[y]) self.graph[x, y] =",
"[float(word_list1.count(word)) for word in words] vector2 = [float(word_list2.count(word)) for word in words] return",
"text = \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s = TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True, lower=True, source = 'all_filters') print",
"import numpy as np import math class TextRank4Sentence(object): def __init__(self, stop_words_file = None,",
"self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences = None self.words_no_filter =",
"source = self.words_all_filters else: source = self.words_no_stop_words sim_func = self._get_similarity_standard sentences_num = len(source)",
"若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences = []",
"in sorted_scores: self.key_sentences.append(self.sentences[index]) # print '\\n'.join(self.key_sentences) def _get_similarity_standard(self, word_list1, word_list2): ''' 默认的用于计算两个句子相似度的函数。 word_list1,",
"def __init__(self, stop_words_file = None, delimiters='?!;?!。;…\\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。",
"count >= num: break if len(sentence) >= sentence_min_len: result.append(sentence) count += 1 return",
"xrange(len(vector1))] vector4 = [1 for num in vector3 if num > 0.] co_occur_num",
"similarity self.graph[y, x] = similarity # for x in xrange(sentences_num): # row_sum =",
"x] = similarity # for x in xrange(sentences_num): # row_sum = np.sum(self.graph[x, :])",
"sim_func = self._get_similarity_standard sentences_num = len(source) self.graph = np.zeros((sentences_num, sentences_num)) for x in",
"__init__(self, stop_words_file = None, delimiters='?!;?!。;…\\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。 `delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 '''",
"tr4s.words_no_filter: print '[', ', \\''.join(wl), ']' print for wl in tr4s.words_no_stop_words: print '[',",
"False, speech_tag_filter=True, source = 'no_stop_words', sim_func = 'standard'): ''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。",
"@author: letian ''' import networkx as nx from Segmentation import Segmentation import numpy",
"speech_tag_filter=True, lower=True, source = 'all_filters') print '\\n'.join(tr4s.get_key_sentences(num=1)) print '\\n'.join(tr4s.sentences) for wl in tr4s.words_no_filter:",
"= [1 for num in vector3 if num > 0.] co_occur_num = sum(vector4)",
"codecs # text = codecs.open('../text/03.txt', 'r', 'utf-8').read() text = \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s = TextRank4Sentence(stop_words_file='../stopword.data')",
"= None def train(self, text, lower = False, speech_tag_filter=True, source = 'no_stop_words', sim_func",
"= similarity # for x in xrange(sentences_num): # row_sum = np.sum(self.graph[x, :]) #",
"return 0. return co_occur_num / denominator def _gen_vectors(self, word_list1, word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1,",
"2维列表 self.words_no_stop_words = None self.words_all_filters = None self.graph = None self.key_sentences = None",
"row_sum # print self.graph nx_graph = nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph) # this is",
"vector2 vector3 = [vector1[x]*vector2[x] for x in xrange(len(vector1))] vector4 = [1 for num",
"+= 1 return result if __name__ == '__main__': import codecs # text =",
"for x in xrange(len(vector1))] vector4 = [1 for num in vector3 if num",
"print '[', ', \\''.join(wl), ']' print for wl in tr4s.words_all_filters: print '[', ',",
"word in words] return vector1, vector2 def get_key_sentences(self, num = 6, sentence_min_len =",
"if source == 'no_filter': source = self.words_no_filter elif source == 'all_filters': source =",
"tr4s.train(text=text, speech_tag_filter=True, lower=True, source = 'all_filters') print '\\n'.join(tr4s.get_key_sentences(num=1)) print '\\n'.join(tr4s.sentences) for wl in",
"0 for sentence in self.key_sentences: if count >= num: break if len(sentence) >=",
"word_list2): ''' 默认的用于计算两个句子相似度的函数。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' vector1, vector2 =self._gen_vectors(word_list1, word_list2) # print",
"= None self.key_sentences = None def train(self, text, lower = False, speech_tag_filter=True, source",
"'utf-8').read() text = \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s = TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True, lower=True, source = 'all_filters')",
"\\''.join(wl), ']' print for wl in tr4s.words_no_stop_words: print '[', ', \\''.join(wl), ']' print",
"for sentence in self.key_sentences: if count >= num: break if len(sentence) >= sentence_min_len:",
"0.: return 0. denominator = math.log(float(len(word_list1))) + math.log(float(len(word_list2))) # 分母 if denominator ==",
"vector2 def get_key_sentences(self, num = 6, sentence_min_len = 6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 '''",
"0.] co_occur_num = sum(vector4) # print co_occur_num if co_occur_num == 0.: return 0.",
"math.log(float(len(word_list2))) # 分母 if denominator == 0.: return 0. return co_occur_num / denominator",
"`lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences",
"as nx from Segmentation import Segmentation import numpy as np import math class",
"vector4 = [1 for num in vector3 if num > 0.] co_occur_num =",
"print '\\n'.join(tr4s.get_key_sentences(num=1)) print '\\n'.join(tr4s.sentences) for wl in tr4s.words_no_filter: print '[', ', \\''.join(wl), ']'",
"print sorted_scores for index, _ in sorted_scores: self.key_sentences.append(self.sentences[index]) # print '\\n'.join(self.key_sentences) def _get_similarity_standard(self,",
"self.key_sentences = None def train(self, text, lower = False, speech_tag_filter=True, source = 'no_stop_words',",
"= np.zeros((sentences_num, sentences_num)) for x in xrange(sentences_num): for y in xrange(x, sentences_num): similarity",
"for wl in tr4s.words_no_stop_words: print '[', ', \\''.join(wl), ']' print for wl in",
"# 2维列表 self.words_no_stop_words = None self.words_all_filters = None self.graph = None self.key_sentences =",
"return co_occur_num / denominator def _gen_vectors(self, word_list1, word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表",
"= sim_func(source[x], source[y]) self.graph[x, y] = similarity self.graph[y, x] = similarity # for",
"= sum(vector4) # print co_occur_num if co_occur_num == 0.: return 0. denominator =",
"np.sum(self.graph[x, :]) # if row_sum > 0: # self.graph[x, :] = self.graph[x, :]",
"co_occur_num if co_occur_num == 0.: return 0. denominator = math.log(float(len(word_list1))) + math.log(float(len(word_list2))) #",
"words] return vector1, vector2 def get_key_sentences(self, num = 6, sentence_min_len = 6): '''",
"x in xrange(sentences_num): for y in xrange(x, sentences_num): similarity = sim_func(source[x], source[y]) self.graph[x,",
"nx_graph = nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph) # this is a dict sorted_scores =",
"in xrange(len(vector1))] vector4 = [1 for num in vector3 if num > 0.]",
"math.log(float(len(word_list1))) + math.log(float(len(word_list2))) # 分母 if denominator == 0.: return 0. return co_occur_num",
"vector2 = [float(word_list2.count(word)) for word in words] return vector1, vector2 def get_key_sentences(self, num",
"= self.graph[x, :] / row_sum # print self.graph nx_graph = nx.from_numpy_matrix(self.graph) scores =",
"result.append(sentence) count += 1 return result if __name__ == '__main__': import codecs #",
"'no_stop_words', sim_func = 'standard'): ''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter',",
"`source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences = [] (self.sentences,",
"in xrange(sentences_num): for y in xrange(x, sentences_num): similarity = sim_func(source[x], source[y]) self.graph[x, y]",
"word_list2: 分别代表两个句子,都是由单词组成的列表 ''' vector1, vector2 =self._gen_vectors(word_list1, word_list2) # print vector1, vector2 vector3 =",
"y in xrange(x, sentences_num): similarity = sim_func(source[x], source[y]) self.graph[x, y] = similarity self.graph[y,",
"#-*- encoding:utf-8 -*- ''' Created on Dec 1, 2014 @author: letian ''' import",
"return result if __name__ == '__main__': import codecs # text = codecs.open('../text/03.txt', 'r',",
"= 'all_filters') print '\\n'.join(tr4s.get_key_sentences(num=1)) print '\\n'.join(tr4s.sentences) for wl in tr4s.words_no_filter: print '[', ',",
"sentences_num): similarity = sim_func(source[x], source[y]) self.graph[x, y] = similarity self.graph[y, x] = similarity",
"', \\''.join(wl), ']' print for wl in tr4s.words_no_stop_words: print '[', ', \\''.join(wl), ']'",
"for wl in tr4s.words_no_filter: print '[', ', \\''.join(wl), ']' print for wl in",
"'\\n'.join(tr4s.get_key_sentences(num=1)) print '\\n'.join(tr4s.sentences) for wl in tr4s.words_no_filter: print '[', ', \\''.join(wl), ']' print",
"key = lambda item: item[1], reverse=True) # print sorted_scores for index, _ in",
"<reponame>Mumuerr/textrank_test #-*- encoding:utf-8 -*- ''' Created on Dec 1, 2014 @author: letian '''",
"分母 if denominator == 0.: return 0. return co_occur_num / denominator def _gen_vectors(self,",
"a dict sorted_scores = sorted(scores.items(), key = lambda item: item[1], reverse=True) # print",
"break if len(sentence) >= sentence_min_len: result.append(sentence) count += 1 return result if __name__",
"= self.words_no_filter elif source == 'all_filters': source = self.words_all_filters else: source = self.words_no_stop_words",
"sentences_num)) for x in xrange(sentences_num): for y in xrange(x, sentences_num): similarity = sim_func(source[x],",
"import codecs # text = codecs.open('../text/03.txt', 'r', 'utf-8').read() text = \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s =",
"def get_key_sentences(self, num = 6, sentence_min_len = 6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 ''' result",
"len(source) self.graph = np.zeros((sentences_num, sentences_num)) for x in xrange(sentences_num): for y in xrange(x,",
"vector3 = [vector1[x]*vector2[x] for x in xrange(len(vector1))] vector4 = [1 for num in",
"# 分母 if denominator == 0.: return 0. return co_occur_num / denominator def",
"word_list2: 分别代表两个句子,都是由单词组成的列表 ''' words = list(set(word_list1 + word_list2)) vector1 = [float(word_list1.count(word)) for word",
"', \\''.join(wl), ']' print for wl in tr4s.words_all_filters: print '[', ', \\''.join(wl), ']'",
"Segmentation import numpy as np import math class TextRank4Sentence(object): def __init__(self, stop_words_file =",
"= 'standard'): ''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。",
"get_key_sentences(self, num = 6, sentence_min_len = 6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 ''' result =",
"numpy as np import math class TextRank4Sentence(object): def __init__(self, stop_words_file = None, delimiters='?!;?!。;…\\n'):",
":] = self.graph[x, :] / row_sum # print self.graph nx_graph = nx.from_numpy_matrix(self.graph) scores",
"# text = codecs.open('../text/03.txt', 'r', 'utf-8').read() text = \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s = TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text,",
"[1 for num in vector3 if num > 0.] co_occur_num = sum(vector4) #",
"_gen_vectors(self, word_list1, word_list2): ''' 两个句子转换成两个同样大小向量。可以通过这两个向量来计算两个句子的相似度。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' words = list(set(word_list1 +",
"`speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences =",
"lambda item: item[1], reverse=True) # print sorted_scores for index, _ in sorted_scores: self.key_sentences.append(self.sentences[index])",
"np.zeros((sentences_num, sentences_num)) for x in xrange(sentences_num): for y in xrange(x, sentences_num): similarity =",
"delimiters=delimiters) self.sentences = None self.words_no_filter = None # 2维列表 self.words_no_stop_words = None self.words_all_filters",
"'[', ', \\''.join(wl), ']' print for wl in tr4s.words_all_filters: print '[', ', \\''.join(wl),",
"on Dec 1, 2014 @author: letian ''' import networkx as nx from Segmentation",
"source = 'no_stop_words', sim_func = 'standard'): ''' `text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words,",
"word_list2) # print vector1, vector2 vector3 = [vector1[x]*vector2[x] for x in xrange(len(vector1))] vector4",
"self.sentences if source == 'no_filter': source = self.words_no_filter elif source == 'all_filters': source",
"similarity = sim_func(source[x], source[y]) self.graph[x, y] = similarity self.graph[y, x] = similarity #",
"print self.graph nx_graph = nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph) # this is a dict",
"''' words = list(set(word_list1 + word_list2)) vector1 = [float(word_list1.count(word)) for word in words]",
"print '[', ', \\''.join(wl), ']' print for wl in tr4s.words_no_stop_words: print '[', ',",
"encoding:utf-8 -*- ''' Created on Dec 1, 2014 @author: letian ''' import networkx",
"print vector1, vector2 vector3 = [vector1[x]*vector2[x] for x in xrange(len(vector1))] vector4 = [1",
"co_occur_num = sum(vector4) # print co_occur_num if co_occur_num == 0.: return 0. denominator",
"# print co_occur_num if co_occur_num == 0.: return 0. denominator = math.log(float(len(word_list1))) +",
"tr4s = TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True, lower=True, source = 'all_filters') print '\\n'.join(tr4s.get_key_sentences(num=1)) print '\\n'.join(tr4s.sentences)",
"count = 0 for sentence in self.key_sentences: if count >= num: break if",
"`text`:文本内容,字符串。 `lower`:是否将文本转换为小写。默认为False。 `speech_tag_filter`:若值为True,将调用内部的词性列表来过滤生成words_all_filters。 若值为False,words_all_filters与words_no_stop_words相同。 `source`:选择使用words_no_filter, words_no_stop_words, words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 '''",
"Created on Dec 1, 2014 @author: letian ''' import networkx as nx from",
"sentence_min_len = 6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 ''' result = [] count = 0",
"[float(word_list2.count(word)) for word in words] return vector1, vector2 def get_key_sentences(self, num = 6,",
"sum(vector4) # print co_occur_num if co_occur_num == 0.: return 0. denominator = math.log(float(len(word_list1)))",
"words = list(set(word_list1 + word_list2)) vector1 = [float(word_list1.count(word)) for word in words] vector2",
"source = self.words_no_stop_words sim_func = self._get_similarity_standard sentences_num = len(source) self.graph = np.zeros((sentences_num, sentences_num))",
"self.graph[x, :] = self.graph[x, :] / row_sum # print self.graph nx_graph = nx.from_numpy_matrix(self.graph)",
"for word in words] vector2 = [float(word_list2.count(word)) for word in words] return vector1,",
"(self.sentences, self.words_no_filter, self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text, lower=lower, speech_tag_filter=speech_tag_filter); # - # print self.sentences",
"'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences = [] (self.sentences, self.words_no_filter, self.words_no_stop_words, self.words_all_filters) = self.seg.segment(text=text,",
"# self.graph[x, :] = self.graph[x, :] / row_sum # print self.graph nx_graph =",
"list(set(word_list1 + word_list2)) vector1 = [float(word_list1.count(word)) for word in words] vector2 = [float(word_list2.count(word))",
"np import math class TextRank4Sentence(object): def __init__(self, stop_words_file = None, delimiters='?!;?!。;…\\n'): ''' `stop_words_file`:默认值为None,此时内部停止词表为空;可以设置为文件路径(字符串),将从停止词文件中提取停止词。",
"[vector1[x]*vector2[x] for x in xrange(len(vector1))] vector4 = [1 for num in vector3 if",
"self.graph = np.zeros((sentences_num, sentences_num)) for x in xrange(sentences_num): for y in xrange(x, sentences_num):",
"= 6, sentence_min_len = 6): ''' 获取最重要的num个长度大于等于sentence_min_len的句子用来生成摘要。 返回列表。 ''' result = [] count",
"= np.sum(self.graph[x, :]) # if row_sum > 0: # self.graph[x, :] = self.graph[x,",
"`delimiters`:默认值是`'?!;?!。;…\\n'`,用来将文本拆分为句子。 self.sentences:由句子组成的列表。 self.words_no_filter:对sentences中每个句子分词而得到的两级列表。 self.words_no_stop_words:去掉words_no_filter中的停止词而得到的两级列表。 self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences = None",
"self.key_sentences: if count >= num: break if len(sentence) >= sentence_min_len: result.append(sentence) count +=",
"self._get_similarity_standard sentences_num = len(source) self.graph = np.zeros((sentences_num, sentences_num)) for x in xrange(sentences_num): for",
"''' result = [] count = 0 for sentence in self.key_sentences: if count",
"# row_sum = np.sum(self.graph[x, :]) # if row_sum > 0: # self.graph[x, :]",
"= Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences = None self.words_no_filter = None # 2维列表 self.words_no_stop_words =",
"x in xrange(len(vector1))] vector4 = [1 for num in vector3 if num >",
"== 0.: return 0. denominator = math.log(float(len(word_list1))) + math.log(float(len(word_list2))) # 分母 if denominator",
"0: # self.graph[x, :] = self.graph[x, :] / row_sum # print self.graph nx_graph",
"source = 'all_filters') print '\\n'.join(tr4s.get_key_sentences(num=1)) print '\\n'.join(tr4s.sentences) for wl in tr4s.words_no_filter: print '[',",
"num in vector3 if num > 0.] co_occur_num = sum(vector4) # print co_occur_num",
"']' print for wl in tr4s.words_no_stop_words: print '[', ', \\''.join(wl), ']' print for",
"sorted(scores.items(), key = lambda item: item[1], reverse=True) # print sorted_scores for index, _",
"self.words_all_filters:保留words_no_stop_words中指定词性的单词而得到的两级列表。 ''' self.seg = Segmentation(stop_words_file=stop_words_file, delimiters=delimiters) self.sentences = None self.words_no_filter = None #",
"# print '\\n'.join(self.key_sentences) def _get_similarity_standard(self, word_list1, word_list2): ''' 默认的用于计算两个句子相似度的函数。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 '''",
"self.sentences = None self.words_no_filter = None # 2维列表 self.words_no_stop_words = None self.words_all_filters =",
"words_all_filters中的哪一个来生成句子之间的相似度。 默认值为`'all_filters'`,可选值为`'no_filter', 'no_stop_words', 'all_filters'`。 `sim_func`: 指定计算句子相似度的函数。当前只有一个函数,对应默认值`standard`。 ''' self.key_sentences = [] (self.sentences, self.words_no_filter, self.words_no_stop_words,",
"# print vector1, vector2 vector3 = [vector1[x]*vector2[x] for x in xrange(len(vector1))] vector4 =",
"nx.from_numpy_matrix(self.graph) scores = nx.pagerank(nx_graph) # this is a dict sorted_scores = sorted(scores.items(), key",
"self.graph[y, x] = similarity # for x in xrange(sentences_num): # row_sum = np.sum(self.graph[x,",
"word_list1, word_list2): ''' 默认的用于计算两个句子相似度的函数。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表 ''' vector1, vector2 =self._gen_vectors(word_list1, word_list2) #",
"if num > 0.] co_occur_num = sum(vector4) # print co_occur_num if co_occur_num ==",
"count += 1 return result if __name__ == '__main__': import codecs # text",
"in words] vector2 = [float(word_list2.count(word)) for word in words] return vector1, vector2 def",
"for x in xrange(sentences_num): for y in xrange(x, sentences_num): similarity = sim_func(source[x], source[y])",
"nx.pagerank(nx_graph) # this is a dict sorted_scores = sorted(scores.items(), key = lambda item:",
"_ in sorted_scores: self.key_sentences.append(self.sentences[index]) # print '\\n'.join(self.key_sentences) def _get_similarity_standard(self, word_list1, word_list2): ''' 默认的用于计算两个句子相似度的函数。",
"import networkx as nx from Segmentation import Segmentation import numpy as np import",
"'r', 'utf-8').read() text = \"这间酒店位于北京东三环,里面摆放很多雕塑,文艺气息十足。答谢宴于晚上8点开始。\" tr4s = TextRank4Sentence(stop_words_file='../stopword.data') tr4s.train(text=text, speech_tag_filter=True, lower=True, source =",
"返回列表。 ''' result = [] count = 0 for sentence in self.key_sentences: if",
"item[1], reverse=True) # print sorted_scores for index, _ in sorted_scores: self.key_sentences.append(self.sentences[index]) # print",
"print for wl in tr4s.words_no_stop_words: print '[', ', \\''.join(wl), ']' print for wl",
"word_list2)) vector1 = [float(word_list1.count(word)) for word in words] vector2 = [float(word_list2.count(word)) for word",
"print self.sentences if source == 'no_filter': source = self.words_no_filter elif source == 'all_filters':",
"self.graph[x, y] = similarity self.graph[y, x] = similarity # for x in xrange(sentences_num):",
"self.key_sentences.append(self.sentences[index]) # print '\\n'.join(self.key_sentences) def _get_similarity_standard(self, word_list1, word_list2): ''' 默认的用于计算两个句子相似度的函数。 word_list1, word_list2: 分别代表两个句子,都是由单词组成的列表",
"sorted_scores: self.key_sentences.append(self.sentences[index]) # print '\\n'.join(self.key_sentences) def _get_similarity_standard(self, word_list1, word_list2): ''' 默认的用于计算两个句子相似度的函数。 word_list1, word_list2:",
"= lambda item: item[1], reverse=True) # print sorted_scores for index, _ in sorted_scores:"
"Name for Authorization Check', nullable=True) __mapper_args__ = { 'polymorphic_identity':pluginName, } __qtmap__ = [Check.name,",
"for Authorization Check', nullable=True) __mapper_args__ = { 'polymorphic_identity':pluginName, } __qtmap__ = [Check.name, Check.description,",
"ActionAbapIsNotClientSpecificMixin from systemcheck import models from systemcheck.models.meta import generic_repr from systemcheck.checks.models import Check",
"models from systemcheck.models.meta import generic_repr from systemcheck.checks.models import Check pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr class ActionAbapValidateRedundantPasswordHashes(Check,",
"nullable=True) __mapper_args__ = { 'polymorphic_identity':pluginName, } __qtmap__ = [Check.name, Check.description, Check.failcriteria, Check.criticality, SAP_USER_NAME,",
"systemcheck.models.meta import generic_repr from systemcheck.checks.models import Check pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr class ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin):",
"from systemcheck.models.meta import generic_repr from systemcheck.checks.models import Check pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr class ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin,",
"<filename>systemcheck/systems/ABAP/models/action_abap_validate_redundant_password_hashes_model.py from systemcheck.systems.ABAP.models import AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin from systemcheck import models from systemcheck.models.meta import",
"__mapper_args__ = { 'polymorphic_identity':pluginName, } __qtmap__ = [Check.name, Check.description, Check.failcriteria, Check.criticality, SAP_USER_NAME, AbapSpoolParams_BAPIPRIPAR_Mixin.PDEST,",
"Check', nullable=True) __mapper_args__ = { 'polymorphic_identity':pluginName, } __qtmap__ = [Check.name, Check.description, Check.failcriteria, Check.criticality,",
"Check pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr class ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin): __tablename__ = pluginName id = models.meta.Column(models.meta.Integer,",
"import AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin from systemcheck import models from systemcheck.models.meta import generic_repr from systemcheck.checks.models",
"ActionAbapIsNotClientSpecificMixin): __tablename__ = pluginName id = models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'), primary_key=True) SAP_USER_NAME = models.meta.Column(models.meta.String(12), qt_label='Step",
"systemcheck.systems.ABAP.models import AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin from systemcheck import models from systemcheck.models.meta import generic_repr from",
"ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin): __tablename__ = pluginName id = models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'), primary_key=True) SAP_USER_NAME =",
"systemcheck.checks.models import Check pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr class ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin): __tablename__ = pluginName id",
"primary_key=True) SAP_USER_NAME = models.meta.Column(models.meta.String(12), qt_label='Step User', qt_description='Background User Name for Authorization Check', nullable=True)",
"generic_repr from systemcheck.checks.models import Check pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr class ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin): __tablename__ =",
"@generic_repr class ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin): __tablename__ = pluginName id = models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'), primary_key=True)",
"from systemcheck.checks.models import Check pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr class ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin): __tablename__ = pluginName",
"Authorization Check', nullable=True) __mapper_args__ = { 'polymorphic_identity':pluginName, } __qtmap__ = [Check.name, Check.description, Check.failcriteria,",
"= models.meta.Column(models.meta.String(12), qt_label='Step User', qt_description='Background User Name for Authorization Check', nullable=True) __mapper_args__ =",
"SAP_USER_NAME = models.meta.Column(models.meta.String(12), qt_label='Step User', qt_description='Background User Name for Authorization Check', nullable=True) __mapper_args__",
"qt_description='Background User Name for Authorization Check', nullable=True) __mapper_args__ = { 'polymorphic_identity':pluginName, } __qtmap__",
"{ 'polymorphic_identity':pluginName, } __qtmap__ = [Check.name, Check.description, Check.failcriteria, Check.criticality, SAP_USER_NAME, AbapSpoolParams_BAPIPRIPAR_Mixin.PDEST, AbapSpoolParams_BAPIPRIPAR_Mixin.PRBIG, AbapSpoolParams_BAPIPRIPAR_Mixin.PRSAP]",
"models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'), primary_key=True) SAP_USER_NAME = models.meta.Column(models.meta.String(12), qt_label='Step User', qt_description='Background User Name for Authorization",
"models.meta.ForeignKey('checks_metadata.id'), primary_key=True) SAP_USER_NAME = models.meta.Column(models.meta.String(12), qt_label='Step User', qt_description='Background User Name for Authorization Check',",
"User', qt_description='Background User Name for Authorization Check', nullable=True) __mapper_args__ = { 'polymorphic_identity':pluginName, }",
"__tablename__ = pluginName id = models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'), primary_key=True) SAP_USER_NAME = models.meta.Column(models.meta.String(12), qt_label='Step User',",
"import models from systemcheck.models.meta import generic_repr from systemcheck.checks.models import Check pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr class",
"class ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin): __tablename__ = pluginName id = models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'), primary_key=True) SAP_USER_NAME",
"models.meta.Column(models.meta.String(12), qt_label='Step User', qt_description='Background User Name for Authorization Check', nullable=True) __mapper_args__ = {",
"User Name for Authorization Check', nullable=True) __mapper_args__ = { 'polymorphic_identity':pluginName, } __qtmap__ =",
"from systemcheck.systems.ABAP.models import AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin from systemcheck import models from systemcheck.models.meta import generic_repr",
"import Check pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr class ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin): __tablename__ = pluginName id =",
"AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin): __tablename__ = pluginName id = models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'), primary_key=True) SAP_USER_NAME = models.meta.Column(models.meta.String(12),",
"qt_label='Step User', qt_description='Background User Name for Authorization Check', nullable=True) __mapper_args__ = { 'polymorphic_identity':pluginName,",
"pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr class ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin): __tablename__ = pluginName id = models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'),",
"pluginName id = models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'), primary_key=True) SAP_USER_NAME = models.meta.Column(models.meta.String(12), qt_label='Step User', qt_description='Background User",
"import generic_repr from systemcheck.checks.models import Check pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr class ActionAbapValidateRedundantPasswordHashes(Check, AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin): __tablename__",
"id = models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'), primary_key=True) SAP_USER_NAME = models.meta.Column(models.meta.String(12), qt_label='Step User', qt_description='Background User Name",
"= pluginName id = models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'), primary_key=True) SAP_USER_NAME = models.meta.Column(models.meta.String(12), qt_label='Step User', qt_description='Background",
"from systemcheck import models from systemcheck.models.meta import generic_repr from systemcheck.checks.models import Check pluginName='ActionAbapValidateRedundantPasswordHashes'",
"AbapSpoolParams_BAPIPRIPAR_Mixin, ActionAbapIsNotClientSpecificMixin from systemcheck import models from systemcheck.models.meta import generic_repr from systemcheck.checks.models import",
"= models.meta.Column(models.meta.Integer, models.meta.ForeignKey('checks_metadata.id'), primary_key=True) SAP_USER_NAME = models.meta.Column(models.meta.String(12), qt_label='Step User', qt_description='Background User Name for",
"= { 'polymorphic_identity':pluginName, } __qtmap__ = [Check.name, Check.description, Check.failcriteria, Check.criticality, SAP_USER_NAME, AbapSpoolParams_BAPIPRIPAR_Mixin.PDEST, AbapSpoolParams_BAPIPRIPAR_Mixin.PRBIG,",
"systemcheck import models from systemcheck.models.meta import generic_repr from systemcheck.checks.models import Check pluginName='ActionAbapValidateRedundantPasswordHashes' @generic_repr"
"w_k, nb_k, num_elec): ne = 0 for e_kn in e_skn: for e_n, w,",
"delta, e_skn, w_k, nb_k, num_elec): ne = 0 for e_kn in e_skn: for",
"x < -200: f = 1. elif x > 200: f = 0.",
"import print_function import numpy as np try: from builtins import range, zip except:",
"0. else: f = 1./(np.exp(x) + 1) return f def num_electron_diff(e_fermi, delta, e_skn,",
"elif x > 200: f = 0. else: f = 1./(np.exp(x) + 1)",
"fermi-dirac distribution weight. \"\"\" x = (energy - e_fermi)/delta if x < -200:",
"distribution weight. \"\"\" x = (energy - e_fermi)/delta if x < -200: f",
"else: f = 1./(np.exp(x) + 1) return f def num_electron_diff(e_fermi, delta, e_skn, w_k,",
"1. elif x > 200: f = 0. else: f = 1./(np.exp(x) +",
"f def num_electron_diff(e_fermi, delta, e_skn, w_k, nb_k, num_elec): ne = 0 for e_kn",
"w, nb in zip(e_kn, w_k, nb_k): f = [fermi_dirac(e_fermi, delta, e) for e",
"f = 0. else: f = 1./(np.exp(x) + 1) return f def num_electron_diff(e_fermi,",
"nb_k): f = [fermi_dirac(e_fermi, delta, e) for e in e_n[:nb]] ne += np.sum(f)*w",
"1) return f def num_electron_diff(e_fermi, delta, e_skn, w_k, nb_k, num_elec): ne = 0",
"numpy as np try: from builtins import range, zip except: pass def fermi_dirac(e_fermi,",
"from __future__ import print_function import numpy as np try: from builtins import range,",
"weight. \"\"\" x = (energy - e_fermi)/delta if x < -200: f =",
"= 0 for e_kn in e_skn: for e_n, w, nb in zip(e_kn, w_k,",
"= 1. elif x > 200: f = 0. else: f = 1./(np.exp(x)",
"ne = 0 for e_kn in e_skn: for e_n, w, nb in zip(e_kn,",
"np try: from builtins import range, zip except: pass def fermi_dirac(e_fermi, delta, energy):",
"import numpy as np try: from builtins import range, zip except: pass def",
"zip(e_kn, w_k, nb_k): f = [fermi_dirac(e_fermi, delta, e) for e in e_n[:nb]] ne",
"(energy - e_fermi)/delta if x < -200: f = 1. elif x >",
"print_function import numpy as np try: from builtins import range, zip except: pass",
"e_skn: for e_n, w, nb in zip(e_kn, w_k, nb_k): f = [fermi_dirac(e_fermi, delta,",
"f = 1. elif x > 200: f = 0. else: f =",
"+ 1) return f def num_electron_diff(e_fermi, delta, e_skn, w_k, nb_k, num_elec): ne =",
"builtins import range, zip except: pass def fermi_dirac(e_fermi, delta, energy): \"\"\" Return fermi-dirac",
"= 1./(np.exp(x) + 1) return f def num_electron_diff(e_fermi, delta, e_skn, w_k, nb_k, num_elec):",
"1./(np.exp(x) + 1) return f def num_electron_diff(e_fermi, delta, e_skn, w_k, nb_k, num_elec): ne",
"__future__ import print_function import numpy as np try: from builtins import range, zip",
"fermi_dirac(e_fermi, delta, energy): \"\"\" Return fermi-dirac distribution weight. \"\"\" x = (energy -",
"e_kn in e_skn: for e_n, w, nb in zip(e_kn, w_k, nb_k): f =",
"= 0. else: f = 1./(np.exp(x) + 1) return f def num_electron_diff(e_fermi, delta,",
"Return fermi-dirac distribution weight. \"\"\" x = (energy - e_fermi)/delta if x <",
"except: pass def fermi_dirac(e_fermi, delta, energy): \"\"\" Return fermi-dirac distribution weight. \"\"\" x",
"- e_fermi)/delta if x < -200: f = 1. elif x > 200:",
"= [fermi_dirac(e_fermi, delta, e) for e in e_n[:nb]] ne += np.sum(f)*w return ne",
"f = [fermi_dirac(e_fermi, delta, e) for e in e_n[:nb]] ne += np.sum(f)*w return",
"[fermi_dirac(e_fermi, delta, e) for e in e_n[:nb]] ne += np.sum(f)*w return ne -",
"e_skn, w_k, nb_k, num_elec): ne = 0 for e_kn in e_skn: for e_n,",
"200: f = 0. else: f = 1./(np.exp(x) + 1) return f def",
"delta, energy): \"\"\" Return fermi-dirac distribution weight. \"\"\" x = (energy - e_fermi)/delta",
"e_n, w, nb in zip(e_kn, w_k, nb_k): f = [fermi_dirac(e_fermi, delta, e) for",
"for e_n, w, nb in zip(e_kn, w_k, nb_k): f = [fermi_dirac(e_fermi, delta, e)",
"nb_k, num_elec): ne = 0 for e_kn in e_skn: for e_n, w, nb",
"range, zip except: pass def fermi_dirac(e_fermi, delta, energy): \"\"\" Return fermi-dirac distribution weight.",
"0 for e_kn in e_skn: for e_n, w, nb in zip(e_kn, w_k, nb_k):",
"> 200: f = 0. else: f = 1./(np.exp(x) + 1) return f",
"try: from builtins import range, zip except: pass def fermi_dirac(e_fermi, delta, energy): \"\"\"",
"in zip(e_kn, w_k, nb_k): f = [fermi_dirac(e_fermi, delta, e) for e in e_n[:nb]]",
"return f def num_electron_diff(e_fermi, delta, e_skn, w_k, nb_k, num_elec): ne = 0 for",
"def fermi_dirac(e_fermi, delta, energy): \"\"\" Return fermi-dirac distribution weight. \"\"\" x = (energy",
"energy): \"\"\" Return fermi-dirac distribution weight. \"\"\" x = (energy - e_fermi)/delta if",
"x = (energy - e_fermi)/delta if x < -200: f = 1. elif",
"in e_skn: for e_n, w, nb in zip(e_kn, w_k, nb_k): f = [fermi_dirac(e_fermi,",
"as np try: from builtins import range, zip except: pass def fermi_dirac(e_fermi, delta,",
"w_k, nb_k): f = [fermi_dirac(e_fermi, delta, e) for e in e_n[:nb]] ne +=",
"num_elec): ne = 0 for e_kn in e_skn: for e_n, w, nb in",
"if x < -200: f = 1. elif x > 200: f =",
"pass def fermi_dirac(e_fermi, delta, energy): \"\"\" Return fermi-dirac distribution weight. \"\"\" x =",
"-200: f = 1. elif x > 200: f = 0. else: f",
"num_electron_diff(e_fermi, delta, e_skn, w_k, nb_k, num_elec): ne = 0 for e_kn in e_skn:",
"e_fermi)/delta if x < -200: f = 1. elif x > 200: f",
"\"\"\" Return fermi-dirac distribution weight. \"\"\" x = (energy - e_fermi)/delta if x",
"< -200: f = 1. elif x > 200: f = 0. else:",
"def num_electron_diff(e_fermi, delta, e_skn, w_k, nb_k, num_elec): ne = 0 for e_kn in",
"import range, zip except: pass def fermi_dirac(e_fermi, delta, energy): \"\"\" Return fermi-dirac distribution",
"f = 1./(np.exp(x) + 1) return f def num_electron_diff(e_fermi, delta, e_skn, w_k, nb_k,",
"nb in zip(e_kn, w_k, nb_k): f = [fermi_dirac(e_fermi, delta, e) for e in",
"= (energy - e_fermi)/delta if x < -200: f = 1. elif x",
"from builtins import range, zip except: pass def fermi_dirac(e_fermi, delta, energy): \"\"\" Return",
"\"\"\" x = (energy - e_fermi)/delta if x < -200: f = 1.",
"x > 200: f = 0. else: f = 1./(np.exp(x) + 1) return",
"delta, e) for e in e_n[:nb]] ne += np.sum(f)*w return ne - num_elec",
"for e_kn in e_skn: for e_n, w, nb in zip(e_kn, w_k, nb_k): f",
"zip except: pass def fermi_dirac(e_fermi, delta, energy): \"\"\" Return fermi-dirac distribution weight. \"\"\""
] |
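num_electron_diff crosses zero exactly at the Fermi level, so it can be handed straight to a scalar root finder. A minimal usage sketch with made-up band energies (the data and the bracket [-5, 5] are assumptions, not part of the original file):

import numpy as np
from scipy.optimize import brentq

e_skn = np.array([[[-2.0, -1.0, 0.5, 1.5]]])  # one spin, one k-point, four bands
w_k = [1.0]                                    # k-point weights
nb_k = [4]                                     # bands per k-point
num_elec = 2.0                                 # target electron count
delta = 0.05                                   # smearing width

# Bisection between brackets where the electron-count difference changes sign.
e_fermi = brentq(num_electron_diff, -5.0, 5.0,
                 args=(delta, e_skn, w_k, nb_k, num_elec))
print(e_fermi)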
[
"test_iter_rpcresponses(): response_list = [ Mock(payload={'id': 1, 'failure': False, 'ending': False}), Mock(payload={'id': 2, 'failure':",
"be the message preceeding the `ending` assert ret.payload['id'] == 2 def test_iter_rpcresponses_ending_only(): response_list",
"False}), Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) ret =",
"responses def test_iter_rpcresponses(): response_list = [ Mock(payload={'id': 1, 'failure': False, 'ending': False}), Mock(payload={'id':",
"True}), ] iter_ = responses.iter_rpcresponses(response_list) # should not include the ending message assert",
"nameko.legacy import responses def test_iter_rpcresponses(): response_list = [ Mock(payload={'id': 1, 'failure': False, 'ending':",
"def test_iter_rpcresponses_ending_only(): response_list = [ Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_",
"from nameko.legacy import responses def test_iter_rpcresponses(): response_list = [ Mock(payload={'id': 1, 'failure': False,",
"Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) ret = responses.last(iter_)",
"False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) # should not include the ending",
"Mock(payload={'id': 2, 'failure': False, 'ending': False}), Mock(payload={'id': 3, 'failure': False, 'ending': True}), ]",
"from mock import Mock from nameko.legacy import responses def test_iter_rpcresponses(): response_list = [",
"False, 'ending': False}), Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list)",
"[ Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) # should",
"3, 'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) # should not include",
"False}), Mock(payload={'id': 2, 'failure': False, 'ending': False}), Mock(payload={'id': 3, 'failure': False, 'ending': True}),",
"iter_ = responses.iter_rpcresponses(response_list) # should not include the ending message assert list(iter_) ==",
"the `ending` assert ret.payload['id'] == 2 def test_iter_rpcresponses_ending_only(): response_list = [ Mock(payload={'id': 3,",
"Mock(payload={'id': 1, 'failure': False, 'ending': False}), Mock(payload={'id': 2, 'failure': False, 'ending': False}), Mock(payload={'id':",
"] iter_ = responses.iter_rpcresponses(response_list) # should not include the ending message assert list(iter_)",
"response_list = [ Mock(payload={'id': 1, 'failure': False, 'ending': False}), Mock(payload={'id': 2, 'failure': False,",
"mock import Mock from nameko.legacy import responses def test_iter_rpcresponses(): response_list = [ Mock(payload={'id':",
"'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) # should not include the ending message",
"2, 'failure': False, 'ending': False}), Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_",
"'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) ret = responses.last(iter_) # should be the",
"'failure': False, 'ending': False}), Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_ =",
"Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) # should not",
"False, 'ending': False}), Mock(payload={'id': 2, 'failure': False, 'ending': False}), Mock(payload={'id': 3, 'failure': False,",
"import responses def test_iter_rpcresponses(): response_list = [ Mock(payload={'id': 1, 'failure': False, 'ending': False}),",
"1, 'failure': False, 'ending': False}), Mock(payload={'id': 2, 'failure': False, 'ending': False}), Mock(payload={'id': 3,",
"'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) ret = responses.last(iter_) # should",
"the message preceeding the `ending` assert ret.payload['id'] == 2 def test_iter_rpcresponses_ending_only(): response_list =",
"preceeding the `ending` assert ret.payload['id'] == 2 def test_iter_rpcresponses_ending_only(): response_list = [ Mock(payload={'id':",
"assert ret.payload['id'] == 2 def test_iter_rpcresponses_ending_only(): response_list = [ Mock(payload={'id': 3, 'failure': False,",
"'failure': False, 'ending': False}), Mock(payload={'id': 2, 'failure': False, 'ending': False}), Mock(payload={'id': 3, 'failure':",
"False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) ret = responses.last(iter_) # should be",
"3, 'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) ret = responses.last(iter_) #",
"responses.iter_rpcresponses(response_list) ret = responses.last(iter_) # should be the message preceeding the `ending` assert",
"'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) # should not include the",
"def test_iter_rpcresponses(): response_list = [ Mock(payload={'id': 1, 'failure': False, 'ending': False}), Mock(payload={'id': 2,",
"should be the message preceeding the `ending` assert ret.payload['id'] == 2 def test_iter_rpcresponses_ending_only():",
"] iter_ = responses.iter_rpcresponses(response_list) ret = responses.last(iter_) # should be the message preceeding",
"iter_ = responses.iter_rpcresponses(response_list) ret = responses.last(iter_) # should be the message preceeding the",
"= responses.last(iter_) # should be the message preceeding the `ending` assert ret.payload['id'] ==",
"`ending` assert ret.payload['id'] == 2 def test_iter_rpcresponses_ending_only(): response_list = [ Mock(payload={'id': 3, 'failure':",
"'ending': False}), Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) ret",
"responses.last(iter_) # should be the message preceeding the `ending` assert ret.payload['id'] == 2",
"test_iter_rpcresponses_ending_only(): response_list = [ Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_ =",
"import Mock from nameko.legacy import responses def test_iter_rpcresponses(): response_list = [ Mock(payload={'id': 1,",
"[ Mock(payload={'id': 1, 'failure': False, 'ending': False}), Mock(payload={'id': 2, 'failure': False, 'ending': False}),",
"= responses.iter_rpcresponses(response_list) # should not include the ending message assert list(iter_) == []",
"ret = responses.last(iter_) # should be the message preceeding the `ending` assert ret.payload['id']",
"response_list = [ Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list)",
"= [ Mock(payload={'id': 1, 'failure': False, 'ending': False}), Mock(payload={'id': 2, 'failure': False, 'ending':",
"message preceeding the `ending` assert ret.payload['id'] == 2 def test_iter_rpcresponses_ending_only(): response_list = [",
"== 2 def test_iter_rpcresponses_ending_only(): response_list = [ Mock(payload={'id': 3, 'failure': False, 'ending': True}),",
"= responses.iter_rpcresponses(response_list) ret = responses.last(iter_) # should be the message preceeding the `ending`",
"= [ Mock(payload={'id': 3, 'failure': False, 'ending': True}), ] iter_ = responses.iter_rpcresponses(response_list) #",
"'ending': False}), Mock(payload={'id': 2, 'failure': False, 'ending': False}), Mock(payload={'id': 3, 'failure': False, 'ending':",
"# should be the message preceeding the `ending` assert ret.payload['id'] == 2 def",
"2 def test_iter_rpcresponses_ending_only(): response_list = [ Mock(payload={'id': 3, 'failure': False, 'ending': True}), ]",
"True}), ] iter_ = responses.iter_rpcresponses(response_list) ret = responses.last(iter_) # should be the message",
"ret.payload['id'] == 2 def test_iter_rpcresponses_ending_only(): response_list = [ Mock(payload={'id': 3, 'failure': False, 'ending':",
"Mock from nameko.legacy import responses def test_iter_rpcresponses(): response_list = [ Mock(payload={'id': 1, 'failure':"
] |
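The two tests pin down the iterator's contract: responses are yielded in order until one arrives with `ending` set, and that terminator itself is dropped (so `responses.last` returns the message before it, and a stream containing only the terminator is empty). This is not nameko's actual implementation, just a sketch consistent with what the tests assert:

def iter_rpcresponses_sketch(response_list):
    """Yield responses up to, and excluding, the one flagged 'ending'."""
    for response in response_list:
        if response.payload['ending']:
            return  # swallow the terminator, ending the stream
        yield response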
[
"* def load_nag(nag, path): \"\"\"Load a NAG from a file\"\"\" with open(path, 'r')",
". import * def load_nag(nag, path): \"\"\"Load a NAG from a file\"\"\" with",
"NAG from a file\"\"\" with open(path, 'r') as nag_file: nag_json = nag_file.read() nag",
"def load_nag(nag, path): \"\"\"Load a NAG from a file\"\"\" with open(path, 'r') as",
"a file\"\"\" with open(path, 'r') as nag_file: nag_json = nag_file.read() nag = convert_json_to_nag(nag_json)",
"from . import * def load_nag(nag, path): \"\"\"Load a NAG from a file\"\"\"",
"\"\"\"Load a NAG from a file\"\"\" with open(path, 'r') as nag_file: nag_json =",
"file\"\"\" with open(path, 'r') as nag_file: nag_json = nag_file.read() nag = convert_json_to_nag(nag_json) return",
"a NAG from a file\"\"\" with open(path, 'r') as nag_file: nag_json = nag_file.read()",
"with open(path, 'r') as nag_file: nag_json = nag_file.read() nag = convert_json_to_nag(nag_json) return nag",
"from a file\"\"\" with open(path, 'r') as nag_file: nag_json = nag_file.read() nag =",
"path): \"\"\"Load a NAG from a file\"\"\" with open(path, 'r') as nag_file: nag_json",
"import * def load_nag(nag, path): \"\"\"Load a NAG from a file\"\"\" with open(path,",
"load_nag(nag, path): \"\"\"Load a NAG from a file\"\"\" with open(path, 'r') as nag_file:"
] |
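Since the `nag` parameter is dead on arrival, a hypothetical cleaned-up variant (assuming `convert_json_to_nag` comes in via the star import above) could drop it entirely:

def load_nag(path):
    """Load a NAG from a JSON file."""
    with open(path, 'r') as nag_file:
        return convert_json_to_nag(nag_file.read())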
[
"get_queryset(self): if self.request.user.is_anonymous: return self.queryset return Notebook.objects.filter(member__user=self.request.user, member__is_active=True) @swagger_auto_schema( responses={200: MemberSerializer(many=True)} ) @action(detail=True,",
"mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin): serializer_class = NotebookSerializer authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated,",
"self.get_object() serializer = MemberSerializer(instance.members.filter(is_active=True), many=True) return Response(serializer.data) @swagger_auto_schema( responses={200: FolderSerializer()} ) @action(detail=True, methods=['get'])",
"= MemberSerializer(instance.members.filter(is_active=True), many=True) return Response(serializer.data) @swagger_auto_schema( responses={200: FolderSerializer()} ) @action(detail=True, methods=['get']) def root(self,",
"de pesquisa (pode ser substituído pelo parâmetro `query`)')], responses={200: SearchResultSerializer()} ) @action(detail=True, methods=['get'])",
"@action(detail=True, methods=['get']) def members(self, request, pk=None): instance: Notebook = self.get_object() serializer = MemberSerializer(instance.members.filter(is_active=True),",
"from rest_framework.decorators import action from rest_framework.response import Response from core.models import Notebook, Member",
"FolderSerializer()} ) @action(detail=True, methods=['get']) def root(self, request, pk=None): instance: Notebook = self.get_object() serializer",
"else: membership = obj.members.get(user=request.user) if membership.role != Member.Roles.ADMIN: return False return True class",
"from rest_framework.response import Response from core.models import Notebook, Member from notebook.serializers.folder import FolderSerializer",
"obj: Notebook): if request.method in permissions.SAFE_METHODS: return True if view.action == 'destroy': if",
"import Parameter from drf_yasg.utils import swagger_auto_schema from rest_framework import authentication, permissions, viewsets, mixins",
"'query', required=True, type='string', description='_query_ de pesquisa (pode ser substituído pelo parâmetro `query`)')], responses={200:",
"request, pk=None): instance: Notebook = self.get_object() serializer = FolderSerializer(instance.root_folder) return Response(serializer.data) @swagger_auto_schema( manual_parameters=[Parameter('q',",
"request, view, obj: Notebook): if request.method in permissions.SAFE_METHODS: return True if view.action ==",
"self.get_object() serializer = FolderSerializer(instance.root_folder) return Response(serializer.data) @swagger_auto_schema( manual_parameters=[Parameter('q', 'query', required=True, type='string', description='_query_ de",
"if request.user != obj.owner: return False else: membership = obj.members.get(user=request.user) if membership.role !=",
"NotebookSerializer authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated, NotebookRolePermission) queryset = Notebook.objects.all() def get_queryset(self):",
"== 'destroy': if request.user != obj.owner: return False else: membership = obj.members.get(user=request.user) if",
"import action from rest_framework.response import Response from core.models import Notebook, Member from notebook.serializers.folder",
"self.get_object() query = request.query_params.get('q', None) or request.query_params.get('query', '') serializer = SearchResultSerializer(SearchResult(instance, query)) return",
"NotebookSerializer from notebook.serializers.search import SearchResult, SearchResultSerializer class NotebookRolePermission(permissions.BasePermission): def has_object_permission(self, request, view, obj:",
"notebook.serializers.folder import FolderSerializer from notebook.serializers.member import MemberSerializer from notebook.serializers.notebook import NotebookSerializer from notebook.serializers.search",
"= self.get_object() serializer = MemberSerializer(instance.members.filter(is_active=True), many=True) return Response(serializer.data) @swagger_auto_schema( responses={200: FolderSerializer()} ) @action(detail=True,",
"def root(self, request, pk=None): instance: Notebook = self.get_object() serializer = FolderSerializer(instance.root_folder) return Response(serializer.data)",
"responses={200: FolderSerializer()} ) @action(detail=True, methods=['get']) def root(self, request, pk=None): instance: Notebook = self.get_object()",
"import NotebookSerializer from notebook.serializers.search import SearchResult, SearchResultSerializer class NotebookRolePermission(permissions.BasePermission): def has_object_permission(self, request, view,",
"import Notebook, Member from notebook.serializers.folder import FolderSerializer from notebook.serializers.member import MemberSerializer from notebook.serializers.notebook",
"FolderSerializer from notebook.serializers.member import MemberSerializer from notebook.serializers.notebook import NotebookSerializer from notebook.serializers.search import SearchResult,",
"methods=['get']) def search(self, request, pk=None): instance: Notebook = self.get_object() query = request.query_params.get('q', None)",
"view.action == 'destroy': if request.user != obj.owner: return False else: membership = obj.members.get(user=request.user)",
"Parameter from drf_yasg.utils import swagger_auto_schema from rest_framework import authentication, permissions, viewsets, mixins from",
"instance: Notebook = self.get_object() serializer = FolderSerializer(instance.root_folder) return Response(serializer.data) @swagger_auto_schema( manual_parameters=[Parameter('q', 'query', required=True,",
"SearchResultSerializer()} ) @action(detail=True, methods=['get']) def search(self, request, pk=None): instance: Notebook = self.get_object() query",
"Response from core.models import Notebook, Member from notebook.serializers.folder import FolderSerializer from notebook.serializers.member import",
"Member.Roles.ADMIN: return False return True class NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin): serializer_class",
"mixins.UpdateModelMixin): serializer_class = NotebookSerializer authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated, NotebookRolePermission) queryset =",
") @action(detail=True, methods=['get']) def root(self, request, pk=None): instance: Notebook = self.get_object() serializer =",
"= (permissions.IsAuthenticated, NotebookRolePermission) queryset = Notebook.objects.all() def get_queryset(self): if self.request.user.is_anonymous: return self.queryset return",
"from drf_yasg.openapi import Parameter from drf_yasg.utils import swagger_auto_schema from rest_framework import authentication, permissions,",
"ser substituído pelo parâmetro `query`)')], responses={200: SearchResultSerializer()} ) @action(detail=True, methods=['get']) def search(self, request,",
"from notebook.serializers.notebook import NotebookSerializer from notebook.serializers.search import SearchResult, SearchResultSerializer class NotebookRolePermission(permissions.BasePermission): def has_object_permission(self,",
"authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated, NotebookRolePermission) queryset = Notebook.objects.all() def get_queryset(self): if",
"(pode ser substituído pelo parâmetro `query`)')], responses={200: SearchResultSerializer()} ) @action(detail=True, methods=['get']) def search(self,",
"membership.role != Member.Roles.ADMIN: return False return True class NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin,",
"queryset = Notebook.objects.all() def get_queryset(self): if self.request.user.is_anonymous: return self.queryset return Notebook.objects.filter(member__user=self.request.user, member__is_active=True) @swagger_auto_schema(",
"= self.get_object() serializer = FolderSerializer(instance.root_folder) return Response(serializer.data) @swagger_auto_schema( manual_parameters=[Parameter('q', 'query', required=True, type='string', description='_query_",
"action from rest_framework.response import Response from core.models import Notebook, Member from notebook.serializers.folder import",
"from core.models import Notebook, Member from notebook.serializers.folder import FolderSerializer from notebook.serializers.member import MemberSerializer",
"drf_yasg.utils import swagger_auto_schema from rest_framework import authentication, permissions, viewsets, mixins from rest_framework.decorators import",
"= Notebook.objects.all() def get_queryset(self): if self.request.user.is_anonymous: return self.queryset return Notebook.objects.filter(member__user=self.request.user, member__is_active=True) @swagger_auto_schema( responses={200:",
"= NotebookSerializer authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated, NotebookRolePermission) queryset = Notebook.objects.all() def",
"rest_framework.decorators import action from rest_framework.response import Response from core.models import Notebook, Member from",
"serializer = FolderSerializer(instance.root_folder) return Response(serializer.data) @swagger_auto_schema( manual_parameters=[Parameter('q', 'query', required=True, type='string', description='_query_ de pesquisa",
"mixins from rest_framework.decorators import action from rest_framework.response import Response from core.models import Notebook,",
"Notebook = self.get_object() serializer = MemberSerializer(instance.members.filter(is_active=True), many=True) return Response(serializer.data) @swagger_auto_schema( responses={200: FolderSerializer()} )",
"mixins.RetrieveModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin): serializer_class = NotebookSerializer authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated, NotebookRolePermission)",
"import swagger_auto_schema from rest_framework import authentication, permissions, viewsets, mixins from rest_framework.decorators import action",
"import MemberSerializer from notebook.serializers.notebook import NotebookSerializer from notebook.serializers.search import SearchResult, SearchResultSerializer class NotebookRolePermission(permissions.BasePermission):",
"class NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin): serializer_class = NotebookSerializer authentication_classes = (authentication.TokenAuthentication,)",
"class NotebookRolePermission(permissions.BasePermission): def has_object_permission(self, request, view, obj: Notebook): if request.method in permissions.SAFE_METHODS: return",
"return self.queryset return Notebook.objects.filter(member__user=self.request.user, member__is_active=True) @swagger_auto_schema( responses={200: MemberSerializer(many=True)} ) @action(detail=True, methods=['get']) def members(self,",
"instance: Notebook = self.get_object() serializer = MemberSerializer(instance.members.filter(is_active=True), many=True) return Response(serializer.data) @swagger_auto_schema( responses={200: FolderSerializer()}",
"def search(self, request, pk=None): instance: Notebook = self.get_object() query = request.query_params.get('q', None) or",
"permissions, viewsets, mixins from rest_framework.decorators import action from rest_framework.response import Response from core.models",
"mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin): serializer_class = NotebookSerializer authentication_classes = (authentication.TokenAuthentication,) permission_classes =",
"members(self, request, pk=None): instance: Notebook = self.get_object() serializer = MemberSerializer(instance.members.filter(is_active=True), many=True) return Response(serializer.data)",
"`query`)')], responses={200: SearchResultSerializer()} ) @action(detail=True, methods=['get']) def search(self, request, pk=None): instance: Notebook =",
"request.user != obj.owner: return False else: membership = obj.members.get(user=request.user) if membership.role != Member.Roles.ADMIN:",
"def get_queryset(self): if self.request.user.is_anonymous: return self.queryset return Notebook.objects.filter(member__user=self.request.user, member__is_active=True) @swagger_auto_schema( responses={200: MemberSerializer(many=True)} )",
"Response(serializer.data) @swagger_auto_schema( responses={200: FolderSerializer()} ) @action(detail=True, methods=['get']) def root(self, request, pk=None): instance: Notebook",
"Notebook.objects.all() def get_queryset(self): if self.request.user.is_anonymous: return self.queryset return Notebook.objects.filter(member__user=self.request.user, member__is_active=True) @swagger_auto_schema( responses={200: MemberSerializer(many=True)}",
"view, obj: Notebook): if request.method in permissions.SAFE_METHODS: return True if view.action == 'destroy':",
"pk=None): instance: Notebook = self.get_object() serializer = FolderSerializer(instance.root_folder) return Response(serializer.data) @swagger_auto_schema( manual_parameters=[Parameter('q', 'query',",
"authentication, permissions, viewsets, mixins from rest_framework.decorators import action from rest_framework.response import Response from",
"serializer_class = NotebookSerializer authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated, NotebookRolePermission) queryset = Notebook.objects.all()",
"pk=None): instance: Notebook = self.get_object() serializer = MemberSerializer(instance.members.filter(is_active=True), many=True) return Response(serializer.data) @swagger_auto_schema( responses={200:",
"rest_framework.response import Response from core.models import Notebook, Member from notebook.serializers.folder import FolderSerializer from",
"@action(detail=True, methods=['get']) def search(self, request, pk=None): instance: Notebook = self.get_object() query = request.query_params.get('q',",
"return Notebook.objects.filter(member__user=self.request.user, member__is_active=True) @swagger_auto_schema( responses={200: MemberSerializer(many=True)} ) @action(detail=True, methods=['get']) def members(self, request, pk=None):",
"root(self, request, pk=None): instance: Notebook = self.get_object() serializer = FolderSerializer(instance.root_folder) return Response(serializer.data) @swagger_auto_schema(",
"request.method in permissions.SAFE_METHODS: return True if view.action == 'destroy': if request.user != obj.owner:",
"return Response(serializer.data) @swagger_auto_schema( manual_parameters=[Parameter('q', 'query', required=True, type='string', description='_query_ de pesquisa (pode ser substituído",
"False else: membership = obj.members.get(user=request.user) if membership.role != Member.Roles.ADMIN: return False return True",
"notebook.serializers.search import SearchResult, SearchResultSerializer class NotebookRolePermission(permissions.BasePermission): def has_object_permission(self, request, view, obj: Notebook): if",
"import FolderSerializer from notebook.serializers.member import MemberSerializer from notebook.serializers.notebook import NotebookSerializer from notebook.serializers.search import",
"member__is_active=True) @swagger_auto_schema( responses={200: MemberSerializer(many=True)} ) @action(detail=True, methods=['get']) def members(self, request, pk=None): instance: Notebook",
"self.queryset return Notebook.objects.filter(member__user=self.request.user, member__is_active=True) @swagger_auto_schema( responses={200: MemberSerializer(many=True)} ) @action(detail=True, methods=['get']) def members(self, request,",
"= (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated, NotebookRolePermission) queryset = Notebook.objects.all() def get_queryset(self): if self.request.user.is_anonymous:",
"Notebook = self.get_object() query = request.query_params.get('q', None) or request.query_params.get('query', '') serializer = SearchResultSerializer(SearchResult(instance,",
"parâmetro `query`)')], responses={200: SearchResultSerializer()} ) @action(detail=True, methods=['get']) def search(self, request, pk=None): instance: Notebook",
"MemberSerializer(many=True)} ) @action(detail=True, methods=['get']) def members(self, request, pk=None): instance: Notebook = self.get_object() serializer",
") @action(detail=True, methods=['get']) def members(self, request, pk=None): instance: Notebook = self.get_object() serializer =",
"mixins.DestroyModelMixin, mixins.UpdateModelMixin): serializer_class = NotebookSerializer authentication_classes = (authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated, NotebookRolePermission) queryset",
"search(self, request, pk=None): instance: Notebook = self.get_object() query = request.query_params.get('q', None) or request.query_params.get('query',",
"!= Member.Roles.ADMIN: return False return True class NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin):",
"(permissions.IsAuthenticated, NotebookRolePermission) queryset = Notebook.objects.all() def get_queryset(self): if self.request.user.is_anonymous: return self.queryset return Notebook.objects.filter(member__user=self.request.user,",
"required=True, type='string', description='_query_ de pesquisa (pode ser substituído pelo parâmetro `query`)')], responses={200: SearchResultSerializer()}",
"pk=None): instance: Notebook = self.get_object() query = request.query_params.get('q', None) or request.query_params.get('query', '') serializer",
"return False else: membership = obj.members.get(user=request.user) if membership.role != Member.Roles.ADMIN: return False return",
"obj.owner: return False else: membership = obj.members.get(user=request.user) if membership.role != Member.Roles.ADMIN: return False",
"(authentication.TokenAuthentication,) permission_classes = (permissions.IsAuthenticated, NotebookRolePermission) queryset = Notebook.objects.all() def get_queryset(self): if self.request.user.is_anonymous: return",
"True class NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin): serializer_class = NotebookSerializer authentication_classes =",
"MemberSerializer(instance.members.filter(is_active=True), many=True) return Response(serializer.data) @swagger_auto_schema( responses={200: FolderSerializer()} ) @action(detail=True, methods=['get']) def root(self, request,",
"methods=['get']) def root(self, request, pk=None): instance: Notebook = self.get_object() serializer = FolderSerializer(instance.root_folder) return",
"import Response from core.models import Notebook, Member from notebook.serializers.folder import FolderSerializer from notebook.serializers.member",
"SearchResult, SearchResultSerializer class NotebookRolePermission(permissions.BasePermission): def has_object_permission(self, request, view, obj: Notebook): if request.method in",
"@swagger_auto_schema( responses={200: MemberSerializer(many=True)} ) @action(detail=True, methods=['get']) def members(self, request, pk=None): instance: Notebook =",
"Notebook = self.get_object() serializer = FolderSerializer(instance.root_folder) return Response(serializer.data) @swagger_auto_schema( manual_parameters=[Parameter('q', 'query', required=True, type='string',",
"core.models import Notebook, Member from notebook.serializers.folder import FolderSerializer from notebook.serializers.member import MemberSerializer from",
"Notebook, Member from notebook.serializers.folder import FolderSerializer from notebook.serializers.member import MemberSerializer from notebook.serializers.notebook import",
"= obj.members.get(user=request.user) if membership.role != Member.Roles.ADMIN: return False return True class NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin,",
"Response(serializer.data) @swagger_auto_schema( manual_parameters=[Parameter('q', 'query', required=True, type='string', description='_query_ de pesquisa (pode ser substituído pelo",
"many=True) return Response(serializer.data) @swagger_auto_schema( responses={200: FolderSerializer()} ) @action(detail=True, methods=['get']) def root(self, request, pk=None):",
"NotebookRolePermission(permissions.BasePermission): def has_object_permission(self, request, view, obj: Notebook): if request.method in permissions.SAFE_METHODS: return True",
"FolderSerializer(instance.root_folder) return Response(serializer.data) @swagger_auto_schema( manual_parameters=[Parameter('q', 'query', required=True, type='string', description='_query_ de pesquisa (pode ser",
"def has_object_permission(self, request, view, obj: Notebook): if request.method in permissions.SAFE_METHODS: return True if",
"@action(detail=True, methods=['get']) def root(self, request, pk=None): instance: Notebook = self.get_object() serializer = FolderSerializer(instance.root_folder)",
"in permissions.SAFE_METHODS: return True if view.action == 'destroy': if request.user != obj.owner: return",
"return Response(serializer.data) @swagger_auto_schema( responses={200: FolderSerializer()} ) @action(detail=True, methods=['get']) def root(self, request, pk=None): instance:",
"from notebook.serializers.folder import FolderSerializer from notebook.serializers.member import MemberSerializer from notebook.serializers.notebook import NotebookSerializer from",
"responses={200: SearchResultSerializer()} ) @action(detail=True, methods=['get']) def search(self, request, pk=None): instance: Notebook = self.get_object()",
"from drf_yasg.utils import swagger_auto_schema from rest_framework import authentication, permissions, viewsets, mixins from rest_framework.decorators",
"permissions.SAFE_METHODS: return True if view.action == 'destroy': if request.user != obj.owner: return False",
"@swagger_auto_schema( manual_parameters=[Parameter('q', 'query', required=True, type='string', description='_query_ de pesquisa (pode ser substituído pelo parâmetro",
"self.request.user.is_anonymous: return self.queryset return Notebook.objects.filter(member__user=self.request.user, member__is_active=True) @swagger_auto_schema( responses={200: MemberSerializer(many=True)} ) @action(detail=True, methods=['get']) def",
"swagger_auto_schema from rest_framework import authentication, permissions, viewsets, mixins from rest_framework.decorators import action from",
"NotebookRolePermission) queryset = Notebook.objects.all() def get_queryset(self): if self.request.user.is_anonymous: return self.queryset return Notebook.objects.filter(member__user=self.request.user, member__is_active=True)",
"permission_classes = (permissions.IsAuthenticated, NotebookRolePermission) queryset = Notebook.objects.all() def get_queryset(self): if self.request.user.is_anonymous: return self.queryset",
"= self.get_object() query = request.query_params.get('q', None) or request.query_params.get('query', '') serializer = SearchResultSerializer(SearchResult(instance, query))",
"return False return True class NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin): serializer_class =",
"!= obj.owner: return False else: membership = obj.members.get(user=request.user) if membership.role != Member.Roles.ADMIN: return",
"import authentication, permissions, viewsets, mixins from rest_framework.decorators import action from rest_framework.response import Response",
"methods=['get']) def members(self, request, pk=None): instance: Notebook = self.get_object() serializer = MemberSerializer(instance.members.filter(is_active=True), many=True)",
"notebook.serializers.member import MemberSerializer from notebook.serializers.notebook import NotebookSerializer from notebook.serializers.search import SearchResult, SearchResultSerializer class",
"return True class NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin): serializer_class = NotebookSerializer authentication_classes",
"= FolderSerializer(instance.root_folder) return Response(serializer.data) @swagger_auto_schema( manual_parameters=[Parameter('q', 'query', required=True, type='string', description='_query_ de pesquisa (pode",
"membership = obj.members.get(user=request.user) if membership.role != Member.Roles.ADMIN: return False return True class NotebookViewSet(viewsets.GenericViewSet,",
"if self.request.user.is_anonymous: return self.queryset return Notebook.objects.filter(member__user=self.request.user, member__is_active=True) @swagger_auto_schema( responses={200: MemberSerializer(many=True)} ) @action(detail=True, methods=['get'])",
"'destroy': if request.user != obj.owner: return False else: membership = obj.members.get(user=request.user) if membership.role",
"<filename>app/notebook/views/notebook.py<gh_stars>1-10 from drf_yasg.openapi import Parameter from drf_yasg.utils import swagger_auto_schema from rest_framework import authentication,",
"if membership.role != Member.Roles.ADMIN: return False return True class NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin,",
"manual_parameters=[Parameter('q', 'query', required=True, type='string', description='_query_ de pesquisa (pode ser substituído pelo parâmetro `query`)')],",
"request, pk=None): instance: Notebook = self.get_object() serializer = MemberSerializer(instance.members.filter(is_active=True), many=True) return Response(serializer.data) @swagger_auto_schema(",
"viewsets, mixins from rest_framework.decorators import action from rest_framework.response import Response from core.models import",
"rest_framework import authentication, permissions, viewsets, mixins from rest_framework.decorators import action from rest_framework.response import",
"from notebook.serializers.member import MemberSerializer from notebook.serializers.notebook import NotebookSerializer from notebook.serializers.search import SearchResult, SearchResultSerializer",
"import SearchResult, SearchResultSerializer class NotebookRolePermission(permissions.BasePermission): def has_object_permission(self, request, view, obj: Notebook): if request.method",
"if view.action == 'destroy': if request.user != obj.owner: return False else: membership =",
"responses={200: MemberSerializer(many=True)} ) @action(detail=True, methods=['get']) def members(self, request, pk=None): instance: Notebook = self.get_object()",
"request, pk=None): instance: Notebook = self.get_object() query = request.query_params.get('q', None) or request.query_params.get('query', '')",
"Notebook): if request.method in permissions.SAFE_METHODS: return True if view.action == 'destroy': if request.user",
"from notebook.serializers.search import SearchResult, SearchResultSerializer class NotebookRolePermission(permissions.BasePermission): def has_object_permission(self, request, view, obj: Notebook):",
"instance: Notebook = self.get_object() query = request.query_params.get('q', None) or request.query_params.get('query', '') serializer =",
"type='string', description='_query_ de pesquisa (pode ser substituído pelo parâmetro `query`)')], responses={200: SearchResultSerializer()} )",
"Member from notebook.serializers.folder import FolderSerializer from notebook.serializers.member import MemberSerializer from notebook.serializers.notebook import NotebookSerializer",
"serializer = MemberSerializer(instance.members.filter(is_active=True), many=True) return Response(serializer.data) @swagger_auto_schema( responses={200: FolderSerializer()} ) @action(detail=True, methods=['get']) def",
"NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin): serializer_class = NotebookSerializer authentication_classes = (authentication.TokenAuthentication,) permission_classes",
") @action(detail=True, methods=['get']) def search(self, request, pk=None): instance: Notebook = self.get_object() query =",
"pesquisa (pode ser substituído pelo parâmetro `query`)')], responses={200: SearchResultSerializer()} ) @action(detail=True, methods=['get']) def",
"substituído pelo parâmetro `query`)')], responses={200: SearchResultSerializer()} ) @action(detail=True, methods=['get']) def search(self, request, pk=None):",
"@swagger_auto_schema( responses={200: FolderSerializer()} ) @action(detail=True, methods=['get']) def root(self, request, pk=None): instance: Notebook =",
"if request.method in permissions.SAFE_METHODS: return True if view.action == 'destroy': if request.user !=",
"has_object_permission(self, request, view, obj: Notebook): if request.method in permissions.SAFE_METHODS: return True if view.action",
"False return True class NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin): serializer_class = NotebookSerializer",
"SearchResultSerializer class NotebookRolePermission(permissions.BasePermission): def has_object_permission(self, request, view, obj: Notebook): if request.method in permissions.SAFE_METHODS:",
"description='_query_ de pesquisa (pode ser substituído pelo parâmetro `query`)')], responses={200: SearchResultSerializer()} ) @action(detail=True,",
"drf_yasg.openapi import Parameter from drf_yasg.utils import swagger_auto_schema from rest_framework import authentication, permissions, viewsets,",
"from rest_framework import authentication, permissions, viewsets, mixins from rest_framework.decorators import action from rest_framework.response",
"query = request.query_params.get('q', None) or request.query_params.get('query', '') serializer = SearchResultSerializer(SearchResult(instance, query)) return Response(serializer.data)",
"Notebook.objects.filter(member__user=self.request.user, member__is_active=True) @swagger_auto_schema( responses={200: MemberSerializer(many=True)} ) @action(detail=True, methods=['get']) def members(self, request, pk=None): instance:",
"obj.members.get(user=request.user) if membership.role != Member.Roles.ADMIN: return False return True class NotebookViewSet(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin,",
"return True if view.action == 'destroy': if request.user != obj.owner: return False else:",
"notebook.serializers.notebook import NotebookSerializer from notebook.serializers.search import SearchResult, SearchResultSerializer class NotebookRolePermission(permissions.BasePermission): def has_object_permission(self, request,",
"True if view.action == 'destroy': if request.user != obj.owner: return False else: membership",
"MemberSerializer from notebook.serializers.notebook import NotebookSerializer from notebook.serializers.search import SearchResult, SearchResultSerializer class NotebookRolePermission(permissions.BasePermission): def",
"pelo parâmetro `query`)')], responses={200: SearchResultSerializer()} ) @action(detail=True, methods=['get']) def search(self, request, pk=None): instance:",
"def members(self, request, pk=None): instance: Notebook = self.get_object() serializer = MemberSerializer(instance.members.filter(is_active=True), many=True) return"
] |
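The URL configuration that exposes these endpoints is not part of the file above; a hypothetical wiring with DRF's DefaultRouter would surface the custom @action methods as detail sub-routes next to the usual CRUD ones:

from rest_framework.routers import DefaultRouter

router = DefaultRouter()
router.register('notebooks', NotebookViewSet, basename='notebook')
# Produces, among others:
#   GET /notebooks/{pk}/members/
#   GET /notebooks/{pk}/root/
#   GET /notebooks/{pk}/search/?q=...
urlpatterns = router.urls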
[
"import json from discord.utils import get from pymongo import MongoClient, collation from discord.ext",
"reason: reason = \"Reason was not specified\" # Bans member if the author",
"*reason): # Sets default reason if not specified if not reason: reason =",
"commands, tasks import time import os import pymongo as pm import asyncio import",
"discord.ext import commands, tasks import time import os import pymongo as pm import",
"= ' '.join(map(str, reason)) await ctx.reply(f'{member} was kicked with reason \"{reason}\"') await ctx.guild.kick(member,",
"'.join(map(str, reason)) await ctx.reply(f'{member} was kicked with reason \"{reason}\"') await ctx.guild.kick(member, reason=reason) else:",
"member.top_role.position: reason = ' '.join(map(str, reason)) await ctx.reply(f'{member} was kicked with reason \"{reason}\"')",
"if not specified if not reason: reason = \"Reason was not specified\" #",
"# Kick @commands.command(pass_context=True) @commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True) @commands.check(perms) async def kick(self, ctx, member:discord.Member, *reason): #",
"asyncio import random import datetime import copy class Kick(commands.Cog): def __init__(self, bot): self.bot",
"role than the subject. if member is None: await ctx.reply(\"Please mention someone to",
"await ctx.guild.kick(member, reason=reason) else: await ctx.reply(\"The person you are trying to kick is",
"import get from pymongo import MongoClient, collation from discord.ext import commands, tasks import",
"member:discord.Member, *reason): # Sets default reason if not specified if not reason: reason",
"# Bans member if the author has a higher role than the subject.",
"reason=reason) else: await ctx.reply(\"The person you are trying to kick is more powerful",
"default reason if not specified if not reason: reason = \"Reason was not",
"reason)) await ctx.reply(f'{member} was kicked with reason \"{reason}\"') await ctx.guild.kick(member, reason=reason) else: await",
"\"{reason}\"') await ctx.guild.kick(member, reason=reason) else: await ctx.reply(\"The person you are trying to kick",
"mention someone to kick\") else: if ctx.author.top_role.position > member.top_role.position: reason = ' '.join(map(str,",
"' '.join(map(str, reason)) await ctx.reply(f'{member} was kicked with reason \"{reason}\"') await ctx.guild.kick(member, reason=reason)",
"def __init__(self, bot): self.bot = bot # Kick @commands.command(pass_context=True) @commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True) @commands.check(perms) async",
"not specified\" # Bans member if the author has a higher role than",
"from ..admin.managecommands import perms import json from discord.utils import get from pymongo import",
"Bans member if the author has a higher role than the subject. if",
"async def kick(self, ctx, member:discord.Member, *reason): # Sets default reason if not specified",
"ctx.guild.kick(member, reason=reason) else: await ctx.reply(\"The person you are trying to kick is more",
"as pm import asyncio import random import datetime import copy class Kick(commands.Cog): def",
"import datetime import copy class Kick(commands.Cog): def __init__(self, bot): self.bot = bot #",
"if the author has a higher role than the subject. if member is",
"was not specified\" # Bans member if the author has a higher role",
"if ctx.author.top_role.position > member.top_role.position: reason = ' '.join(map(str, reason)) await ctx.reply(f'{member} was kicked",
"discord.utils import get from pymongo import MongoClient, collation from discord.ext import commands, tasks",
"await ctx.reply(f'{member} was kicked with reason \"{reason}\"') await ctx.guild.kick(member, reason=reason) else: await ctx.reply(\"The",
"pymongo as pm import asyncio import random import datetime import copy class Kick(commands.Cog):",
"if member is None: await ctx.reply(\"Please mention someone to kick\") else: if ctx.author.top_role.position",
"Kick(commands.Cog): def __init__(self, bot): self.bot = bot # Kick @commands.command(pass_context=True) @commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True) @commands.check(perms)",
"datetime import copy class Kick(commands.Cog): def __init__(self, bot): self.bot = bot # Kick",
"someone to kick\") else: if ctx.author.top_role.position > member.top_role.position: reason = ' '.join(map(str, reason))",
"self.bot = bot # Kick @commands.command(pass_context=True) @commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True) @commands.check(perms) async def kick(self, ctx,",
"reason = \"Reason was not specified\" # Bans member if the author has",
"not specified if not reason: reason = \"Reason was not specified\" # Bans",
"Sets default reason if not specified if not reason: reason = \"Reason was",
"json from discord.utils import get from pymongo import MongoClient, collation from discord.ext import",
"import commands, tasks import time import os import pymongo as pm import asyncio",
"kicked with reason \"{reason}\"') await ctx.guild.kick(member, reason=reason) else: await ctx.reply(\"The person you are",
"random import datetime import copy class Kick(commands.Cog): def __init__(self, bot): self.bot = bot",
"bot # Kick @commands.command(pass_context=True) @commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True) @commands.check(perms) async def kick(self, ctx, member:discord.Member, *reason):",
"> member.top_role.position: reason = ' '.join(map(str, reason)) await ctx.reply(f'{member} was kicked with reason",
"subject. if member is None: await ctx.reply(\"Please mention someone to kick\") else: if",
"bot): self.bot = bot # Kick @commands.command(pass_context=True) @commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True) @commands.check(perms) async def kick(self,",
"def kick(self, ctx, member:discord.Member, *reason): # Sets default reason if not specified if",
"os import pymongo as pm import asyncio import random import datetime import copy",
"@commands.command(pass_context=True) @commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True) @commands.check(perms) async def kick(self, ctx, member:discord.Member, *reason): # Sets default",
"await ctx.reply(\"Please mention someone to kick\") else: if ctx.author.top_role.position > member.top_role.position: reason =",
"else: await ctx.reply(\"The person you are trying to kick is more powerful than",
"import discord from ..admin.managecommands import perms import json from discord.utils import get from",
"the author has a higher role than the subject. if member is None:",
"was kicked with reason \"{reason}\"') await ctx.guild.kick(member, reason=reason) else: await ctx.reply(\"The person you",
"@commands.bot_has_permissions(kick_members=True) @commands.check(perms) async def kick(self, ctx, member:discord.Member, *reason): # Sets default reason if",
"kick(self, ctx, member:discord.Member, *reason): # Sets default reason if not specified if not",
"to kick\") else: if ctx.author.top_role.position > member.top_role.position: reason = ' '.join(map(str, reason)) await",
"reason = ' '.join(map(str, reason)) await ctx.reply(f'{member} was kicked with reason \"{reason}\"') await",
"perms import json from discord.utils import get from pymongo import MongoClient, collation from",
"import copy class Kick(commands.Cog): def __init__(self, bot): self.bot = bot # Kick @commands.command(pass_context=True)",
"not reason: reason = \"Reason was not specified\" # Bans member if the",
"ctx.reply(f'{member} was kicked with reason \"{reason}\"') await ctx.guild.kick(member, reason=reason) else: await ctx.reply(\"The person",
"None: await ctx.reply(\"Please mention someone to kick\") else: if ctx.author.top_role.position > member.top_role.position: reason",
"with reason \"{reason}\"') await ctx.guild.kick(member, reason=reason) else: await ctx.reply(\"The person you are trying",
"specified if not reason: reason = \"Reason was not specified\" # Bans member",
"pymongo import MongoClient, collation from discord.ext import commands, tasks import time import os",
"higher role than the subject. if member is None: await ctx.reply(\"Please mention someone",
"member is None: await ctx.reply(\"Please mention someone to kick\") else: if ctx.author.top_role.position >",
"author has a higher role than the subject. if member is None: await",
"import asyncio import random import datetime import copy class Kick(commands.Cog): def __init__(self, bot):",
"is None: await ctx.reply(\"Please mention someone to kick\") else: if ctx.author.top_role.position > member.top_role.position:",
"person you are trying to kick is more powerful than you\") def setup(bot):",
"import pymongo as pm import asyncio import random import datetime import copy class",
"collation from discord.ext import commands, tasks import time import os import pymongo as",
"time import os import pymongo as pm import asyncio import random import datetime",
"ctx, member:discord.Member, *reason): # Sets default reason if not specified if not reason:",
"= bot # Kick @commands.command(pass_context=True) @commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True) @commands.check(perms) async def kick(self, ctx, member:discord.Member,",
"import random import datetime import copy class Kick(commands.Cog): def __init__(self, bot): self.bot =",
"from pymongo import MongoClient, collation from discord.ext import commands, tasks import time import",
"discord from ..admin.managecommands import perms import json from discord.utils import get from pymongo",
"\"Reason was not specified\" # Bans member if the author has a higher",
"class Kick(commands.Cog): def __init__(self, bot): self.bot = bot # Kick @commands.command(pass_context=True) @commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True)",
"if not reason: reason = \"Reason was not specified\" # Bans member if",
"await ctx.reply(\"The person you are trying to kick is more powerful than you\")",
"copy class Kick(commands.Cog): def __init__(self, bot): self.bot = bot # Kick @commands.command(pass_context=True) @commands.has_permissions(kick_members=True)",
"Kick @commands.command(pass_context=True) @commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True) @commands.check(perms) async def kick(self, ctx, member:discord.Member, *reason): # Sets",
"reason if not specified if not reason: reason = \"Reason was not specified\"",
"kick\") else: if ctx.author.top_role.position > member.top_role.position: reason = ' '.join(map(str, reason)) await ctx.reply(f'{member}",
"specified\" # Bans member if the author has a higher role than the",
"..admin.managecommands import perms import json from discord.utils import get from pymongo import MongoClient,",
"has a higher role than the subject. if member is None: await ctx.reply(\"Please",
"import time import os import pymongo as pm import asyncio import random import",
"tasks import time import os import pymongo as pm import asyncio import random",
"from discord.utils import get from pymongo import MongoClient, collation from discord.ext import commands,",
"import os import pymongo as pm import asyncio import random import datetime import",
"@commands.check(perms) async def kick(self, ctx, member:discord.Member, *reason): # Sets default reason if not",
"MongoClient, collation from discord.ext import commands, tasks import time import os import pymongo",
"get from pymongo import MongoClient, collation from discord.ext import commands, tasks import time",
"ctx.author.top_role.position > member.top_role.position: reason = ' '.join(map(str, reason)) await ctx.reply(f'{member} was kicked with",
"member if the author has a higher role than the subject. if member",
"than the subject. if member is None: await ctx.reply(\"Please mention someone to kick\")",
"ctx.reply(\"Please mention someone to kick\") else: if ctx.author.top_role.position > member.top_role.position: reason = '",
"else: if ctx.author.top_role.position > member.top_role.position: reason = ' '.join(map(str, reason)) await ctx.reply(f'{member} was",
"reason \"{reason}\"') await ctx.guild.kick(member, reason=reason) else: await ctx.reply(\"The person you are trying to",
"= \"Reason was not specified\" # Bans member if the author has a",
"from discord.ext import commands, tasks import time import os import pymongo as pm",
"pm import asyncio import random import datetime import copy class Kick(commands.Cog): def __init__(self,",
"ctx.reply(\"The person you are trying to kick is more powerful than you\") def",
"# Sets default reason if not specified if not reason: reason = \"Reason",
"the subject. if member is None: await ctx.reply(\"Please mention someone to kick\") else:",
"import perms import json from discord.utils import get from pymongo import MongoClient, collation",
"you are trying to kick is more powerful than you\") def setup(bot): bot.add_cog(Kick(bot))",
"import MongoClient, collation from discord.ext import commands, tasks import time import os import",
"@commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True) @commands.check(perms) async def kick(self, ctx, member:discord.Member, *reason): # Sets default reason",
"a higher role than the subject. if member is None: await ctx.reply(\"Please mention",
"__init__(self, bot): self.bot = bot # Kick @commands.command(pass_context=True) @commands.has_permissions(kick_members=True) @commands.bot_has_permissions(kick_members=True) @commands.check(perms) async def"
"+ 0.01 == 1.01 assert not 1.00 + 0.01 is 1.01 assert type(hash('s'))",
"utf-8 -*- \"\"\" 反直觉,is 和 hash 没什么关系, \"\"\" assert hash(1.00 + 0.01) ==",
"\"\"\" assert hash(1.00 + 0.01) == hash(1.01) assert id(1.00 + 0.01) != id(1.01)",
"coding: utf-8 -*- \"\"\" 反直觉,is 和 hash 没什么关系, \"\"\" assert hash(1.00 + 0.01)",
"-*- \"\"\" 反直觉,is 和 hash 没什么关系, \"\"\" assert hash(1.00 + 0.01) == hash(1.01)",
"1.00 + 0.01 == 1.01 assert not 1.00 + 0.01 is 1.01 assert",
"hash(1.00 + 0.01) == hash(1.01) assert id(1.00 + 0.01) != id(1.01) assert 1.00",
"assert hash(1.00 + 0.01) == hash(1.01) assert id(1.00 + 0.01) != id(1.01) assert",
"+ 0.01) == hash(1.01) assert id(1.00 + 0.01) != id(1.01) assert 1.00 +",
"0.01 == 1.01 assert not 1.00 + 0.01 is 1.01 assert type(hash('s')) ==",
"\"\"\" 反直觉,is 和 hash 没什么关系, \"\"\" assert hash(1.00 + 0.01) == hash(1.01) assert",
"+ 0.01) != id(1.01) assert 1.00 + 0.01 == 1.01 assert not 1.00",
"assert 1.00 + 0.01 == 1.01 assert not 1.00 + 0.01 is 1.01",
"id(1.01) assert 1.00 + 0.01 == 1.01 assert not 1.00 + 0.01 is",
"#!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" 反直觉,is 和 hash 没什么关系, \"\"\"",
"python3 # -*- coding: utf-8 -*- \"\"\" 反直觉,is 和 hash 没什么关系, \"\"\" assert",
"== 1.01 assert not 1.00 + 0.01 is 1.01 assert type(hash('s')) == int",
"没什么关系, \"\"\" assert hash(1.00 + 0.01) == hash(1.01) assert id(1.00 + 0.01) !=",
"0.01) != id(1.01) assert 1.00 + 0.01 == 1.01 assert not 1.00 +",
"和 hash 没什么关系, \"\"\" assert hash(1.00 + 0.01) == hash(1.01) assert id(1.00 +",
"assert id(1.00 + 0.01) != id(1.01) assert 1.00 + 0.01 == 1.01 assert",
"== hash(1.01) assert id(1.00 + 0.01) != id(1.01) assert 1.00 + 0.01 ==",
"hash(1.01) assert id(1.00 + 0.01) != id(1.01) assert 1.00 + 0.01 == 1.01",
"0.01) == hash(1.01) assert id(1.00 + 0.01) != id(1.01) assert 1.00 + 0.01",
"id(1.00 + 0.01) != id(1.01) assert 1.00 + 0.01 == 1.01 assert not",
"反直觉,is 和 hash 没什么关系, \"\"\" assert hash(1.00 + 0.01) == hash(1.01) assert id(1.00",
"hash 没什么关系, \"\"\" assert hash(1.00 + 0.01) == hash(1.01) assert id(1.00 + 0.01)",
"-*- coding: utf-8 -*- \"\"\" 反直觉,is 和 hash 没什么关系, \"\"\" assert hash(1.00 +",
"!= id(1.01) assert 1.00 + 0.01 == 1.01 assert not 1.00 + 0.01",
"# -*- coding: utf-8 -*- \"\"\" 反直觉,is 和 hash 没什么关系, \"\"\" assert hash(1.00"
"rights reserved. # This file is licensed to you under the Apache License,",
"distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS # OF ANY",
"License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to",
"{ 'engine' : 'cpf:engine', 'inputs' : 'cpf:inputs', 'outputs' : 'cpf:outputs' } def __init__(self,",
"under the License. import json from adobe.pdfservices.operation.internal.api.dto.request.platform.engine import Engine from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import Inputs",
"KIND, either express or implied. See the License for the specific language #",
"permissions and limitations under the License. import json from adobe.pdfservices.operation.internal.api.dto.request.platform.engine import Engine from",
"License for the specific language # governing permissions and limitations under the License.",
"# Copyright 2021 Adobe. All rights reserved. # This file is licensed to",
"agreed to in writing, software distributed under # the License is distributed on",
"software distributed under # the License is distributed on an \"AS IS\" BASIS,",
"= Engine(service_id) self.inputs = inputs self.outputs = outputs def to_json(self): return json.dumps(self, cls=JSONHintEncoder,",
"licensed to you under the Apache License, Version 2.0 (the \"License\"); # you",
"import json from adobe.pdfservices.operation.internal.api.dto.request.platform.engine import Engine from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import Inputs from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import",
"OF ANY KIND, either express or implied. See the License for the specific",
"} def __init__(self, service_id, inputs: Inputs, outputs : Outputs): self.engine = Engine(service_id) self.inputs",
"from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import Outputs from adobe.pdfservices.operation.internal.util.json_hint_encoder import JSONHintEncoder class CPFContentAnalyzerRequests: json_hint = {",
"'inputs' : 'cpf:inputs', 'outputs' : 'cpf:outputs' } def __init__(self, service_id, inputs: Inputs, outputs",
"the Apache License, Version 2.0 (the \"License\"); # you may not use this",
"not use this file except in compliance with the License. You may obtain",
"applicable law or agreed to in writing, software distributed under # the License",
"copy # of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable",
"service_id, inputs: Inputs, outputs : Outputs): self.engine = Engine(service_id) self.inputs = inputs self.outputs",
"'engine' : 'cpf:engine', 'inputs' : 'cpf:inputs', 'outputs' : 'cpf:outputs' } def __init__(self, service_id,",
"'outputs' : 'cpf:outputs' } def __init__(self, service_id, inputs: Inputs, outputs : Outputs): self.engine",
"Adobe. All rights reserved. # This file is licensed to you under the",
"may not use this file except in compliance with the License. You may",
"compliance with the License. You may obtain a copy # of the License",
"# the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR",
": 'cpf:inputs', 'outputs' : 'cpf:outputs' } def __init__(self, service_id, inputs: Inputs, outputs :",
"you under the Apache License, Version 2.0 (the \"License\"); # you may not",
"You may obtain a copy # of the License at http://www.apache.org/licenses/LICENSE-2.0 # #",
"License. import json from adobe.pdfservices.operation.internal.api.dto.request.platform.engine import Engine from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import Inputs from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs",
"JSONHintEncoder class CPFContentAnalyzerRequests: json_hint = { 'engine' : 'cpf:engine', 'inputs' : 'cpf:inputs', 'outputs'",
"a copy # of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by",
"'cpf:outputs' } def __init__(self, service_id, inputs: Inputs, outputs : Outputs): self.engine = Engine(service_id)",
"adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import Inputs from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import Outputs from adobe.pdfservices.operation.internal.util.json_hint_encoder import JSONHintEncoder class CPFContentAnalyzerRequests:",
"the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS",
"is licensed to you under the Apache License, Version 2.0 (the \"License\"); #",
"Engine from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import Inputs from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import Outputs from adobe.pdfservices.operation.internal.util.json_hint_encoder import JSONHintEncoder",
"express or implied. See the License for the specific language # governing permissions",
": 'cpf:engine', 'inputs' : 'cpf:inputs', 'outputs' : 'cpf:outputs' } def __init__(self, service_id, inputs:",
"from adobe.pdfservices.operation.internal.api.dto.request.platform.engine import Engine from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import Inputs from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import Outputs from",
"ANY KIND, either express or implied. See the License for the specific language",
"to in writing, software distributed under # the License is distributed on an",
"the specific language # governing permissions and limitations under the License. import json",
": 'cpf:outputs' } def __init__(self, service_id, inputs: Inputs, outputs : Outputs): self.engine =",
"self.engine = Engine(service_id) self.inputs = inputs self.outputs = outputs def to_json(self): return json.dumps(self,",
"License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS #",
"adobe.pdfservices.operation.internal.api.dto.request.platform.engine import Engine from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import Inputs from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import Outputs from adobe.pdfservices.operation.internal.util.json_hint_encoder",
"License, Version 2.0 (the \"License\"); # you may not use this file except",
"except in compliance with the License. You may obtain a copy # of",
"or agreed to in writing, software distributed under # the License is distributed",
"Unless required by applicable law or agreed to in writing, software distributed under",
"the License for the specific language # governing permissions and limitations under the",
"the License. You may obtain a copy # of the License at http://www.apache.org/licenses/LICENSE-2.0",
"governing permissions and limitations under the License. import json from adobe.pdfservices.operation.internal.api.dto.request.platform.engine import Engine",
"\"AS IS\" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS # OF ANY KIND, either express",
"def __init__(self, service_id, inputs: Inputs, outputs : Outputs): self.engine = Engine(service_id) self.inputs =",
"Copyright 2021 Adobe. All rights reserved. # This file is licensed to you",
"2.0 (the \"License\"); # you may not use this file except in compliance",
"or implied. See the License for the specific language # governing permissions and",
"specific language # governing permissions and limitations under the License. import json from",
"writing, software distributed under # the License is distributed on an \"AS IS\"",
"# you may not use this file except in compliance with the License.",
"# of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law",
"by applicable law or agreed to in writing, software distributed under # the",
"See the License for the specific language # governing permissions and limitations under",
": Outputs): self.engine = Engine(service_id) self.inputs = inputs self.outputs = outputs def to_json(self):",
"file except in compliance with the License. You may obtain a copy #",
"(the \"License\"); # you may not use this file except in compliance with",
"__init__(self, service_id, inputs: Inputs, outputs : Outputs): self.engine = Engine(service_id) self.inputs = inputs",
"this file except in compliance with the License. You may obtain a copy",
"# # Unless required by applicable law or agreed to in writing, software",
"'cpf:inputs', 'outputs' : 'cpf:outputs' } def __init__(self, service_id, inputs: Inputs, outputs : Outputs):",
"obtain a copy # of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required",
"Inputs from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import Outputs from adobe.pdfservices.operation.internal.util.json_hint_encoder import JSONHintEncoder class CPFContentAnalyzerRequests: json_hint =",
"Version 2.0 (the \"License\"); # you may not use this file except in",
"json from adobe.pdfservices.operation.internal.api.dto.request.platform.engine import Engine from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import Inputs from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import Outputs",
"# Unless required by applicable law or agreed to in writing, software distributed",
"you may not use this file except in compliance with the License. You",
"file is licensed to you under the Apache License, Version 2.0 (the \"License\");",
"from adobe.pdfservices.operation.internal.util.json_hint_encoder import JSONHintEncoder class CPFContentAnalyzerRequests: json_hint = { 'engine' : 'cpf:engine', 'inputs'",
"on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS # OF ANY KIND,",
"WITHOUT WARRANTIES OR REPRESENTATIONS # OF ANY KIND, either express or implied. See",
"# This file is licensed to you under the Apache License, Version 2.0",
"import Outputs from adobe.pdfservices.operation.internal.util.json_hint_encoder import JSONHintEncoder class CPFContentAnalyzerRequests: json_hint = { 'engine' :",
"'cpf:engine', 'inputs' : 'cpf:inputs', 'outputs' : 'cpf:outputs' } def __init__(self, service_id, inputs: Inputs,",
"inputs: Inputs, outputs : Outputs): self.engine = Engine(service_id) self.inputs = inputs self.outputs =",
"Inputs, outputs : Outputs): self.engine = Engine(service_id) self.inputs = inputs self.outputs = outputs",
"in compliance with the License. You may obtain a copy # of the",
"implied. See the License for the specific language # governing permissions and limitations",
"Outputs): self.engine = Engine(service_id) self.inputs = inputs self.outputs = outputs def to_json(self): return",
"distributed under # the License is distributed on an \"AS IS\" BASIS, WITHOUT",
"# governing permissions and limitations under the License. import json from adobe.pdfservices.operation.internal.api.dto.request.platform.engine import",
"required by applicable law or agreed to in writing, software distributed under #",
"with the License. You may obtain a copy # of the License at",
"limitations under the License. import json from adobe.pdfservices.operation.internal.api.dto.request.platform.engine import Engine from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import",
"the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed",
"json_hint = { 'engine' : 'cpf:engine', 'inputs' : 'cpf:inputs', 'outputs' : 'cpf:outputs' }",
"outputs : Outputs): self.engine = Engine(service_id) self.inputs = inputs self.outputs = outputs def",
"class CPFContentAnalyzerRequests: json_hint = { 'engine' : 'cpf:engine', 'inputs' : 'cpf:inputs', 'outputs' :",
"# OF ANY KIND, either express or implied. See the License for the",
"self.inputs = inputs self.outputs = outputs def to_json(self): return json.dumps(self, cls=JSONHintEncoder, indent=4, sort_keys=True)",
"of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or",
"<filename>src/adobe/pdfservices/operation/internal/api/dto/request/platform/cpf_content_analyzer_req.py # Copyright 2021 Adobe. All rights reserved. # This file is licensed",
"This file is licensed to you under the Apache License, Version 2.0 (the",
"import Engine from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import Inputs from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import Outputs from adobe.pdfservices.operation.internal.util.json_hint_encoder import",
"may obtain a copy # of the License at http://www.apache.org/licenses/LICENSE-2.0 # # Unless",
"in writing, software distributed under # the License is distributed on an \"AS",
"CPFContentAnalyzerRequests: json_hint = { 'engine' : 'cpf:engine', 'inputs' : 'cpf:inputs', 'outputs' : 'cpf:outputs'",
"to you under the Apache License, Version 2.0 (the \"License\"); # you may",
"OR REPRESENTATIONS # OF ANY KIND, either express or implied. See the License",
"adobe.pdfservices.operation.internal.util.json_hint_encoder import JSONHintEncoder class CPFContentAnalyzerRequests: json_hint = { 'engine' : 'cpf:engine', 'inputs' :",
"\"License\"); # you may not use this file except in compliance with the",
"either express or implied. See the License for the specific language # governing",
"use this file except in compliance with the License. You may obtain a",
"All rights reserved. # This file is licensed to you under the Apache",
"an \"AS IS\" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS # OF ANY KIND, either",
"IS\" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS # OF ANY KIND, either express or",
"under # the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES",
"Outputs from adobe.pdfservices.operation.internal.util.json_hint_encoder import JSONHintEncoder class CPFContentAnalyzerRequests: json_hint = { 'engine' : 'cpf:engine',",
"License. You may obtain a copy # of the License at http://www.apache.org/licenses/LICENSE-2.0 #",
"for the specific language # governing permissions and limitations under the License. import",
"law or agreed to in writing, software distributed under # the License is",
"language # governing permissions and limitations under the License. import json from adobe.pdfservices.operation.internal.api.dto.request.platform.engine",
"import Inputs from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import Outputs from adobe.pdfservices.operation.internal.util.json_hint_encoder import JSONHintEncoder class CPFContentAnalyzerRequests: json_hint",
"REPRESENTATIONS # OF ANY KIND, either express or implied. See the License for",
"import JSONHintEncoder class CPFContentAnalyzerRequests: json_hint = { 'engine' : 'cpf:engine', 'inputs' : 'cpf:inputs',",
"at http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in",
"and limitations under the License. import json from adobe.pdfservices.operation.internal.api.dto.request.platform.engine import Engine from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs",
"Apache License, Version 2.0 (the \"License\"); # you may not use this file",
"2021 Adobe. All rights reserved. # This file is licensed to you under",
"the License. import json from adobe.pdfservices.operation.internal.api.dto.request.platform.engine import Engine from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import Inputs from",
"adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import Outputs from adobe.pdfservices.operation.internal.util.json_hint_encoder import JSONHintEncoder class CPFContentAnalyzerRequests: json_hint = { 'engine'",
"Engine(service_id) self.inputs = inputs self.outputs = outputs def to_json(self): return json.dumps(self, cls=JSONHintEncoder, indent=4,",
"= { 'engine' : 'cpf:engine', 'inputs' : 'cpf:inputs', 'outputs' : 'cpf:outputs' } def",
"http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,",
"from adobe.pdfservices.operation.internal.api.dto.request.platform.inputs import Inputs from adobe.pdfservices.operation.internal.api.dto.request.platform.outputs import Outputs from adobe.pdfservices.operation.internal.util.json_hint_encoder import JSONHintEncoder class",
"reserved. # This file is licensed to you under the Apache License, Version",
"WARRANTIES OR REPRESENTATIONS # OF ANY KIND, either express or implied. See the",
"BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS # OF ANY KIND, either express or implied.",
"is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS # OF",
"under the Apache License, Version 2.0 (the \"License\"); # you may not use"
"the API but we do not want to expose personal info in code",
"IOB tagging, see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) # Sample HathiTrust ID # This is the HTID",
"\".join(text_list) #page_words_extended = page_words+page_ner tokens = word_tokenize(text_reconstruction) tagged_tokens = st.tag(tokens) tagged_tokens = [item",
"import stopwords stops = set(stopwords.words('english')) pns_list = [] for i in range(1, max(counts['page'])+1):",
"= [] for subtree in ne_tree: if type(subtree) == Tree: # If subtree",
"in tokens if token[0].isupper()] combs = [f'{x} {y}' for x, y in combinations(pns,",
"maxRows=5, key=USERNAME) jsons = [] for result in results: jsons.append(result.json) geocoder_results.append(jsons) # Create",
"w, c in text_data: for i in range(0, c): text_list.append(w) random.shuffle(text_list) # Necessary?",
"matches the top country coordinates = [] for i, results in enumerate(geocoder_results): for",
"from nltk.chunk import conlltags2tree from nltk.tree import Tree import pandas as pd from",
"width=960, height=512) for i, c in enumerate(coordinates): folium.CircleMarker([c[0], c[1]], radius=most_common_locations[places_list[i]]*.25, color='#3186cc', fill=True, fill_opacity=0.5,",
"to access the API but we do not want to expose personal info",
"locally by adding USERNAME to environment variables, e.g. to .env, as follows: #",
"tagged_tokens if item[0] != ''] ne_tree = stanfordNE2tree(tagged_tokens) ne_in_sent = [] for subtree",
"stops and len(token) > 2] pns = [token for token in tokens if",
"== 'LOCATION'] print(locations) most_common_locations = Counter(locations).most_common(10) pprint(most_common_locations) # Organize some data for map",
"map info places_list = [name for name, _ in most_common_locations][:3] # Limit to",
"for name, _ in most_common_locations][:3] # Limit to top three most_common_locations = dict(most_common_locations)",
"for putting together with inside-outside-beginning (IOB) logic # Cf. https://stackoverflow.com/a/30666949 # # For",
"if tag[1] == 'LOCATION'] print(locations) most_common_locations = Counter(locations).most_common(10) pprint(most_common_locations) # Organize some data",
"which country appears most often top_country = sorted(Counter(countries))[0] print(top_country) # Iterate over geocoder_results",
"tokens if token.lower() not in stops and len(token) > 2] pns = [token",
"_ in most_common_locations][:3] # Limit to top three most_common_locations = dict(most_common_locations) # Turn",
"= counts[counts['page'] == page]['token'].tolist() print(test) print(len(test)) from nltk.corpus import stopwords stops = set(stopwords.words('english'))",
"Iterate over geocoder_results and keep the first lat/long that matches the top country",
"# Loop through and multiply words by counts text_list = [] for w,",
"ne_label = subtree.label() ne_string = \" \".join([token for token, pos in subtree.leaves()]) ne_in_sent.append((ne_string,",
"from nltk.tag import StanfordNERTagger from nltk.tokenize import word_tokenize from nltk import pos_tag from",
"= dict(most_common_locations) # Turn mcl into dictionary # Retrieve json from geonames API",
"sorted(Counter(countries))[0] print(top_country) # Iterate over geocoder_results and keep the first lat/long that matches",
"fr: tokens = vol.tokenlist() # Create pandas dataframe with relevant data temp =",
"list of 'country' from the geonames json results countries = [] for results",
"encoding='utf-8') # Functions for putting together with inside-outside-beginning (IOB) logic # Cf. https://stackoverflow.com/a/30666949",
"pos_tag from nltk.chunk import conlltags2tree from nltk.tree import Tree import pandas as pd",
"some data for map info places_list = [name for name, _ in most_common_locations][:3]",
"# Turn mcl into dictionary # Retrieve json from geonames API (for fun",
"NE != \"O\" ne_label = subtree.label() ne_string = \" \".join([token for token, pos",
"countries.append(item['country']) # Determine which country appears most often top_country = sorted(Counter(countries))[0] print(top_country) #",
"Tree import pandas as pd from htrc_features import FeatureReader import geocoder import folium",
"(IOB) logic # Cf. https://stackoverflow.com/a/30666949 # # For more information on IOB tagging,",
"do not want to expose personal info in code # # Run this",
"more information on IOB tagging, see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) # Sample HathiTrust ID # This",
"excavations,\" <NAME>, <NAME>, and <NAME> htid = \"wu.89079728994\" # Get HTEF data for",
"import pos_tag from nltk.chunk import conlltags2tree from nltk.tree import Tree import pandas as",
"USERNAME to environment variables, e.g. to .env, as follows: # > export USERNAME=<insert",
"NER Tagger # Ignore deprecation warning for now; we'll deal with it when",
"token in tokens if token[0].isupper()] combs = [f'{x} {y}' for x, y in",
"information on IOB tagging, see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) # Sample HathiTrust ID # This is",
"environment variables, e.g. to .env, as follows: # > export USERNAME=<insert username here>",
"tagging, see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) # Sample HathiTrust ID # This is the HTID for...",
"this ID; specifically tokenlist fr = FeatureReader(ids=[htid]) for vol in fr: tokens =",
"top_country = sorted(Counter(countries))[0] print(top_country) # Iterate over geocoder_results and keep the first lat/long",
"os import random from collections import Counter, defaultdict import random from nltk.tag import",
"import random from nltk.tag import StanfordNERTagger from nltk.tokenize import word_tokenize from nltk import",
"If subtree is a noun chunk, i.e. NE != \"O\" ne_label = subtree.label()",
"inside-outside-beginning (IOB) logic # Cf. https://stackoverflow.com/a/30666949 # # For more information on IOB",
"pns = [token for token in tokens if token[0].isupper()] combs = [f'{x} {y}'",
"Organize some data for map info places_list = [name for name, _ in",
"up Folium and populate with weighted coordinates basemap = folium.Map(location=[37.97945, 23.71622], zoom_start=8, tiles='cartodbpositron',",
"Ignore deprecation warning for now; we'll deal with it when the time comes!",
"environment variable # Geonames requires a username to access the API but we",
"zoom_start=8, tiles='cartodbpositron', width=960, height=512) for i, c in enumerate(coordinates): folium.CircleMarker([c[0], c[1]], radius=most_common_locations[places_list[i]]*.25, color='#3186cc',",
"= FeatureReader(ids=[htid]) for vol in fr: tokens = vol.tokenlist() # Create pandas dataframe",
"results in enumerate(geocoder_results): for item in results: if item['country'] == top_country: coordinates.append((float(item['lat']), float(item['lng'])))",
"set(stopwords.words('english')) pns_list = [] for i in range(1, max(counts['page'])+1): tokens = counts[counts['page'] ==",
"i.e. NE != \"O\" ne_label = subtree.label() ne_string = \" \".join([token for token,",
"enumerate(coordinates): folium.CircleMarker([c[0], c[1]], radius=most_common_locations[places_list[i]]*.25, color='#3186cc', fill=True, fill_opacity=0.5, fill_color='#3186cc', popup='{} ({}, {}) appears {}",
"is the HTID for... # \"Ancient Corinth: A guide to the excavations,\" <NAME>,",
"import Tree import pandas as pd from htrc_features import FeatureReader import geocoder import",
"dictionary # Retrieve json from geonames API (for fun this time using geocoder)",
"tokens and counts text_data = list(zip(counts['token'].tolist(), counts['count'].tolist())) # Loop through and multiply words",
"Get HTEF data for this ID; specifically tokenlist fr = FeatureReader(ids=[htid]) for vol",
"import random from collections import Counter, defaultdict import random from nltk.tag import StanfordNERTagger",
"[token for token in tokens if token[0].isupper()] combs = [f'{x} {y}' for x,",
"warning for now; we'll deal with it when the time comes! st =",
"random.shuffle(text_list) # Necessary? text_reconstruction = \" \".join(text_list) #page_words_extended = page_words+page_ner tokens = word_tokenize(text_reconstruction)",
"for results in geocoder_results: for item in results: if 'country' in item.keys(): countries.append(item['country'])",
"chunk, i.e. NE != \"O\" ne_label = subtree.label() ne_string = \" \".join([token for",
"from geonames API (for fun this time using geocoder) geocoder_results = [] for",
"this locally by adding USERNAME to environment variables, e.g. to .env, as follows:",
"counts[counts['page'] == i]['token'].tolist() tokens = [token for token in tokens if token.lower() not",
"book.'.format(places_list[i], c[0], c[1], most_common_locations[places_list[i]])).add_to(basemap) print('Map of relevant locations in Broneer et al.\\'s \"Ancient",
"# Limit to top three most_common_locations = dict(most_common_locations) # Turn mcl into dictionary",
"in range(1, max(counts['page'])+1): tokens = counts[counts['page'] == i]['token'].tolist() tokens = [token for token",
"Loop through and multiply words by counts text_list = [] for w, c",
"A guide to the excavations,\" weighted by frequency.') basemap page = 87 test",
"import os import random from collections import Counter, defaultdict import random from nltk.tag",
"defaultdict import random from nltk.tag import StanfordNERTagger from nltk.tokenize import word_tokenize from nltk",
"ID; specifically tokenlist fr = FeatureReader(ids=[htid]) for vol in fr: tokens = vol.tokenlist()",
"for place in places_list: results = geocoder.geonames(place, maxRows=5, key=USERNAME) jsons = [] for",
"87 test = counts[counts['page'] == page]['token'].tolist() print(test) print(len(test)) from nltk.corpus import stopwords stops",
"item in results: if item['country'] == top_country: coordinates.append((float(item['lat']), float(item['lng']))) break # Only get",
"Setup Stanford NER Tagger # Ignore deprecation warning for now; we'll deal with",
"https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) # Sample HathiTrust ID # This is the HTID for... # \"Ancient",
"and counts text_data = list(zip(counts['token'].tolist(), counts['count'].tolist())) # Loop through and multiply words by",
"time comes! st = StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8') # Functions for putting together with",
"now; we'll deal with it when the time comes! st = StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar',",
"= StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8') # Functions for putting together with inside-outside-beginning (IOB) logic",
"= sorted(Counter(countries))[0] print(top_country) # Iterate over geocoder_results and keep the first lat/long that",
"in tagged_tokens if item[0] != ''] ne_tree = stanfordNE2tree(tagged_tokens) ne_in_sent = [] for",
"# Necessary? text_reconstruction = \" \".join(text_list) #page_words_extended = page_words+page_ner tokens = word_tokenize(text_reconstruction) tagged_tokens",
"folium.Map(location=[37.97945, 23.71622], zoom_start=8, tiles='cartodbpositron', width=960, height=512) for i, c in enumerate(coordinates): folium.CircleMarker([c[0], c[1]],",
"from the geonames json results countries = [] for results in geocoder_results: for",
"dataframe with relevant data temp = tokens.index.values.tolist() counts = pd.DataFrame.from_records(temp, columns=['page', 'section', 'token',",
"c[0], c[1], most_common_locations[places_list[i]])).add_to(basemap) print('Map of relevant locations in Broneer et al.\\'s \"Ancient Corinth:",
"geocoder) geocoder_results = [] for place in places_list: results = geocoder.geonames(place, maxRows=5, key=USERNAME)",
"range(0, c): text_list.append(w) random.shuffle(text_list) # Necessary? text_reconstruction = \" \".join(text_list) #page_words_extended = page_words+page_ner",
"populate with weighted coordinates basemap = folium.Map(location=[37.97945, 23.71622], zoom_start=8, tiles='cartodbpositron', width=960, height=512) for",
"place in places_list: results = geocoder.geonames(place, maxRows=5, key=USERNAME) jsons = [] for result",
"to expose personal info in code # # Run this locally by adding",
"follows: # > export USERNAME=<insert username here> USERNAME = os.getenv('USERNAME') # Setup Stanford",
"#page_words_extended = page_words+page_ner tokens = word_tokenize(text_reconstruction) tagged_tokens = st.tag(tokens) tagged_tokens = [item for",
"= Counter(locations).most_common(10) pprint(most_common_locations) # Organize some data for map info places_list = [name",
"{}) appears {} times in book.'.format(places_list[i], c[0], c[1], most_common_locations[places_list[i]])).add_to(basemap) print('Map of relevant locations",
"\"Ancient Corinth: A guide to the excavations,\" <NAME>, <NAME>, and <NAME> htid =",
"counts['count'] = tokens['count'].tolist() counts[:10] # Reconstruct text using tokens and counts text_data =",
"Geonames requires a username to access the API but we do not want",
"through and multiply words by counts text_list = [] for w, c in",
"c in text_data: for i in range(0, c): text_list.append(w) random.shuffle(text_list) # Necessary? text_reconstruction",
"color='#3186cc', fill=True, fill_opacity=0.5, fill_color='#3186cc', popup='{} ({}, {}) appears {} times in book.'.format(places_list[i], c[0],",
"c[1]], radius=most_common_locations[places_list[i]]*.25, color='#3186cc', fill=True, fill_opacity=0.5, fill_color='#3186cc', popup='{} ({}, {}) appears {} times in",
"three most_common_locations = dict(most_common_locations) # Turn mcl into dictionary # Retrieve json from",
"multiply words by counts text_list = [] for w, c in text_data: for",
"Folium and populate with weighted coordinates basemap = folium.Map(location=[37.97945, 23.71622], zoom_start=8, tiles='cartodbpositron', width=960,",
"tqdm import tqdm # Set environment variable # Geonames requires a username to",
"access the API but we do not want to expose personal info in",
"Corinth: A guide to the excavations,\" weighted by frequency.') basemap page = 87",
"most_common_locations = Counter(locations).most_common(10) pprint(most_common_locations) # Organize some data for map info places_list =",
"# Retrieve json from geonames API (for fun this time using geocoder) geocoder_results",
"> export USERNAME=<insert username here> USERNAME = os.getenv('USERNAME') # Setup Stanford NER Tagger",
"subtree.label() ne_string = \" \".join([token for token, pos in subtree.leaves()]) ne_in_sent.append((ne_string, ne_label)) locations",
"from htrc_features import FeatureReader import geocoder import folium from pprint import pprint from",
"from nltk.tokenize import word_tokenize from nltk import pos_tag from nltk.chunk import conlltags2tree from",
"json from geonames API (for fun this time using geocoder) geocoder_results = []",
"len(token) > 2] pns = [token for token in tokens if token[0].isupper()] combs",
"often top_country = sorted(Counter(countries))[0] print(top_country) # Iterate over geocoder_results and keep the first",
"item in tagged_tokens if item[0] != ''] ne_tree = stanfordNE2tree(tagged_tokens) ne_in_sent = []",
"for tag in ne_in_sent if tag[1] == 'LOCATION'] print(locations) most_common_locations = Counter(locations).most_common(10) pprint(most_common_locations)",
"= [] for place in places_list: results = geocoder.geonames(place, maxRows=5, key=USERNAME) jsons =",
"counts[counts['page'] == page]['token'].tolist() print(test) print(len(test)) from nltk.corpus import stopwords stops = set(stopwords.words('english')) pns_list",
"range(1, max(counts['page'])+1): tokens = counts[counts['page'] == i]['token'].tolist() tokens = [token for token in",
"basemap page = 87 test = counts[counts['page'] == page]['token'].tolist() print(test) print(len(test)) from nltk.corpus",
"from nltk import pos_tag from nltk.chunk import conlltags2tree from nltk.tree import Tree import",
"nltk.tree import Tree import pandas as pd from htrc_features import FeatureReader import geocoder",
"geocoder_results and keep the first lat/long that matches the top country coordinates =",
"= [token for token in tokens if token.lower() not in stops and len(token)",
"= os.getenv('USERNAME') # Setup Stanford NER Tagger # Ignore deprecation warning for now;",
"= tokens.index.values.tolist() counts = pd.DataFrame.from_records(temp, columns=['page', 'section', 'token', 'pos']) counts['count'] = tokens['count'].tolist() counts[:10]",
"relevant data temp = tokens.index.values.tolist() counts = pd.DataFrame.from_records(temp, columns=['page', 'section', 'token', 'pos']) counts['count']",
"to environment variables, e.g. to .env, as follows: # > export USERNAME=<insert username",
"to the excavations,\" <NAME>, <NAME>, and <NAME> htid = \"wu.89079728994\" # Get HTEF",
"counts text_data = list(zip(counts['token'].tolist(), counts['count'].tolist())) # Loop through and multiply words by counts",
"# \"Ancient Corinth: A guide to the excavations,\" <NAME>, <NAME>, and <NAME> htid",
"Tree: # If subtree is a noun chunk, i.e. NE != \"O\" ne_label",
"collections import Counter, defaultdict import random from nltk.tag import StanfordNERTagger from nltk.tokenize import",
"counts text_list = [] for w, c in text_data: for i in range(0,",
"\".join([token for token, pos in subtree.leaves()]) ne_in_sent.append((ne_string, ne_label)) locations = [tag[0].title() for tag",
"top three most_common_locations = dict(most_common_locations) # Turn mcl into dictionary # Retrieve json",
"in stops and len(token) > 2] pns = [token for token in tokens",
"= [item for item in tagged_tokens if item[0] != ''] ne_tree = stanfordNE2tree(tagged_tokens)",
"here> USERNAME = os.getenv('USERNAME') # Setup Stanford NER Tagger # Ignore deprecation warning",
"[] for result in results: jsons.append(result.json) geocoder_results.append(jsons) # Create a list of 'country'",
"as follows: # > export USERNAME=<insert username here> USERNAME = os.getenv('USERNAME') # Setup",
"= [name for name, _ in most_common_locations][:3] # Limit to top three most_common_locations",
"guide to the excavations,\" weighted by frequency.') basemap page = 87 test =",
"API (for fun this time using geocoder) geocoder_results = [] for place in",
"FeatureReader(ids=[htid]) for vol in fr: tokens = vol.tokenlist() # Create pandas dataframe with",
"fr = FeatureReader(ids=[htid]) for vol in fr: tokens = vol.tokenlist() # Create pandas",
"if item[0] != ''] ne_tree = stanfordNE2tree(tagged_tokens) ne_in_sent = [] for subtree in",
"'country' from the geonames json results countries = [] for results in geocoder_results:",
"the excavations,\" weighted by frequency.') basemap page = 87 test = counts[counts['page'] ==",
"in range(0, c): text_list.append(w) random.shuffle(text_list) # Necessary? text_reconstruction = \" \".join(text_list) #page_words_extended =",
"in most_common_locations][:3] # Limit to top three most_common_locations = dict(most_common_locations) # Turn mcl",
"together with inside-outside-beginning (IOB) logic # Cf. https://stackoverflow.com/a/30666949 # # For more information",
"by counts text_list = [] for w, c in text_data: for i in",
"# Sample HathiTrust ID # This is the HTID for... # \"Ancient Corinth:",
"= \"wu.89079728994\" # Get HTEF data for this ID; specifically tokenlist fr =",
"for now print(places_list) print(coordinates) # Set up Folium and populate with weighted coordinates",
"import geocoder import folium from pprint import pprint from tqdm import tqdm #",
"fill_color='#3186cc', popup='{} ({}, {}) appears {} times in book.'.format(places_list[i], c[0], c[1], most_common_locations[places_list[i]])).add_to(basemap) print('Map",
"nltk import pos_tag from nltk.chunk import conlltags2tree from nltk.tree import Tree import pandas",
"and populate with weighted coordinates basemap = folium.Map(location=[37.97945, 23.71622], zoom_start=8, tiles='cartodbpositron', width=960, height=512)",
"with it when the time comes! st = StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8') # Functions",
"times in book.'.format(places_list[i], c[0], c[1], most_common_locations[places_list[i]])).add_to(basemap) print('Map of relevant locations in Broneer et",
"in item.keys(): countries.append(item['country']) # Determine which country appears most often top_country = sorted(Counter(countries))[0]",
"= subtree.label() ne_string = \" \".join([token for token, pos in subtree.leaves()]) ne_in_sent.append((ne_string, ne_label))",
"in book.'.format(places_list[i], c[0], c[1], most_common_locations[places_list[i]])).add_to(basemap) print('Map of relevant locations in Broneer et al.\\'s",
"to the excavations,\" weighted by frequency.') basemap page = 87 test = counts[counts['page']",
"most_common_locations = dict(most_common_locations) # Turn mcl into dictionary # Retrieve json from geonames",
"= vol.tokenlist() # Create pandas dataframe with relevant data temp = tokens.index.values.tolist() counts",
"personal info in code # # Run this locally by adding USERNAME to",
"in subtree.leaves()]) ne_in_sent.append((ne_string, ne_label)) locations = [tag[0].title() for tag in ne_in_sent if tag[1]",
"HTID for... # \"Ancient Corinth: A guide to the excavations,\" <NAME>, <NAME>, and",
"into dictionary # Retrieve json from geonames API (for fun this time using",
"top_country: coordinates.append((float(item['lat']), float(item['lng']))) break # Only get the first item for now print(places_list)",
"'/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8') # Functions for putting together with inside-outside-beginning (IOB) logic # Cf.",
"ne_tree = stanfordNE2tree(tagged_tokens) ne_in_sent = [] for subtree in ne_tree: if type(subtree) ==",
"et al.\\'s \"Ancient Corinth: A guide to the excavations,\" weighted by frequency.') basemap",
"<NAME>, and <NAME> htid = \"wu.89079728994\" # Get HTEF data for this ID;",
"geonames json results countries = [] for results in geocoder_results: for item in",
"random from nltk.tag import StanfordNERTagger from nltk.tokenize import word_tokenize from nltk import pos_tag",
"in ne_in_sent if tag[1] == 'LOCATION'] print(locations) most_common_locations = Counter(locations).most_common(10) pprint(most_common_locations) # Organize",
"word_tokenize from nltk import pos_tag from nltk.chunk import conlltags2tree from nltk.tree import Tree",
"variable # Geonames requires a username to access the API but we do",
"subtree.leaves()]) ne_in_sent.append((ne_string, ne_label)) locations = [tag[0].title() for tag in ne_in_sent if tag[1] ==",
"coordinates = [] for i, results in enumerate(geocoder_results): for item in results: if",
"subtree is a noun chunk, i.e. NE != \"O\" ne_label = subtree.label() ne_string",
"for i in range(1, max(counts['page'])+1): tokens = counts[counts['page'] == i]['token'].tolist() tokens = [token",
"first item for now print(places_list) print(coordinates) # Set up Folium and populate with",
"max(counts['page'])+1): tokens = counts[counts['page'] == i]['token'].tolist() tokens = [token for token in tokens",
"nltk.tokenize import word_tokenize from nltk import pos_tag from nltk.chunk import conlltags2tree from nltk.tree",
"top country coordinates = [] for i, results in enumerate(geocoder_results): for item in",
"stopwords stops = set(stopwords.words('english')) pns_list = [] for i in range(1, max(counts['page'])+1): tokens",
"folium.CircleMarker([c[0], c[1]], radius=most_common_locations[places_list[i]]*.25, color='#3186cc', fill=True, fill_opacity=0.5, fill_color='#3186cc', popup='{} ({}, {}) appears {} times",
"tokens = word_tokenize(text_reconstruction) tagged_tokens = st.tag(tokens) tagged_tokens = [item for item in tagged_tokens",
"pandas as pd from htrc_features import FeatureReader import geocoder import folium from pprint",
"but we do not want to expose personal info in code # #",
"for i, c in enumerate(coordinates): folium.CircleMarker([c[0], c[1]], radius=most_common_locations[places_list[i]]*.25, color='#3186cc', fill=True, fill_opacity=0.5, fill_color='#3186cc', popup='{}",
"in fr: tokens = vol.tokenlist() # Create pandas dataframe with relevant data temp",
"counts = pd.DataFrame.from_records(temp, columns=['page', 'section', 'token', 'pos']) counts['count'] = tokens['count'].tolist() counts[:10] # Reconstruct",
"pprint from tqdm import tqdm # Set environment variable # Geonames requires a",
"not want to expose personal info in code # # Run this locally",
"a list of 'country' from the geonames json results countries = [] for",
"tag in ne_in_sent if tag[1] == 'LOCATION'] print(locations) most_common_locations = Counter(locations).most_common(10) pprint(most_common_locations) #",
"specifically tokenlist fr = FeatureReader(ids=[htid]) for vol in fr: tokens = vol.tokenlist() #",
"item for now print(places_list) print(coordinates) # Set up Folium and populate with weighted",
"# > export USERNAME=<insert username here> USERNAME = os.getenv('USERNAME') # Setup Stanford NER",
"!= \"O\" ne_label = subtree.label() ne_string = \" \".join([token for token, pos in",
"= \" \".join([token for token, pos in subtree.leaves()]) ne_in_sent.append((ne_string, ne_label)) locations = [tag[0].title()",
"result in results: jsons.append(result.json) geocoder_results.append(jsons) # Create a list of 'country' from the",
"\" \".join(text_list) #page_words_extended = page_words+page_ner tokens = word_tokenize(text_reconstruction) tagged_tokens = st.tag(tokens) tagged_tokens =",
"c[1], most_common_locations[places_list[i]])).add_to(basemap) print('Map of relevant locations in Broneer et al.\\'s \"Ancient Corinth: A",
"[token for token in tokens if token.lower() not in stops and len(token) >",
"data for this ID; specifically tokenlist fr = FeatureReader(ids=[htid]) for vol in fr:",
"Retrieve json from geonames API (for fun this time using geocoder) geocoder_results =",
"get the first item for now print(places_list) print(coordinates) # Set up Folium and",
"== Tree: # If subtree is a noun chunk, i.e. NE != \"O\"",
"2] pns = [token for token in tokens if token[0].isupper()] combs = [f'{x}",
"the time comes! st = StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8') # Functions for putting together",
"ne_in_sent if tag[1] == 'LOCATION'] print(locations) most_common_locations = Counter(locations).most_common(10) pprint(most_common_locations) # Organize some",
"info places_list = [name for name, _ in most_common_locations][:3] # Limit to top",
"> 2] pns = [token for token in tokens if token[0].isupper()] combs =",
"results: if 'country' in item.keys(): countries.append(item['country']) # Determine which country appears most often",
"stanfordNE2tree(tagged_tokens) ne_in_sent = [] for subtree in ne_tree: if type(subtree) == Tree: #",
"appears {} times in book.'.format(places_list[i], c[0], c[1], most_common_locations[places_list[i]])).add_to(basemap) print('Map of relevant locations in",
"# Determine which country appears most often top_country = sorted(Counter(countries))[0] print(top_country) # Iterate",
"with weighted coordinates basemap = folium.Map(location=[37.97945, 23.71622], zoom_start=8, tiles='cartodbpositron', width=960, height=512) for i,",
"as pd from htrc_features import FeatureReader import geocoder import folium from pprint import",
"23.71622], zoom_start=8, tiles='cartodbpositron', width=960, height=512) for i, c in enumerate(coordinates): folium.CircleMarker([c[0], c[1]], radius=most_common_locations[places_list[i]]*.25,",
"fun this time using geocoder) geocoder_results = [] for place in places_list: results",
"Create a list of 'country' from the geonames json results countries = []",
"in geocoder_results: for item in results: if 'country' in item.keys(): countries.append(item['country']) # Determine",
"Broneer et al.\\'s \"Ancient Corinth: A guide to the excavations,\" weighted by frequency.')",
"st.tag(tokens) tagged_tokens = [item for item in tagged_tokens if item[0] != ''] ne_tree",
"subtree in ne_tree: if type(subtree) == Tree: # If subtree is a noun",
"basemap = folium.Map(location=[37.97945, 23.71622], zoom_start=8, tiles='cartodbpositron', width=960, height=512) for i, c in enumerate(coordinates):",
"a username to access the API but we do not want to expose",
"'token', 'pos']) counts['count'] = tokens['count'].tolist() counts[:10] # Reconstruct text using tokens and counts",
"mcl into dictionary # Retrieve json from geonames API (for fun this time",
"''] ne_tree = stanfordNE2tree(tagged_tokens) ne_in_sent = [] for subtree in ne_tree: if type(subtree)",
"# Only get the first item for now print(places_list) print(coordinates) # Set up",
"# Create pandas dataframe with relevant data temp = tokens.index.values.tolist() counts = pd.DataFrame.from_records(temp,",
"see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) # Sample HathiTrust ID # This is the HTID for... #",
"excavations,\" weighted by frequency.') basemap page = 87 test = counts[counts['page'] == page]['token'].tolist()",
"i in range(1, max(counts['page'])+1): tokens = counts[counts['page'] == i]['token'].tolist() tokens = [token for",
"import pandas as pd from htrc_features import FeatureReader import geocoder import folium from",
"we'll deal with it when the time comes! st = StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8')",
"API but we do not want to expose personal info in code #",
"i]['token'].tolist() tokens = [token for token in tokens if token.lower() not in stops",
"# Organize some data for map info places_list = [name for name, _",
"time using geocoder) geocoder_results = [] for place in places_list: results = geocoder.geonames(place,",
"of 'country' from the geonames json results countries = [] for results in",
"page_words+page_ner tokens = word_tokenize(text_reconstruction) tagged_tokens = st.tag(tokens) tagged_tokens = [item for item in",
"fill_opacity=0.5, fill_color='#3186cc', popup='{} ({}, {}) appears {} times in book.'.format(places_list[i], c[0], c[1], most_common_locations[places_list[i]])).add_to(basemap)",
"for map info places_list = [name for name, _ in most_common_locations][:3] # Limit",
"type(subtree) == Tree: # If subtree is a noun chunk, i.e. NE !=",
"this time using geocoder) geocoder_results = [] for place in places_list: results =",
"for this ID; specifically tokenlist fr = FeatureReader(ids=[htid]) for vol in fr: tokens",
"if type(subtree) == Tree: # If subtree is a noun chunk, i.e. NE",
"results countries = [] for results in geocoder_results: for item in results: if",
"nltk.chunk import conlltags2tree from nltk.tree import Tree import pandas as pd from htrc_features",
"in enumerate(geocoder_results): for item in results: if item['country'] == top_country: coordinates.append((float(item['lat']), float(item['lng']))) break",
"of relevant locations in Broneer et al.\\'s \"Ancient Corinth: A guide to the",
"Run this locally by adding USERNAME to environment variables, e.g. to .env, as",
"= 87 test = counts[counts['page'] == page]['token'].tolist() print(test) print(len(test)) from nltk.corpus import stopwords",
"token in tokens if token.lower() not in stops and len(token) > 2] pns",
"json results countries = [] for results in geocoder_results: for item in results:",
"# Iterate over geocoder_results and keep the first lat/long that matches the top",
"that matches the top country coordinates = [] for i, results in enumerate(geocoder_results):",
"= tokens['count'].tolist() counts[:10] # Reconstruct text using tokens and counts text_data = list(zip(counts['token'].tolist(),",
"# # Run this locally by adding USERNAME to environment variables, e.g. to",
"# Cf. https://stackoverflow.com/a/30666949 # # For more information on IOB tagging, see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging)",
"key=USERNAME) jsons = [] for result in results: jsons.append(result.json) geocoder_results.append(jsons) # Create a",
"geocoder import folium from pprint import pprint from tqdm import tqdm # Set",
"the first lat/long that matches the top country coordinates = [] for i,",
"== i]['token'].tolist() tokens = [token for token in tokens if token.lower() not in",
"item[0] != ''] ne_tree = stanfordNE2tree(tagged_tokens) ne_in_sent = [] for subtree in ne_tree:",
"test = counts[counts['page'] == page]['token'].tolist() print(test) print(len(test)) from nltk.corpus import stopwords stops =",
"tag[1] == 'LOCATION'] print(locations) most_common_locations = Counter(locations).most_common(10) pprint(most_common_locations) # Organize some data for",
"ne_string = \" \".join([token for token, pos in subtree.leaves()]) ne_in_sent.append((ne_string, ne_label)) locations =",
"for i, results in enumerate(geocoder_results): for item in results: if item['country'] == top_country:",
"and keep the first lat/long that matches the top country coordinates = []",
"page = 87 test = counts[counts['page'] == page]['token'].tolist() print(test) print(len(test)) from nltk.corpus import",
"item['country'] == top_country: coordinates.append((float(item['lat']), float(item['lng']))) break # Only get the first item for",
"tokenlist fr = FeatureReader(ids=[htid]) for vol in fr: tokens = vol.tokenlist() # Create",
"the first item for now print(places_list) print(coordinates) # Set up Folium and populate",
"it when the time comes! st = StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8') # Functions for",
"in code # # Run this locally by adding USERNAME to environment variables,",
"Tagger # Ignore deprecation warning for now; we'll deal with it when the",
"# Functions for putting together with inside-outside-beginning (IOB) logic # Cf. https://stackoverflow.com/a/30666949 #",
"print(test) print(len(test)) from nltk.corpus import stopwords stops = set(stopwords.words('english')) pns_list = [] for",
"for i in range(0, c): text_list.append(w) random.shuffle(text_list) # Necessary? text_reconstruction = \" \".join(text_list)",
".env, as follows: # > export USERNAME=<insert username here> USERNAME = os.getenv('USERNAME') #",
"from pprint import pprint from tqdm import tqdm # Set environment variable #",
"Set environment variable # Geonames requires a username to access the API but",
"for item in tagged_tokens if item[0] != ''] ne_tree = stanfordNE2tree(tagged_tokens) ne_in_sent =",
"for w, c in text_data: for i in range(0, c): text_list.append(w) random.shuffle(text_list) #",
"popup='{} ({}, {}) appears {} times in book.'.format(places_list[i], c[0], c[1], most_common_locations[places_list[i]])).add_to(basemap) print('Map of",
"import folium from pprint import pprint from tqdm import tqdm # Set environment",
"token.lower() not in stops and len(token) > 2] pns = [token for token",
"locations in Broneer et al.\\'s \"Ancient Corinth: A guide to the excavations,\" weighted",
"[tag[0].title() for tag in ne_in_sent if tag[1] == 'LOCATION'] print(locations) most_common_locations = Counter(locations).most_common(10)",
"relevant locations in Broneer et al.\\'s \"Ancient Corinth: A guide to the excavations,\"",
"weighted coordinates basemap = folium.Map(location=[37.97945, 23.71622], zoom_start=8, tiles='cartodbpositron', width=960, height=512) for i, c",
"HTEF data for this ID; specifically tokenlist fr = FeatureReader(ids=[htid]) for vol in",
"Create pandas dataframe with relevant data temp = tokens.index.values.tolist() counts = pd.DataFrame.from_records(temp, columns=['page',",
"for item in results: if item['country'] == top_country: coordinates.append((float(item['lat']), float(item['lng']))) break # Only",
"stops = set(stopwords.words('english')) pns_list = [] for i in range(1, max(counts['page'])+1): tokens =",
"logic # Cf. https://stackoverflow.com/a/30666949 # # For more information on IOB tagging, see",
"data temp = tokens.index.values.tolist() counts = pd.DataFrame.from_records(temp, columns=['page', 'section', 'token', 'pos']) counts['count'] =",
"i, c in enumerate(coordinates): folium.CircleMarker([c[0], c[1]], radius=most_common_locations[places_list[i]]*.25, color='#3186cc', fill=True, fill_opacity=0.5, fill_color='#3186cc', popup='{} ({},",
"For more information on IOB tagging, see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) # Sample HathiTrust ID #",
"# This is the HTID for... # \"Ancient Corinth: A guide to the",
"temp = tokens.index.values.tolist() counts = pd.DataFrame.from_records(temp, columns=['page', 'section', 'token', 'pos']) counts['count'] = tokens['count'].tolist()",
"folium from pprint import pprint from tqdm import tqdm # Set environment variable",
"# Set environment variable # Geonames requires a username to access the API",
"(for fun this time using geocoder) geocoder_results = [] for place in places_list:",
"tokens if token[0].isupper()] combs = [f'{x} {y}' for x, y in combinations(pns, 2)]",
"in results: jsons.append(result.json) geocoder_results.append(jsons) # Create a list of 'country' from the geonames",
"# Geonames requires a username to access the API but we do not",
"https://stackoverflow.com/a/30666949 # # For more information on IOB tagging, see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) # Sample",
"= word_tokenize(text_reconstruction) tagged_tokens = st.tag(tokens) tagged_tokens = [item for item in tagged_tokens if",
"and multiply words by counts text_list = [] for w, c in text_data:",
"# Imports import os import random from collections import Counter, defaultdict import random",
"coordinates basemap = folium.Map(location=[37.97945, 23.71622], zoom_start=8, tiles='cartodbpositron', width=960, height=512) for i, c in",
"# Create a list of 'country' from the geonames json results countries =",
"to .env, as follows: # > export USERNAME=<insert username here> USERNAME = os.getenv('USERNAME')",
"and len(token) > 2] pns = [token for token in tokens if token[0].isupper()]",
"StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8') # Functions for putting together with inside-outside-beginning (IOB) logic #",
"# Setup Stanford NER Tagger # Ignore deprecation warning for now; we'll deal",
"# Run this locally by adding USERNAME to environment variables, e.g. to .env,",
"[item for item in tagged_tokens if item[0] != ''] ne_tree = stanfordNE2tree(tagged_tokens) ne_in_sent",
"from tqdm import tqdm # Set environment variable # Geonames requires a username",
"c): text_list.append(w) random.shuffle(text_list) # Necessary? text_reconstruction = \" \".join(text_list) #page_words_extended = page_words+page_ner tokens",
"item in results: if 'country' in item.keys(): countries.append(item['country']) # Determine which country appears",
"pns_list = [] for i in range(1, max(counts['page'])+1): tokens = counts[counts['page'] == i]['token'].tolist()",
"import FeatureReader import geocoder import folium from pprint import pprint from tqdm import",
"list(zip(counts['token'].tolist(), counts['count'].tolist())) # Loop through and multiply words by counts text_list = []",
"text_list.append(w) random.shuffle(text_list) # Necessary? text_reconstruction = \" \".join(text_list) #page_words_extended = page_words+page_ner tokens =",
"radius=most_common_locations[places_list[i]]*.25, color='#3186cc', fill=True, fill_opacity=0.5, fill_color='#3186cc', popup='{} ({}, {}) appears {} times in book.'.format(places_list[i],",
"= geocoder.geonames(place, maxRows=5, key=USERNAME) jsons = [] for result in results: jsons.append(result.json) geocoder_results.append(jsons)",
"jsons = [] for result in results: jsons.append(result.json) geocoder_results.append(jsons) # Create a list",
"= counts[counts['page'] == i]['token'].tolist() tokens = [token for token in tokens if token.lower()",
"Imports import os import random from collections import Counter, defaultdict import random from",
"'section', 'token', 'pos']) counts['count'] = tokens['count'].tolist() counts[:10] # Reconstruct text using tokens and",
"USERNAME = os.getenv('USERNAME') # Setup Stanford NER Tagger # Ignore deprecation warning for",
"= \" \".join(text_list) #page_words_extended = page_words+page_ner tokens = word_tokenize(text_reconstruction) tagged_tokens = st.tag(tokens) tagged_tokens",
"A guide to the excavations,\" <NAME>, <NAME>, and <NAME> htid = \"wu.89079728994\" #",
"[name for name, _ in most_common_locations][:3] # Limit to top three most_common_locations =",
"= stanfordNE2tree(tagged_tokens) ne_in_sent = [] for subtree in ne_tree: if type(subtree) == Tree:",
"({}, {}) appears {} times in book.'.format(places_list[i], c[0], c[1], most_common_locations[places_list[i]])).add_to(basemap) print('Map of relevant",
"for token in tokens if token.lower() not in stops and len(token) > 2]",
"deprecation warning for now; we'll deal with it when the time comes! st",
"for vol in fr: tokens = vol.tokenlist() # Create pandas dataframe with relevant",
"= st.tag(tokens) tagged_tokens = [item for item in tagged_tokens if item[0] != '']",
"words by counts text_list = [] for w, c in text_data: for i",
"= set(stopwords.words('english')) pns_list = [] for i in range(1, max(counts['page'])+1): tokens = counts[counts['page']",
"results in geocoder_results: for item in results: if 'country' in item.keys(): countries.append(item['country']) #",
"Stanford NER Tagger # Ignore deprecation warning for now; we'll deal with it",
"for token in tokens if token[0].isupper()] combs = [f'{x} {y}' for x, y",
"in places_list: results = geocoder.geonames(place, maxRows=5, key=USERNAME) jsons = [] for result in",
"import conlltags2tree from nltk.tree import Tree import pandas as pd from htrc_features import",
"dict(most_common_locations) # Turn mcl into dictionary # Retrieve json from geonames API (for",
"from collections import Counter, defaultdict import random from nltk.tag import StanfordNERTagger from nltk.tokenize",
"height=512) for i, c in enumerate(coordinates): folium.CircleMarker([c[0], c[1]], radius=most_common_locations[places_list[i]]*.25, color='#3186cc', fill=True, fill_opacity=0.5, fill_color='#3186cc',",
"pandas dataframe with relevant data temp = tokens.index.values.tolist() counts = pd.DataFrame.from_records(temp, columns=['page', 'section',",
"[] for i, results in enumerate(geocoder_results): for item in results: if item['country'] ==",
"# Get HTEF data for this ID; specifically tokenlist fr = FeatureReader(ids=[htid]) for",
"word_tokenize(text_reconstruction) tagged_tokens = st.tag(tokens) tagged_tokens = [item for item in tagged_tokens if item[0]",
"= [tag[0].title() for tag in ne_in_sent if tag[1] == 'LOCATION'] print(locations) most_common_locations =",
"in results: if item['country'] == top_country: coordinates.append((float(item['lat']), float(item['lng']))) break # Only get the",
"geonames API (for fun this time using geocoder) geocoder_results = [] for place",
"[] for i in range(1, max(counts['page'])+1): tokens = counts[counts['page'] == i]['token'].tolist() tokens =",
"now print(places_list) print(coordinates) # Set up Folium and populate with weighted coordinates basemap",
"expose personal info in code # # Run this locally by adding USERNAME",
"text_data = list(zip(counts['token'].tolist(), counts['count'].tolist())) # Loop through and multiply words by counts text_list",
"info in code # # Run this locally by adding USERNAME to environment",
"with inside-outside-beginning (IOB) logic # Cf. https://stackoverflow.com/a/30666949 # # For more information on",
"first lat/long that matches the top country coordinates = [] for i, results",
"Determine which country appears most often top_country = sorted(Counter(countries))[0] print(top_country) # Iterate over",
"e.g. to .env, as follows: # > export USERNAME=<insert username here> USERNAME =",
"<NAME>, <NAME>, and <NAME> htid = \"wu.89079728994\" # Get HTEF data for this",
"'country' in item.keys(): countries.append(item['country']) # Determine which country appears most often top_country =",
"geocoder_results = [] for place in places_list: results = geocoder.geonames(place, maxRows=5, key=USERNAME) jsons",
"if token.lower() not in stops and len(token) > 2] pns = [token for",
"print(coordinates) # Set up Folium and populate with weighted coordinates basemap = folium.Map(location=[37.97945,",
"is a noun chunk, i.e. NE != \"O\" ne_label = subtree.label() ne_string =",
"tokens = counts[counts['page'] == i]['token'].tolist() tokens = [token for token in tokens if",
"[] for results in geocoder_results: for item in results: if 'country' in item.keys():",
"pprint(most_common_locations) # Organize some data for map info places_list = [name for name,",
"<NAME> htid = \"wu.89079728994\" # Get HTEF data for this ID; specifically tokenlist",
"country appears most often top_country = sorted(Counter(countries))[0] print(top_country) # Iterate over geocoder_results and",
"tqdm # Set environment variable # Geonames requires a username to access the",
"when the time comes! st = StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8') # Functions for putting",
"code # # Run this locally by adding USERNAME to environment variables, e.g.",
"'pos']) counts['count'] = tokens['count'].tolist() counts[:10] # Reconstruct text using tokens and counts text_data",
"counts[:10] # Reconstruct text using tokens and counts text_data = list(zip(counts['token'].tolist(), counts['count'].tolist())) #",
"pprint import pprint from tqdm import tqdm # Set environment variable # Geonames",
"ne_tree: if type(subtree) == Tree: # If subtree is a noun chunk, i.e.",
"\" \".join([token for token, pos in subtree.leaves()]) ne_in_sent.append((ne_string, ne_label)) locations = [tag[0].title() for",
"i, results in enumerate(geocoder_results): for item in results: if item['country'] == top_country: coordinates.append((float(item['lat']),",
"jsons.append(result.json) geocoder_results.append(jsons) # Create a list of 'country' from the geonames json results",
"in tokens if token.lower() not in stops and len(token) > 2] pns =",
"token, pos in subtree.leaves()]) ne_in_sent.append((ne_string, ne_label)) locations = [tag[0].title() for tag in ne_in_sent",
"os.getenv('USERNAME') # Setup Stanford NER Tagger # Ignore deprecation warning for now; we'll",
"country coordinates = [] for i, results in enumerate(geocoder_results): for item in results:",
"print(places_list) print(coordinates) # Set up Folium and populate with weighted coordinates basemap =",
"Cf. https://stackoverflow.com/a/30666949 # # For more information on IOB tagging, see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) #",
"StanfordNERTagger from nltk.tokenize import word_tokenize from nltk import pos_tag from nltk.chunk import conlltags2tree",
"tagged_tokens = st.tag(tokens) tagged_tokens = [item for item in tagged_tokens if item[0] !=",
"geocoder.geonames(place, maxRows=5, key=USERNAME) jsons = [] for result in results: jsons.append(result.json) geocoder_results.append(jsons) #",
"in enumerate(coordinates): folium.CircleMarker([c[0], c[1]], radius=most_common_locations[places_list[i]]*.25, color='#3186cc', fill=True, fill_opacity=0.5, fill_color='#3186cc', popup='{} ({}, {}) appears",
"\"O\" ne_label = subtree.label() ne_string = \" \".join([token for token, pos in subtree.leaves()])",
"keep the first lat/long that matches the top country coordinates = [] for",
"want to expose personal info in code # # Run this locally by",
"[] for place in places_list: results = geocoder.geonames(place, maxRows=5, key=USERNAME) jsons = []",
"# Set up Folium and populate with weighted coordinates basemap = folium.Map(location=[37.97945, 23.71622],",
"Corinth: A guide to the excavations,\" <NAME>, <NAME>, and <NAME> htid = \"wu.89079728994\"",
"= folium.Map(location=[37.97945, 23.71622], zoom_start=8, tiles='cartodbpositron', width=960, height=512) for i, c in enumerate(coordinates): folium.CircleMarker([c[0],",
"comes! st = StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8') # Functions for putting together with inside-outside-beginning",
"ne_label)) locations = [tag[0].title() for tag in ne_in_sent if tag[1] == 'LOCATION'] print(locations)",
"countries = [] for results in geocoder_results: for item in results: if 'country'",
"tagged_tokens = [item for item in tagged_tokens if item[0] != ''] ne_tree =",
"most often top_country = sorted(Counter(countries))[0] print(top_country) # Iterate over geocoder_results and keep the",
"nltk.tag import StanfordNERTagger from nltk.tokenize import word_tokenize from nltk import pos_tag from nltk.chunk",
"# Reconstruct text using tokens and counts text_data = list(zip(counts['token'].tolist(), counts['count'].tolist())) # Loop",
"columns=['page', 'section', 'token', 'pos']) counts['count'] = tokens['count'].tolist() counts[:10] # Reconstruct text using tokens",
"USERNAME=<insert username here> USERNAME = os.getenv('USERNAME') # Setup Stanford NER Tagger # Ignore",
"float(item['lng']))) break # Only get the first item for now print(places_list) print(coordinates) #",
"noun chunk, i.e. NE != \"O\" ne_label = subtree.label() ne_string = \" \".join([token",
"[] for w, c in text_data: for i in range(0, c): text_list.append(w) random.shuffle(text_list)",
"in Broneer et al.\\'s \"Ancient Corinth: A guide to the excavations,\" weighted by",
"al.\\'s \"Ancient Corinth: A guide to the excavations,\" weighted by frequency.') basemap page",
"\"Ancient Corinth: A guide to the excavations,\" weighted by frequency.') basemap page =",
"Necessary? text_reconstruction = \" \".join(text_list) #page_words_extended = page_words+page_ner tokens = word_tokenize(text_reconstruction) tagged_tokens =",
"\"wu.89079728994\" # Get HTEF data for this ID; specifically tokenlist fr = FeatureReader(ids=[htid])",
"Set up Folium and populate with weighted coordinates basemap = folium.Map(location=[37.97945, 23.71622], zoom_start=8,",
"'LOCATION'] print(locations) most_common_locations = Counter(locations).most_common(10) pprint(most_common_locations) # Organize some data for map info",
"text_data: for i in range(0, c): text_list.append(w) random.shuffle(text_list) # Necessary? text_reconstruction = \"",
"print(locations) most_common_locations = Counter(locations).most_common(10) pprint(most_common_locations) # Organize some data for map info places_list",
"if item['country'] == top_country: coordinates.append((float(item['lat']), float(item['lng']))) break # Only get the first item",
"FeatureReader import geocoder import folium from pprint import pprint from tqdm import tqdm",
"from nltk.tree import Tree import pandas as pd from htrc_features import FeatureReader import",
"import Counter, defaultdict import random from nltk.tag import StanfordNERTagger from nltk.tokenize import word_tokenize",
"name, _ in most_common_locations][:3] # Limit to top three most_common_locations = dict(most_common_locations) #",
"random from collections import Counter, defaultdict import random from nltk.tag import StanfordNERTagger from",
"places_list: results = geocoder.geonames(place, maxRows=5, key=USERNAME) jsons = [] for result in results:",
"not in stops and len(token) > 2] pns = [token for token in",
"we do not want to expose personal info in code # # Run",
"pd.DataFrame.from_records(temp, columns=['page', 'section', 'token', 'pos']) counts['count'] = tokens['count'].tolist() counts[:10] # Reconstruct text using",
"if token[0].isupper()] combs = [f'{x} {y}' for x, y in combinations(pns, 2)] pns_list.extend(combs)",
"page]['token'].tolist() print(test) print(len(test)) from nltk.corpus import stopwords stops = set(stopwords.words('english')) pns_list = []",
"using tokens and counts text_data = list(zip(counts['token'].tolist(), counts['count'].tolist())) # Loop through and multiply",
"for item in results: if 'country' in item.keys(): countries.append(item['country']) # Determine which country",
"tokens = vol.tokenlist() # Create pandas dataframe with relevant data temp = tokens.index.values.tolist()",
"tokens['count'].tolist() counts[:10] # Reconstruct text using tokens and counts text_data = list(zip(counts['token'].tolist(), counts['count'].tolist()))",
"text_reconstruction = \" \".join(text_list) #page_words_extended = page_words+page_ner tokens = word_tokenize(text_reconstruction) tagged_tokens = st.tag(tokens)",
"HathiTrust ID # This is the HTID for... # \"Ancient Corinth: A guide",
"the top country coordinates = [] for i, results in enumerate(geocoder_results): for item",
"results: jsons.append(result.json) geocoder_results.append(jsons) # Create a list of 'country' from the geonames json",
"item.keys(): countries.append(item['country']) # Determine which country appears most often top_country = sorted(Counter(countries))[0] print(top_country)",
"on IOB tagging, see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) # Sample HathiTrust ID # This is the",
"tokens = [token for token in tokens if token.lower() not in stops and",
"# Ignore deprecation warning for now; we'll deal with it when the time",
"htrc_features import FeatureReader import geocoder import folium from pprint import pprint from tqdm",
"if 'country' in item.keys(): countries.append(item['country']) # Determine which country appears most often top_country",
"vol in fr: tokens = vol.tokenlist() # Create pandas dataframe with relevant data",
"results: if item['country'] == top_country: coordinates.append((float(item['lat']), float(item['lng']))) break # Only get the first",
"This is the HTID for... # \"Ancient Corinth: A guide to the excavations,\"",
"import tqdm # Set environment variable # Geonames requires a username to access",
"weighted by frequency.') basemap page = 87 test = counts[counts['page'] == page]['token'].tolist() print(test)",
"adding USERNAME to environment variables, e.g. to .env, as follows: # > export",
"Limit to top three most_common_locations = dict(most_common_locations) # Turn mcl into dictionary #",
"fill=True, fill_opacity=0.5, fill_color='#3186cc', popup='{} ({}, {}) appears {} times in book.'.format(places_list[i], c[0], c[1],",
"Reconstruct text using tokens and counts text_data = list(zip(counts['token'].tolist(), counts['count'].tolist())) # Loop through",
"import StanfordNERTagger from nltk.tokenize import word_tokenize from nltk import pos_tag from nltk.chunk import",
"print(top_country) # Iterate over geocoder_results and keep the first lat/long that matches the",
"over geocoder_results and keep the first lat/long that matches the top country coordinates",
"conlltags2tree from nltk.tree import Tree import pandas as pd from htrc_features import FeatureReader",
"most_common_locations][:3] # Limit to top three most_common_locations = dict(most_common_locations) # Turn mcl into",
"places_list = [name for name, _ in most_common_locations][:3] # Limit to top three",
"= list(zip(counts['token'].tolist(), counts['count'].tolist())) # Loop through and multiply words by counts text_list =",
"appears most often top_country = sorted(Counter(countries))[0] print(top_country) # Iterate over geocoder_results and keep",
"# If subtree is a noun chunk, i.e. NE != \"O\" ne_label =",
"pd from htrc_features import FeatureReader import geocoder import folium from pprint import pprint",
"guide to the excavations,\" <NAME>, <NAME>, and <NAME> htid = \"wu.89079728994\" # Get",
"for subtree in ne_tree: if type(subtree) == Tree: # If subtree is a",
"i in range(0, c): text_list.append(w) random.shuffle(text_list) # Necessary? text_reconstruction = \" \".join(text_list) #page_words_extended",
"= pd.DataFrame.from_records(temp, columns=['page', 'section', 'token', 'pos']) counts['count'] = tokens['count'].tolist() counts[:10] # Reconstruct text",
"geocoder_results: for item in results: if 'country' in item.keys(): countries.append(item['country']) # Determine which",
"c in enumerate(coordinates): folium.CircleMarker([c[0], c[1]], radius=most_common_locations[places_list[i]]*.25, color='#3186cc', fill=True, fill_opacity=0.5, fill_color='#3186cc', popup='{} ({}, {})",
"by adding USERNAME to environment variables, e.g. to .env, as follows: # >",
"the HTID for... # \"Ancient Corinth: A guide to the excavations,\" <NAME>, <NAME>,",
"pos in subtree.leaves()]) ne_in_sent.append((ne_string, ne_label)) locations = [tag[0].title() for tag in ne_in_sent if",
"{} times in book.'.format(places_list[i], c[0], c[1], most_common_locations[places_list[i]])).add_to(basemap) print('Map of relevant locations in Broneer",
"nltk.corpus import stopwords stops = set(stopwords.words('english')) pns_list = [] for i in range(1,",
"Sample HathiTrust ID # This is the HTID for... # \"Ancient Corinth: A",
"= [token for token in tokens if token[0].isupper()] combs = [f'{x} {y}' for",
"and <NAME> htid = \"wu.89079728994\" # Get HTEF data for this ID; specifically",
"lat/long that matches the top country coordinates = [] for i, results in",
"username to access the API but we do not want to expose personal",
"locations = [tag[0].title() for tag in ne_in_sent if tag[1] == 'LOCATION'] print(locations) most_common_locations",
"for token, pos in subtree.leaves()]) ne_in_sent.append((ne_string, ne_label)) locations = [tag[0].title() for tag in",
"with relevant data temp = tokens.index.values.tolist() counts = pd.DataFrame.from_records(temp, columns=['page', 'section', 'token', 'pos'])",
"Only get the first item for now print(places_list) print(coordinates) # Set up Folium",
"vol.tokenlist() # Create pandas dataframe with relevant data temp = tokens.index.values.tolist() counts =",
"print(len(test)) from nltk.corpus import stopwords stops = set(stopwords.words('english')) pns_list = [] for i",
"putting together with inside-outside-beginning (IOB) logic # Cf. https://stackoverflow.com/a/30666949 # # For more",
"frequency.') basemap page = 87 test = counts[counts['page'] == page]['token'].tolist() print(test) print(len(test)) from",
"using geocoder) geocoder_results = [] for place in places_list: results = geocoder.geonames(place, maxRows=5,",
"ID # This is the HTID for... # \"Ancient Corinth: A guide to",
"= [] for result in results: jsons.append(result.json) geocoder_results.append(jsons) # Create a list of",
"coordinates.append((float(item['lat']), float(item['lng']))) break # Only get the first item for now print(places_list) print(coordinates)",
"tokens.index.values.tolist() counts = pd.DataFrame.from_records(temp, columns=['page', 'section', 'token', 'pos']) counts['count'] = tokens['count'].tolist() counts[:10] #",
"in text_data: for i in range(0, c): text_list.append(w) random.shuffle(text_list) # Necessary? text_reconstruction =",
"= page_words+page_ner tokens = word_tokenize(text_reconstruction) tagged_tokens = st.tag(tokens) tagged_tokens = [item for item",
"for... # \"Ancient Corinth: A guide to the excavations,\" <NAME>, <NAME>, and <NAME>",
"data for map info places_list = [name for name, _ in most_common_locations][:3] #",
"== top_country: coordinates.append((float(item['lat']), float(item['lng']))) break # Only get the first item for now",
"import pprint from tqdm import tqdm # Set environment variable # Geonames requires",
"tiles='cartodbpositron', width=960, height=512) for i, c in enumerate(coordinates): folium.CircleMarker([c[0], c[1]], radius=most_common_locations[places_list[i]]*.25, color='#3186cc', fill=True,",
"ne_in_sent.append((ne_string, ne_label)) locations = [tag[0].title() for tag in ne_in_sent if tag[1] == 'LOCATION']",
"= [] for w, c in text_data: for i in range(0, c): text_list.append(w)",
"from nltk.corpus import stopwords stops = set(stopwords.words('english')) pns_list = [] for i in",
"geocoder_results.append(jsons) # Create a list of 'country' from the geonames json results countries",
"username here> USERNAME = os.getenv('USERNAME') # Setup Stanford NER Tagger # Ignore deprecation",
"in ne_tree: if type(subtree) == Tree: # If subtree is a noun chunk,",
"for result in results: jsons.append(result.json) geocoder_results.append(jsons) # Create a list of 'country' from",
"break # Only get the first item for now print(places_list) print(coordinates) # Set",
"# For more information on IOB tagging, see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) # Sample HathiTrust ID",
"[] for subtree in ne_tree: if type(subtree) == Tree: # If subtree is",
"!= ''] ne_tree = stanfordNE2tree(tagged_tokens) ne_in_sent = [] for subtree in ne_tree: if",
"Counter(locations).most_common(10) pprint(most_common_locations) # Organize some data for map info places_list = [name for",
"print('Map of relevant locations in Broneer et al.\\'s \"Ancient Corinth: A guide to",
"# # For more information on IOB tagging, see https://en.wikipedia.org/wiki/Inside–outside–beginning_(tagging) # Sample HathiTrust",
"st = StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8') # Functions for putting together with inside-outside-beginning (IOB)",
"most_common_locations[places_list[i]])).add_to(basemap) print('Map of relevant locations in Broneer et al.\\'s \"Ancient Corinth: A guide",
"= [] for results in geocoder_results: for item in results: if 'country' in",
"ne_in_sent = [] for subtree in ne_tree: if type(subtree) == Tree: # If",
"results = geocoder.geonames(place, maxRows=5, key=USERNAME) jsons = [] for result in results: jsons.append(result.json)",
"the excavations,\" <NAME>, <NAME>, and <NAME> htid = \"wu.89079728994\" # Get HTEF data",
"export USERNAME=<insert username here> USERNAME = os.getenv('USERNAME') # Setup Stanford NER Tagger #",
"counts['count'].tolist())) # Loop through and multiply words by counts text_list = [] for",
"import word_tokenize from nltk import pos_tag from nltk.chunk import conlltags2tree from nltk.tree import",
"= [] for i, results in enumerate(geocoder_results): for item in results: if item['country']",
"htid = \"wu.89079728994\" # Get HTEF data for this ID; specifically tokenlist fr",
"Counter, defaultdict import random from nltk.tag import StanfordNERTagger from nltk.tokenize import word_tokenize from",
"enumerate(geocoder_results): for item in results: if item['country'] == top_country: coordinates.append((float(item['lat']), float(item['lng']))) break #",
"in results: if 'country' in item.keys(): countries.append(item['country']) # Determine which country appears most",
"deal with it when the time comes! st = StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz', '/usr/local/share/stanford-ner/stanford-ner.jar', encoding='utf-8') #",
"variables, e.g. to .env, as follows: # > export USERNAME=<insert username here> USERNAME",
"Functions for putting together with inside-outside-beginning (IOB) logic # Cf. https://stackoverflow.com/a/30666949 # #",
"text using tokens and counts text_data = list(zip(counts['token'].tolist(), counts['count'].tolist())) # Loop through and",
"for now; we'll deal with it when the time comes! st = StanfordNERTagger('/usr/local/share/stanford-ner/classifiers/english.all.3class.distsim.crf.ser.gz',",
"by frequency.') basemap page = 87 test = counts[counts['page'] == page]['token'].tolist() print(test) print(len(test))",
"to top three most_common_locations = dict(most_common_locations) # Turn mcl into dictionary # Retrieve",
"a noun chunk, i.e. NE != \"O\" ne_label = subtree.label() ne_string = \"",
"the geonames json results countries = [] for results in geocoder_results: for item",
"== page]['token'].tolist() print(test) print(len(test)) from nltk.corpus import stopwords stops = set(stopwords.words('english')) pns_list =",
"Turn mcl into dictionary # Retrieve json from geonames API (for fun this",
"requires a username to access the API but we do not want to",
"text_list = [] for w, c in text_data: for i in range(0, c):",
"= [] for i in range(1, max(counts['page'])+1): tokens = counts[counts['page'] == i]['token'].tolist() tokens"
] |
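Running the script end to end needs a local Stanford NER install, NLTK tagger data, and HTEF access; as a quick sanity check, the IOB helpers above can be exercised on a toy, hand-tagged sentence (hypothetical values, not output from the book):

toy_tagged = [('Ancient', 'LOCATION'), ('Corinth', 'LOCATION'),
              ('lies', 'O'), ('in', 'O'), ('Greece', 'LOCATION')]
print(stanfordNE2tree(toy_tagged))
# Expected shape (POS tags may vary):
# (S (LOCATION Ancient/NNP Corinth/NNP) lies/VBZ in/IN (LOCATION Greece/NNP))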
# virtcam: webcam-backed frame source (MJPG over V4L2)
import cv2

import virtcam.debug as debug
from virtcam.base import Frame, FrameSource, Image, Mask, StreamConfig, immutable


class Webcam(FrameSource):
    def __init__(self):
        super().__init__()
        self.current_id = -1
        # Open the first V4L2 device and request the MJPG codec.
        self.camera = cv2.VideoCapture("/dev/video0", cv2.CAP_V4L2)
        c1, c2, c3, c4 = "M", "J", "P", "G"
        codec = cv2.VideoWriter_fourcc(c1, c2, c3, c4)
        self.camera.set(cv2.CAP_PROP_FOURCC, codec)
        # Read back the stream parameters the camera actually negotiated.
        camConfig = StreamConfig(
            int(self.camera.get(cv2.CAP_PROP_FRAME_WIDTH)),
            int(self.camera.get(cv2.CAP_PROP_FRAME_HEIGHT)),
            int(self.camera.get(cv2.CAP_PROP_FPS)),
        )
        self._init_config(camConfig)
        # Start with an empty frame of the right size until the first read.
        self.frame = Frame(self.config, Image(self.config.width, self.config.height), self.fullmask)
        debug.config("Webcam:init:config", camConfig)

    def grab(self) -> bool:
        return True

    def next(self, frame_id: int) -> Frame:
        # Re-read from the camera only when a new frame id is requested;
        # repeated calls with the same id return the cached frame.
        if not self.frame or self.current_id != frame_id:
            grabbed = False
            while not grabbed:
                grabbed, image = self.camera.read()
            self.frame = Frame(self.config, immutable(image), self.fullmask)
            self.current_id = frame_id
            # debug.frame(f"Webcam:next[{frame_id}]", self.frame)
        return self.frame
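Since virtcam.base is not shown here, a minimal OpenCV-only sketch of what the class does (same device path and properties; standalone usage is hypothetical):

import cv2

cam = cv2.VideoCapture("/dev/video0", cv2.CAP_V4L2)
cam.set(cv2.CAP_PROP_FOURCC, cv2.VideoWriter_fourcc(*"MJPG"))
width = int(cam.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cam.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = int(cam.get(cv2.CAP_PROP_FPS))
print(f"negotiated {width}x{height} @ {fps}fps")

grabbed = False
while not grabbed:          # same retry loop as Webcam.next()
    grabbed, image = cam.read()
print(image.shape)          # (height, width, 3) BGR array
cam.release()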
# Toolkits/VCS/repology__repology-api/repology/packageproc.py
# Copyright (C) 2016-2017 <NAME> <<EMAIL>>
#
# This file is part of repology
#
# repology is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# repology is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with repology. If not, see <http://www.gnu.org/licenses/>.

import sys
from functools import cmp_to_key

from repology.package import *
from repology.version import VersionCompare


def PackagesMerge(packages):
    aggregated = {}

    # aggregate by subrepo/name/version
    # this is just to make merging faster, as packages
    # with same subrepo/name/version may or may not merge
    for package in packages:
        key = (package.subrepo, package.name, package.version)
        aggregated.setdefault(key, []).append(package)

    outpkgs = []
    for packages in aggregated.values():
        while packages:
            nextpackages = []
            merged = packages[0]
            for package in packages[1:]:
                if not merged.TryMerge(package):
                    nextpackages.append(package)
            outpkgs.append(merged)
            packages = nextpackages

    return outpkgs


def PackagesetCheckFilters(packages, *filters):
    for filt in filters:
        if not filt.Check(packages):
            return False

    return True


def FillPackagesetVersions(packages):
    versions = set()
    families = set()

    for package in packages:
        if not package.ignoreversion:
            versions.add(package.version)
        families.add(package.family)

    bestversion = None
    for version in versions:
        if bestversion is None or VersionCompare(version, bestversion) > 0:
            bestversion = version

    for package in packages:
        result = VersionCompare(package.version, bestversion) if bestversion is not None else 1
        if result > 0:
            package.versionclass = PackageVersionClass.ignored
        elif result == 0:
            # XXX: if len(families) == 1 -> PackageVersionClass.unique
            package.versionclass = PackageVersionClass.newest
        else:
            package.versionclass = PackageVersionClass.outdated


def PackagesetToSummaries(packages):
    summary = {}

    state_by_repo = {}
    families = set()

    for package in packages:
        families.add(package.family)

        if package.repo not in state_by_repo:
            state_by_repo[package.repo] = {
                'has_outdated': False,
                'bestpackage': None,
                'count': 0
            }

        if package.versionclass == PackageVersionClass.outdated:
            state_by_repo[package.repo]['has_outdated'] = True

        if state_by_repo[package.repo]['bestpackage'] is None or VersionCompare(package.version, state_by_repo[package.repo]['bestpackage'].version) > 0:
            state_by_repo[package.repo]['bestpackage'] = package

        state_by_repo[package.repo]['count'] += 1

    for repo, state in state_by_repo.items():
        resulting_class = None

        # XXX: lonely ignored package is currently lonely; should it be ignored instead?
        if state['bestpackage'].versionclass == PackageVersionClass.outdated:
            resulting_class = RepositoryVersionClass.outdated
        elif len(families) == 1:
            resulting_class = RepositoryVersionClass.lonely
        elif state['bestpackage'].versionclass == PackageVersionClass.newest:
            if state['has_outdated']:
                resulting_class = RepositoryVersionClass.mixed
            else:
                resulting_class = RepositoryVersionClass.newest
        elif state['bestpackage'].versionclass == PackageVersionClass.ignored:
            resulting_class = RepositoryVersionClass.ignored

        summary[repo] = {
            'version': state['bestpackage'].version,
            'bestpackage': state['bestpackage'],
            'versionclass': resulting_class,
            'numpackages': state['count']
        }

    return summary


def PackagesetSortByVersions(packages):
    def packages_version_cmp_reverse(p1, p2):
        return VersionCompare(p2.version, p1.version)

    return sorted(packages, key=cmp_to_key(packages_version_cmp_reverse))


def PackagesetToFamilies(packages):
    return set([package.family for package in packages])


def PackagesetAggregateByVersions(packages):
    versions = {}
    for package in packages:
        key = (package.version, package.versionclass)
        if key not in versions:
            versions[key] = []
        versions[key].append(package)

    def key_cmp_reverse(v1, v2):
        return VersionCompare(v2[0], v1[0])

    # sort order assumed: descending by version via key_cmp_reverse above
    return [
        {
            'version': key[0],
            'versionclass': key[1],
            'packages': versions[key]
        } for key in sorted(versions.keys(), key=cmp_to_key(key_cmp_reverse))
    ]
"in packages]) def PackagesetAggregateByVersions(packages): versions = {} for package in packages: key =",
"General Public License for more details. # # You should have received a",
"0: bestversion = version for package in packages: result = VersionCompare(package.version, bestversion) if",
"'version': key[0], 'versionclass': key[1], 'packages': versions[key] } for key in sorted(versions.keys(), key=cmp_to_key(key_cmp_reverse)) ]",
"for packages in aggregated.values(): while packages: nextpackages = [] merged = packages[0] for",
"functools import cmp_to_key from repology.package import * from repology.version import VersionCompare def PackagesMerge(packages):",
"the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See",
"None else 1 if result > 0: package.versionclass = PackageVersionClass.ignored elif result ==",
"state_by_repo[package.repo]['bestpackage'] is None or VersionCompare(package.version, state_by_repo[package.repo]['bestpackage'].version) > 0: state_by_repo[package.repo]['bestpackage'] = package state_by_repo[package.repo]['count'] +=",
"state['has_outdated']: resulting_class = RepositoryVersionClass.mixed else: resulting_class = RepositoryVersionClass.newest elif state['bestpackage'].versionclass == PackageVersionClass.ignored: resulting_class",
"faster, as packages # with same subrepo/name/version may or may not merge for",
"True, if state_by_repo[package.repo]['bestpackage'] is None or VersionCompare(package.version, state_by_repo[package.repo]['bestpackage'].version) > 0: state_by_repo[package.repo]['bestpackage'] = package",
"package is currently lonely; should it be ignored instead? if state['bestpackage'].versionclass == PackageVersionClass.outdated:",
"package in packages: families.add(package.family) if package.repo not in state_by_repo: state_by_repo[package.repo] = { 'has_outdated':",
"for package in packages: families.add(package.family) if package.repo not in state_by_repo: state_by_repo[package.repo] = {",
"same subrepo/name/version may or may not merge for package in packages: key =",
"'numpackages': state['count'] } return summary def PackagesetSortByVersions(packages): def packages_version_cmp_reverse(p1, p2): return VersionCompare(p2.version, p1.version)",
"with same subrepo/name/version may or may not merge for package in packages: key",
"if not merged.TryMerge(package): nextpackages.append(package) outpkgs.append(merged) packages = nextpackages return outpkgs def PackagesetCheckFilters(packages, *filters):",
"set([package.family for package in packages]) def PackagesetAggregateByVersions(packages): versions = {} for package in",
"[] for packages in aggregated.values(): while packages: nextpackages = [] merged = packages[0]",
"the # GNU General Public License for more details. # # You should",
"0: # XXX: if len(families) == 1 -> PackageVersionClass.unique package.versionclass = PackageVersionClass.newest else:",
"or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License",
"for more details. # # You should have received a copy of the",
"== 0: # XXX: if len(families) == 1 -> PackageVersionClass.unique package.versionclass = PackageVersionClass.newest",
"# with same subrepo/name/version may or may not merge for package in packages:",
"VersionCompare(version, bestversion) > 0: bestversion = version for package in packages: result =",
"instead? if state['bestpackage'].versionclass == PackageVersionClass.outdated: resulting_class = RepositoryVersionClass.outdated elif len(families) == 1: resulting_class",
"License, or # (at your option) any later version. # # repology is",
"result = VersionCompare(package.version, bestversion) if bestversion is not None else 1 if result",
"if key not in versions: versions[key] = [] versions[key].append(package) def key_cmp_reverse(v1, v2): return",
"[] versions[key].append(package) def key_cmp_reverse(v1, v2): return VersionCompare(v2[0], v1[0]) return [ { 'version': key[0],",
"merged.TryMerge(package): nextpackages.append(package) outpkgs.append(merged) packages = nextpackages return outpkgs def PackagesetCheckFilters(packages, *filters): for filt",
"more details. # # You should have received a copy of the GNU",
"of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU",
"GNU General Public License for more details. # # You should have received",
"RepositoryVersionClass.ignored summary[repo] = { 'version': state['bestpackage'].version, 'bestpackage': state['bestpackage'], 'versionclass': resulting_class, 'numpackages': state['count'] }",
"packages: result = VersionCompare(package.version, bestversion) if bestversion is not None else 1 if",
"set() for package in packages: families.add(package.family) if package.repo not in state_by_repo: state_by_repo[package.repo] =",
"# # repology is free software: you can redistribute it and/or modify #",
"under the terms of the GNU General Public License as published by #",
"versions[key] = [] versions[key].append(package) def key_cmp_reverse(v1, v2): return VersionCompare(v2[0], v1[0]) return [ {",
"= nextpackages return outpkgs def PackagesetCheckFilters(packages, *filters): for filt in filters: if not",
"VersionCompare(p2.version, p1.version) return sorted(packages, key=cmp_to_key(packages_version_cmp_reverse)) def PackagesetToFamilies(packages): return set([package.family for package in packages])",
"state_by_repo: state_by_repo[package.repo] = { 'has_outdated': False, 'bestpackage': None, 'count': 0 } if package.versionclass",
"Free Software Foundation, either version 3 of the License, or # (at your",
"summary[repo] = { 'version': state['bestpackage'].version, 'bestpackage': state['bestpackage'], 'versionclass': resulting_class, 'numpackages': state['count'] } return",
"for package in packages: key = (package.version, package.versionclass) if key not in versions:",
"lonely ignored package is currently lonely; should it be ignored instead? if state['bestpackage'].versionclass",
"key = (package.version, package.versionclass) if key not in versions: versions[key] = [] versions[key].append(package)",
"distributed in the hope that it will be useful, # but WITHOUT ANY",
"nextpackages return outpkgs def PackagesetCheckFilters(packages, *filters): for filt in filters: if not filt.Check(packages):",
"either version 3 of the License, or # (at your option) any later",
"*filters): for filt in filters: if not filt.Check(packages): return False return True def",
"False return True def FillPackagesetVersions(packages): versions = set() families = set() for package",
"published by # the Free Software Foundation, either version 3 of the License,",
"= None for version in versions: if bestversion is None or VersionCompare(version, bestversion)",
"= set() families = set() for package in packages: if not package.ignoreversion: versions.add(package.version)",
"not merge for package in packages: key = (package.subrepo, package.name, package.version) aggregated.setdefault(key, []).append(package)",
"return [ { 'version': key[0], 'versionclass': key[1], 'packages': versions[key] } for key in",
"for repo, state in state_by_repo.items(): resulting_class = None # XXX: lonely ignored package",
"PackagesetToFamilies(packages): return set([package.family for package in packages]) def PackagesetAggregateByVersions(packages): versions = {} for",
"it under the terms of the GNU General Public License as published by",
"= (package.version, package.versionclass) if key not in versions: versions[key] = [] versions[key].append(package) def",
"packages: if not package.ignoreversion: versions.add(package.version) families.add(package.family) bestversion = None for version in versions:",
"should it be ignored instead? if state['bestpackage'].versionclass == PackageVersionClass.outdated: resulting_class = RepositoryVersionClass.outdated elif",
"# # repology is distributed in the hope that it will be useful,",
"you can redistribute it and/or modify # it under the terms of the",
"= PackageVersionClass.outdated def PackagesetToSummaries(packages): summary = {} state_by_repo = {} families = set()",
"of the GNU General Public License as published by # the Free Software",
"PackagesetCheckFilters(packages, *filters): for filt in filters: if not filt.Check(packages): return False return True",
"Public License as published by # the Free Software Foundation, either version 3",
"state['bestpackage'], 'versionclass': resulting_class, 'numpackages': state['count'] } return summary def PackagesetSortByVersions(packages): def packages_version_cmp_reverse(p1, p2):",
"in packages: result = VersionCompare(package.version, bestversion) if bestversion is not None else 1",
"of the License, or # (at your option) any later version. # #",
"PARTICULAR PURPOSE. See the # GNU General Public License for more details. #",
"not filt.Check(packages): return False return True def FillPackagesetVersions(packages): versions = set() families =",
"for filt in filters: if not filt.Check(packages): return False return True def FillPackagesetVersions(packages):",
"PackageVersionClass.outdated def PackagesetToSummaries(packages): summary = {} state_by_repo = {} families = set() for",
"not merged.TryMerge(package): nextpackages.append(package) outpkgs.append(merged) packages = nextpackages return outpkgs def PackagesetCheckFilters(packages, *filters): for",
"def PackagesMerge(packages): aggregated = {} # aggregate by subrepo/name/version # this is just",
"(at your option) any later version. # # repology is distributed in the",
"if package.versionclass == PackageVersionClass.outdated: state_by_repo[package.repo]['has_outdated'] = True, if state_by_repo[package.repo]['bestpackage'] is None or VersionCompare(package.version,",
"== PackageVersionClass.outdated: resulting_class = RepositoryVersionClass.outdated elif len(families) == 1: resulting_class = RepositoryVersionClass.lonely elif",
"repology is free software: you can redistribute it and/or modify # it under",
"package in packages: result = VersionCompare(package.version, bestversion) if bestversion is not None else",
"hope that it will be useful, # but WITHOUT ANY WARRANTY; without even",
"# Copyright (C) 2016-2017 <NAME> <<EMAIL>> # # This file is part of",
"v2): return VersionCompare(v2[0], v1[0]) return [ { 'version': key[0], 'versionclass': key[1], 'packages': versions[key]",
"bestversion) if bestversion is not None else 1 if result > 0: package.versionclass",
"1: resulting_class = RepositoryVersionClass.lonely elif state['bestpackage'].versionclass == PackageVersionClass.newest: if state['has_outdated']: resulting_class = RepositoryVersionClass.mixed",
"if result > 0: package.versionclass = PackageVersionClass.ignored elif result == 0: # XXX:",
"= packages[0] for package in packages[1:]: if not merged.TryMerge(package): nextpackages.append(package) outpkgs.append(merged) packages =",
"as packages # with same subrepo/name/version may or may not merge for package",
"in filters: if not filt.Check(packages): return False return True def FillPackagesetVersions(packages): versions =",
"versions[key].append(package) def key_cmp_reverse(v1, v2): return VersionCompare(v2[0], v1[0]) return [ { 'version': key[0], 'versionclass':",
"{ 'version': state['bestpackage'].version, 'bestpackage': state['bestpackage'], 'versionclass': resulting_class, 'numpackages': state['count'] } return summary def",
"p2): return VersionCompare(p2.version, p1.version) return sorted(packages, key=cmp_to_key(packages_version_cmp_reverse)) def PackagesetToFamilies(packages): return set([package.family for package",
"in packages: key = (package.version, package.versionclass) if key not in versions: versions[key] =",
"useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of #",
"filters: if not filt.Check(packages): return False return True def FillPackagesetVersions(packages): versions = set()",
"packages = nextpackages return outpkgs def PackagesetCheckFilters(packages, *filters): for filt in filters: if",
"= None # XXX: lonely ignored package is currently lonely; should it be",
"[ { 'version': key[0], 'versionclass': key[1], 'packages': versions[key] } for key in sorted(versions.keys(),",
"state_by_repo = {} families = set() for package in packages: families.add(package.family) if package.repo",
"be ignored instead? if state['bestpackage'].versionclass == PackageVersionClass.outdated: resulting_class = RepositoryVersionClass.outdated elif len(families) ==",
"# This file is part of repology # # repology is free software:",
"packages: key = (package.subrepo, package.name, package.version) aggregated.setdefault(key, []).append(package) outpkgs = [] for packages",
"is just to make merging faster, as packages # with same subrepo/name/version may",
"the GNU General Public License # along with repology. If not, see <http://www.gnu.org/licenses/>.",
"package.versionclass = PackageVersionClass.ignored elif result == 0: # XXX: if len(families) == 1",
"} return summary def PackagesetSortByVersions(packages): def packages_version_cmp_reverse(p1, p2): return VersionCompare(p2.version, p1.version) return sorted(packages,",
"just to make merging faster, as packages # with same subrepo/name/version may or",
"> 0: package.versionclass = PackageVersionClass.ignored elif result == 0: # XXX: if len(families)",
"even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.",
"# XXX: if len(families) == 1 -> PackageVersionClass.unique package.versionclass = PackageVersionClass.newest else: package.versionclass",
"package.repo not in state_by_repo: state_by_repo[package.repo] = { 'has_outdated': False, 'bestpackage': None, 'count': 0",
"== PackageVersionClass.outdated: state_by_repo[package.repo]['has_outdated'] = True, if state_by_repo[package.repo]['bestpackage'] is None or VersionCompare(package.version, state_by_repo[package.repo]['bestpackage'].version) >",
"1 if result > 0: package.versionclass = PackageVersionClass.ignored elif result == 0: #",
"resulting_class, 'numpackages': state['count'] } return summary def PackagesetSortByVersions(packages): def packages_version_cmp_reverse(p1, p2): return VersionCompare(p2.version,",
"{ 'has_outdated': False, 'bestpackage': None, 'count': 0 } if package.versionclass == PackageVersionClass.outdated: state_by_repo[package.repo]['has_outdated']",
"in state_by_repo: state_by_repo[package.repo] = { 'has_outdated': False, 'bestpackage': None, 'count': 0 } if",
"return set([package.family for package in packages]) def PackagesetAggregateByVersions(packages): versions = {} for package",
"option) any later version. # # repology is distributed in the hope that",
"package in packages: if not package.ignoreversion: versions.add(package.version) families.add(package.family) bestversion = None for version",
"packages[1:]: if not merged.TryMerge(package): nextpackages.append(package) outpkgs.append(merged) packages = nextpackages return outpkgs def PackagesetCheckFilters(packages,",
"else: resulting_class = RepositoryVersionClass.newest elif state['bestpackage'].versionclass == PackageVersionClass.ignored: resulting_class = RepositoryVersionClass.ignored summary[repo] =",
"outpkgs = [] for packages in aggregated.values(): while packages: nextpackages = [] merged",
"# (at your option) any later version. # # repology is distributed in",
"else: package.versionclass = PackageVersionClass.outdated def PackagesetToSummaries(packages): summary = {} state_by_repo = {} families",
"(C) 2016-2017 <NAME> <<EMAIL>> # # This file is part of repology #",
"or may not merge for package in packages: key = (package.subrepo, package.name, package.version)",
"1 -> PackageVersionClass.unique package.versionclass = PackageVersionClass.newest else: package.versionclass = PackageVersionClass.outdated def PackagesetToSummaries(packages): summary",
"{ 'version': key[0], 'versionclass': key[1], 'packages': versions[key] } for key in sorted(versions.keys(), key=cmp_to_key(key_cmp_reverse))",
"by subrepo/name/version # this is just to make merging faster, as packages #",
"versions = set() families = set() for package in packages: if not package.ignoreversion:",
"def key_cmp_reverse(v1, v2): return VersionCompare(v2[0], v1[0]) return [ { 'version': key[0], 'versionclass': key[1],",
"RepositoryVersionClass.mixed else: resulting_class = RepositoryVersionClass.newest elif state['bestpackage'].versionclass == PackageVersionClass.ignored: resulting_class = RepositoryVersionClass.ignored summary[repo]",
"be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of",
"may or may not merge for package in packages: key = (package.subrepo, package.name,",
"VersionCompare(package.version, state_by_repo[package.repo]['bestpackage'].version) > 0: state_by_repo[package.repo]['bestpackage'] = package state_by_repo[package.repo]['count'] += 1 for repo, state",
"= RepositoryVersionClass.newest elif state['bestpackage'].versionclass == PackageVersionClass.ignored: resulting_class = RepositoryVersionClass.ignored summary[repo] = { 'version':",
"package.version) aggregated.setdefault(key, []).append(package) outpkgs = [] for packages in aggregated.values(): while packages: nextpackages",
"while packages: nextpackages = [] merged = packages[0] for package in packages[1:]: if",
"else 1 if result > 0: package.versionclass = PackageVersionClass.ignored elif result == 0:",
"is distributed in the hope that it will be useful, # but WITHOUT",
"bestversion) > 0: bestversion = version for package in packages: result = VersionCompare(package.version,",
"MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public",
"or # (at your option) any later version. # # repology is distributed",
"part of repology # # repology is free software: you can redistribute it",
"in the hope that it will be useful, # but WITHOUT ANY WARRANTY;",
"def PackagesetToSummaries(packages): summary = {} state_by_repo = {} families = set() for package",
"= { 'has_outdated': False, 'bestpackage': None, 'count': 0 } if package.versionclass == PackageVersionClass.outdated:",
"<http://www.gnu.org/licenses/>. import sys from functools import cmp_to_key from repology.package import * from repology.version",
"FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for",
"and/or modify # it under the terms of the GNU General Public License",
"package in packages: key = (package.version, package.versionclass) if key not in versions: versions[key]",
"repology is distributed in the hope that it will be useful, # but",
"outpkgs def PackagesetCheckFilters(packages, *filters): for filt in filters: if not filt.Check(packages): return False",
"GNU General Public License # along with repology. If not, see <http://www.gnu.org/licenses/>. import",
"# repology is distributed in the hope that it will be useful, #",
"len(families) == 1: resulting_class = RepositoryVersionClass.lonely elif state['bestpackage'].versionclass == PackageVersionClass.newest: if state['has_outdated']: resulting_class",
"not in versions: versions[key] = [] versions[key].append(package) def key_cmp_reverse(v1, v2): return VersionCompare(v2[0], v1[0])",
"for version in versions: if bestversion is None or VersionCompare(version, bestversion) > 0:",
"2016-2017 <NAME> <<EMAIL>> # # This file is part of repology # #",
"None or VersionCompare(package.version, state_by_repo[package.repo]['bestpackage'].version) > 0: state_by_repo[package.repo]['bestpackage'] = package state_by_repo[package.repo]['count'] += 1 for",
"XXX: if len(families) == 1 -> PackageVersionClass.unique package.versionclass = PackageVersionClass.newest else: package.versionclass =",
"version. # # repology is distributed in the hope that it will be",
"any later version. # # repology is distributed in the hope that it",
"families = set() for package in packages: if not package.ignoreversion: versions.add(package.version) families.add(package.family) bestversion",
"None or VersionCompare(version, bestversion) > 0: bestversion = version for package in packages:",
"if package.repo not in state_by_repo: state_by_repo[package.repo] = { 'has_outdated': False, 'bestpackage': None, 'count':",
"== 1: resulting_class = RepositoryVersionClass.lonely elif state['bestpackage'].versionclass == PackageVersionClass.newest: if state['has_outdated']: resulting_class =",
"can redistribute it and/or modify # it under the terms of the GNU",
"Software Foundation, either version 3 of the License, or # (at your option)",
"VersionCompare(v2[0], v1[0]) return [ { 'version': key[0], 'versionclass': key[1], 'packages': versions[key] } for",
"in aggregated.values(): while packages: nextpackages = [] merged = packages[0] for package in",
"not package.ignoreversion: versions.add(package.version) families.add(package.family) bestversion = None for version in versions: if bestversion",
"but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or",
"state_by_repo.items(): resulting_class = None # XXX: lonely ignored package is currently lonely; should",
"make merging faster, as packages # with same subrepo/name/version may or may not",
"version 3 of the License, or # (at your option) any later version.",
"or VersionCompare(package.version, state_by_repo[package.repo]['bestpackage'].version) > 0: state_by_repo[package.repo]['bestpackage'] = package state_by_repo[package.repo]['count'] += 1 for repo,",
"state['bestpackage'].versionclass == PackageVersionClass.ignored: resulting_class = RepositoryVersionClass.ignored summary[repo] = { 'version': state['bestpackage'].version, 'bestpackage': state['bestpackage'],",
"resulting_class = None # XXX: lonely ignored package is currently lonely; should it",
"terms of the GNU General Public License as published by # the Free",
"if state_by_repo[package.repo]['bestpackage'] is None or VersionCompare(package.version, state_by_repo[package.repo]['bestpackage'].version) > 0: state_by_repo[package.repo]['bestpackage'] = package state_by_repo[package.repo]['count']",
"nextpackages.append(package) outpkgs.append(merged) packages = nextpackages return outpkgs def PackagesetCheckFilters(packages, *filters): for filt in",
"GNU General Public License as published by # the Free Software Foundation, either",
"return outpkgs def PackagesetCheckFilters(packages, *filters): for filt in filters: if not filt.Check(packages): return",
"if not package.ignoreversion: versions.add(package.version) families.add(package.family) bestversion = None for version in versions: if",
"set() for package in packages: if not package.ignoreversion: versions.add(package.version) families.add(package.family) bestversion = None",
"bestversion = None for version in versions: if bestversion is None or VersionCompare(version,",
"this is just to make merging faster, as packages # with same subrepo/name/version",
"def PackagesetToFamilies(packages): return set([package.family for package in packages]) def PackagesetAggregateByVersions(packages): versions = {}",
"License for more details. # # You should have received a copy of",
"may not merge for package in packages: key = (package.subrepo, package.name, package.version) aggregated.setdefault(key,",
"If not, see <http://www.gnu.org/licenses/>. import sys from functools import cmp_to_key from repology.package import",
"packages in aggregated.values(): while packages: nextpackages = [] merged = packages[0] for package",
"sys from functools import cmp_to_key from repology.package import * from repology.version import VersionCompare",
"= {} families = set() for package in packages: families.add(package.family) if package.repo not",
"currently lonely; should it be ignored instead? if state['bestpackage'].versionclass == PackageVersionClass.outdated: resulting_class =",
"= RepositoryVersionClass.mixed else: resulting_class = RepositoryVersionClass.newest elif state['bestpackage'].versionclass == PackageVersionClass.ignored: resulting_class = RepositoryVersionClass.ignored",
"import cmp_to_key from repology.package import * from repology.version import VersionCompare def PackagesMerge(packages): aggregated",
"aggregate by subrepo/name/version # this is just to make merging faster, as packages",
"packages: families.add(package.family) if package.repo not in state_by_repo: state_by_repo[package.repo] = { 'has_outdated': False, 'bestpackage':",
"<NAME> <<EMAIL>> # # This file is part of repology # # repology",
"redistribute it and/or modify # it under the terms of the GNU General",
"modify # it under the terms of the GNU General Public License as",
"(package.subrepo, package.name, package.version) aggregated.setdefault(key, []).append(package) outpkgs = [] for packages in aggregated.values(): while",
"nextpackages = [] merged = packages[0] for package in packages[1:]: if not merged.TryMerge(package):",
"XXX: lonely ignored package is currently lonely; should it be ignored instead? if",
"== PackageVersionClass.ignored: resulting_class = RepositoryVersionClass.ignored summary[repo] = { 'version': state['bestpackage'].version, 'bestpackage': state['bestpackage'], 'versionclass':",
"state_by_repo[package.repo]['has_outdated'] = True, if state_by_repo[package.repo]['bestpackage'] is None or VersionCompare(package.version, state_by_repo[package.repo]['bestpackage'].version) > 0: state_by_repo[package.repo]['bestpackage']",
"package in packages]) def PackagesetAggregateByVersions(packages): versions = {} for package in packages: key",
"# aggregate by subrepo/name/version # this is just to make merging faster, as",
"is free software: you can redistribute it and/or modify # it under the",
"[] merged = packages[0] for package in packages[1:]: if not merged.TryMerge(package): nextpackages.append(package) outpkgs.append(merged)",
"file is part of repology # # repology is free software: you can",
"if bestversion is not None else 1 if result > 0: package.versionclass =",
"VersionCompare(package.version, bestversion) if bestversion is not None else 1 if result > 0:",
"= PackageVersionClass.newest else: package.versionclass = PackageVersionClass.outdated def PackagesetToSummaries(packages): summary = {} state_by_repo =",
"package in packages: key = (package.subrepo, package.name, package.version) aggregated.setdefault(key, []).append(package) outpkgs = []",
"from functools import cmp_to_key from repology.package import * from repology.version import VersionCompare def",
"state['bestpackage'].version, 'bestpackage': state['bestpackage'], 'versionclass': resulting_class, 'numpackages': state['count'] } return summary def PackagesetSortByVersions(packages): def",
"repo, state in state_by_repo.items(): resulting_class = None # XXX: lonely ignored package is",
"'bestpackage': state['bestpackage'], 'versionclass': resulting_class, 'numpackages': state['count'] } return summary def PackagesetSortByVersions(packages): def packages_version_cmp_reverse(p1,",
"versions: versions[key] = [] versions[key].append(package) def key_cmp_reverse(v1, v2): return VersionCompare(v2[0], v1[0]) return [",
"package.versionclass = PackageVersionClass.outdated def PackagesetToSummaries(packages): summary = {} state_by_repo = {} families =",
"PackageVersionClass.outdated: resulting_class = RepositoryVersionClass.outdated elif len(families) == 1: resulting_class = RepositoryVersionClass.lonely elif state['bestpackage'].versionclass",
"your option) any later version. # # repology is distributed in the hope",
"packages # with same subrepo/name/version may or may not merge for package in",
"to make merging faster, as packages # with same subrepo/name/version may or may",
"package in packages[1:]: if not merged.TryMerge(package): nextpackages.append(package) outpkgs.append(merged) packages = nextpackages return outpkgs",
"package.versionclass = PackageVersionClass.newest else: package.versionclass = PackageVersionClass.outdated def PackagesetToSummaries(packages): summary = {} state_by_repo",
"See the # GNU General Public License for more details. # # You",
"sorted(packages, key=cmp_to_key(packages_version_cmp_reverse)) def PackagesetToFamilies(packages): return set([package.family for package in packages]) def PackagesetAggregateByVersions(packages): versions",
"> 0: bestversion = version for package in packages: result = VersionCompare(package.version, bestversion)",
"{} state_by_repo = {} families = set() for package in packages: families.add(package.family) if",
"bestversion is None or VersionCompare(version, bestversion) > 0: bestversion = version for package",
"with repology. If not, see <http://www.gnu.org/licenses/>. import sys from functools import cmp_to_key from",
"License as published by # the Free Software Foundation, either version 3 of",
"from repology.version import VersionCompare def PackagesMerge(packages): aggregated = {} # aggregate by subrepo/name/version",
"as published by # the Free Software Foundation, either version 3 of the",
"resulting_class = RepositoryVersionClass.newest elif state['bestpackage'].versionclass == PackageVersionClass.ignored: resulting_class = RepositoryVersionClass.ignored summary[repo] = {",
"+= 1 for repo, state in state_by_repo.items(): resulting_class = None # XXX: lonely",
"= {} for package in packages: key = (package.version, package.versionclass) if key not",
"merge for package in packages: key = (package.subrepo, package.name, package.version) aggregated.setdefault(key, []).append(package) outpkgs",
"# XXX: lonely ignored package is currently lonely; should it be ignored instead?",
"ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR",
"ignored package is currently lonely; should it be ignored instead? if state['bestpackage'].versionclass ==",
"resulting_class = RepositoryVersionClass.mixed else: resulting_class = RepositoryVersionClass.newest elif state['bestpackage'].versionclass == PackageVersionClass.ignored: resulting_class =",
"not, see <http://www.gnu.org/licenses/>. import sys from functools import cmp_to_key from repology.package import *",
"state_by_repo[package.repo]['count'] += 1 for repo, state in state_by_repo.items(): resulting_class = None # XXX:",
"= PackageVersionClass.ignored elif result == 0: # XXX: if len(families) == 1 ->",
"version in versions: if bestversion is None or VersionCompare(version, bestversion) > 0: bestversion",
"0: state_by_repo[package.repo]['bestpackage'] = package state_by_repo[package.repo]['count'] += 1 for repo, state in state_by_repo.items(): resulting_class",
"subrepo/name/version # this is just to make merging faster, as packages # with",
"WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS",
"# # You should have received a copy of the GNU General Public",
"in versions: versions[key] = [] versions[key].append(package) def key_cmp_reverse(v1, v2): return VersionCompare(v2[0], v1[0]) return",
"for package in packages: key = (package.subrepo, package.name, package.version) aggregated.setdefault(key, []).append(package) outpkgs =",
"FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more",
"elif len(families) == 1: resulting_class = RepositoryVersionClass.lonely elif state['bestpackage'].versionclass == PackageVersionClass.newest: if state['has_outdated']:",
"have received a copy of the GNU General Public License # along with",
"along with repology. If not, see <http://www.gnu.org/licenses/>. import sys from functools import cmp_to_key",
"families = set() for package in packages: families.add(package.family) if package.repo not in state_by_repo:",
"{} for package in packages: key = (package.version, package.versionclass) if key not in",
"Public License for more details. # # You should have received a copy",
"return VersionCompare(p2.version, p1.version) return sorted(packages, key=cmp_to_key(packages_version_cmp_reverse)) def PackagesetToFamilies(packages): return set([package.family for package in",
"= RepositoryVersionClass.outdated elif len(families) == 1: resulting_class = RepositoryVersionClass.lonely elif state['bestpackage'].versionclass == PackageVersionClass.newest:",
"import * from repology.version import VersionCompare def PackagesMerge(packages): aggregated = {} # aggregate",
"packages: key = (package.version, package.versionclass) if key not in versions: versions[key] = []",
"None, 'count': 0 } if package.versionclass == PackageVersionClass.outdated: state_by_repo[package.repo]['has_outdated'] = True, if state_by_repo[package.repo]['bestpackage']",
"p1.version) return sorted(packages, key=cmp_to_key(packages_version_cmp_reverse)) def PackagesetToFamilies(packages): return set([package.family for package in packages]) def",
"versions: if bestversion is None or VersionCompare(version, bestversion) > 0: bestversion = version",
"key_cmp_reverse(v1, v2): return VersionCompare(v2[0], v1[0]) return [ { 'version': key[0], 'versionclass': key[1], 'packages':",
"== PackageVersionClass.newest: if state['has_outdated']: resulting_class = RepositoryVersionClass.mixed else: resulting_class = RepositoryVersionClass.newest elif state['bestpackage'].versionclass",
"version for package in packages: result = VersionCompare(package.version, bestversion) if bestversion is not",
"return summary def PackagesetSortByVersions(packages): def packages_version_cmp_reverse(p1, p2): return VersionCompare(p2.version, p1.version) return sorted(packages, key=cmp_to_key(packages_version_cmp_reverse))",
"state_by_repo[package.repo]['bestpackage'].version) > 0: state_by_repo[package.repo]['bestpackage'] = package state_by_repo[package.repo]['count'] += 1 for repo, state in",
"state['count'] } return summary def PackagesetSortByVersions(packages): def packages_version_cmp_reverse(p1, p2): return VersionCompare(p2.version, p1.version) return",
"in versions: if bestversion is None or VersionCompare(version, bestversion) > 0: bestversion =",
"def packages_version_cmp_reverse(p1, p2): return VersionCompare(p2.version, p1.version) return sorted(packages, key=cmp_to_key(packages_version_cmp_reverse)) def PackagesetToFamilies(packages): return set([package.family",
"# along with repology. If not, see <http://www.gnu.org/licenses/>. import sys from functools import",
"True def FillPackagesetVersions(packages): versions = set() families = set() for package in packages:",
"aggregated.values(): while packages: nextpackages = [] merged = packages[0] for package in packages[1:]:",
"packages: nextpackages = [] merged = packages[0] for package in packages[1:]: if not",
"{} # aggregate by subrepo/name/version # this is just to make merging faster,",
"families.add(package.family) bestversion = None for version in versions: if bestversion is None or",
"License # along with repology. If not, see <http://www.gnu.org/licenses/>. import sys from functools",
"set() families = set() for package in packages: if not package.ignoreversion: versions.add(package.version) families.add(package.family)",
"will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty",
"key not in versions: versions[key] = [] versions[key].append(package) def key_cmp_reverse(v1, v2): return VersionCompare(v2[0],",
"'has_outdated': False, 'bestpackage': None, 'count': 0 } if package.versionclass == PackageVersionClass.outdated: state_by_repo[package.repo]['has_outdated'] =",
"Copyright (C) 2016-2017 <NAME> <<EMAIL>> # # This file is part of repology",
"0: package.versionclass = PackageVersionClass.ignored elif result == 0: # XXX: if len(families) ==",
"# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY",
"copy of the GNU General Public License # along with repology. If not,",
"def PackagesetAggregateByVersions(packages): versions = {} for package in packages: key = (package.version, package.versionclass)",
"of repology # # repology is free software: you can redistribute it and/or",
"for package in packages[1:]: if not merged.TryMerge(package): nextpackages.append(package) outpkgs.append(merged) packages = nextpackages return",
"def FillPackagesetVersions(packages): versions = set() families = set() for package in packages: if",
"RepositoryVersionClass.newest elif state['bestpackage'].versionclass == PackageVersionClass.ignored: resulting_class = RepositoryVersionClass.ignored summary[repo] = { 'version': state['bestpackage'].version,"
] |
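As a quick illustration of how FillPackagesetVersions classifies a packageset, here is a minimal, self-contained sketch. The stub Package class, the toy VersionCompare, and the classify helper are all assumptions made for this example: they mirror the logic above but stand in for the real repology.package and repology.version modules, whose comparator handles far more version formats.

class Package:
    def __init__(self, version, ignoreversion=False):
        self.version = version
        self.ignoreversion = ignoreversion
        self.versionclass = None


def VersionCompare(a, b):
    # Toy cmp-style comparator for dotted numeric versions only.
    pa = [int(x) for x in a.split('.')]
    pb = [int(x) for x in b.split('.')]
    return (pa > pb) - (pa < pb)


def classify(packages):
    # Mirrors FillPackagesetVersions: the best version is taken over
    # non-ignored packages only, then every package is compared to it.
    best = None
    for p in packages:
        if not p.ignoreversion and (best is None or VersionCompare(p.version, best) > 0):
            best = p.version
    for p in packages:
        result = VersionCompare(p.version, best) if best is not None else 1
        p.versionclass = ('ignored' if result > 0
                          else 'newest' if result == 0
                          else 'outdated')


pkgs = [Package('1.2'), Package('1.1'), Package('1.3', ignoreversion=True)]
classify(pkgs)
print([(p.version, p.versionclass) for p in pkgs])
# [('1.2', 'newest'), ('1.1', 'outdated'), ('1.3', 'ignored')]

Note the design choice this exposes: a version above the best non-ignored one comes out as ignored rather than newest, so ignoreversion packages can never promote a repository to "newest".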
[
"def softmax(src): # Get size of input vector rows, cols = src.shape #",
"Exception(\"Input rows > 1\") # Find relu derivative retMat = np.zeros((cols, cols)) for",
"Exception(\"Input rows > 1\") # Find relu retVec = np.zeros((1, cols)) for i",
"range(cols): if src[0, i] < 0.0: retMat[i, i] = 0 else: retMat[i, i]",
"input vector rows, cols = src.shape # Checking if rows > 1: raise",
"rows, cols = src.shape # Checking if rows > 1: raise Exception(\"Input rows",
"# Get size of input vector rows, cols = src.shape # Checking if",
"rows > 1: raise Exception(\"Input rows > 1\") # Find softmax derivative tmpVec",
"rows > 1: raise Exception(\"Input rows > 1\") # Find relu derivative retMat",
"> 1\") # Find softmax derivative tmpVec = softmax(src) retMat = np.zeros((cols, cols))",
"> 1: raise Exception(\"Input rows > 1\") # Find relu derivative retMat =",
"import math def softmax(src): # Get size of input vector rows, cols =",
"of input vector rows, cols = src.shape # Checking if rows > 1:",
"max(src[0, i], 0.0) return retVec def relu_derivative(src): # Get size of input vector",
"j)) - tmpVec[0, j]) return retMat def relu(src): # Get size of input",
"for i in range(cols): for j in range(cols): retMat[i, j] = tmpVec[0, i]",
"Find relu retVec = np.zeros((1, cols)) for i in range(cols): retVec[0, i] =",
"Get size of input vector rows, cols = src.shape # Checking if rows",
"if rows > 1: raise Exception(\"Input rows > 1\") # Find softmax expVec",
"def relu(src): # Get size of input vector rows, cols = src.shape #",
"return retMat def relu(src): # Get size of input vector rows, cols =",
"= np.zeros((1, cols)) for i in range(cols): retVec[0, i] = max(src[0, i], 0.0)",
"tmpVec[0, i] * (float((i == j)) - tmpVec[0, j]) return retMat def relu(src):",
"rows > 1\") # Find softmax expVec = np.exp(src) return expVec / np.sum(expVec)",
"if rows > 1: raise Exception(\"Input rows > 1\") # Find softmax derivative",
"< 0.0: retMat[i, i] = 0 else: retMat[i, i] = 1 return retMat",
"relu_derivative(src): # Get size of input vector rows, cols = src.shape # Checking",
"in range(cols): for j in range(cols): retMat[i, j] = tmpVec[0, i] * (float((i",
"if src[0, i] < 0.0: retMat[i, i] = 0 else: retMat[i, i] =",
"rows > 1: raise Exception(\"Input rows > 1\") # Find softmax expVec =",
"derivative retMat = np.zeros((cols, cols)) for i in range(cols): if src[0, i] <",
"for i in range(cols): retVec[0, i] = max(src[0, i], 0.0) return retVec def",
"as np import math def softmax(src): # Get size of input vector rows,",
"1: raise Exception(\"Input rows > 1\") # Find relu derivative retMat = np.zeros((cols,",
"(float((i == j)) - tmpVec[0, j]) return retMat def relu(src): # Get size",
"cols)) for i in range(cols): if src[0, i] < 0.0: retMat[i, i] =",
"= src.shape # Checking if rows > 1: raise Exception(\"Input rows > 1\")",
"# Find softmax expVec = np.exp(src) return expVec / np.sum(expVec) def softmax_derivative(src): #",
"in range(cols): if src[0, i] < 0.0: retMat[i, i] = 0 else: retMat[i,",
"raise Exception(\"Input rows > 1\") # Find relu retVec = np.zeros((1, cols)) for",
"= softmax(src) retMat = np.zeros((cols, cols)) for i in range(cols): for j in",
"range(cols): retMat[i, j] = tmpVec[0, i] * (float((i == j)) - tmpVec[0, j])",
"range(cols): retVec[0, i] = max(src[0, i], 0.0) return retVec def relu_derivative(src): # Get",
"1\") # Find softmax derivative tmpVec = softmax(src) retMat = np.zeros((cols, cols)) for",
"1: raise Exception(\"Input rows > 1\") # Find relu retVec = np.zeros((1, cols))",
"- tmpVec[0, j]) return retMat def relu(src): # Get size of input vector",
"Find relu derivative retMat = np.zeros((cols, cols)) for i in range(cols): if src[0,",
"rows > 1\") # Find relu derivative retMat = np.zeros((cols, cols)) for i",
"i] = max(src[0, i], 0.0) return retVec def relu_derivative(src): # Get size of",
"def relu_derivative(src): # Get size of input vector rows, cols = src.shape #",
"j in range(cols): retMat[i, j] = tmpVec[0, i] * (float((i == j)) -",
"# Find softmax derivative tmpVec = softmax(src) retMat = np.zeros((cols, cols)) for i",
"/ np.sum(expVec) def softmax_derivative(src): # Get size of input vector rows, cols =",
"derivative tmpVec = softmax(src) retMat = np.zeros((cols, cols)) for i in range(cols): for",
"cols = src.shape # Checking if rows > 1: raise Exception(\"Input rows >",
"Checking if rows > 1: raise Exception(\"Input rows > 1\") # Find relu",
"if rows > 1: raise Exception(\"Input rows > 1\") # Find relu retVec",
"> 1\") # Find relu retVec = np.zeros((1, cols)) for i in range(cols):",
"softmax(src): # Get size of input vector rows, cols = src.shape # Checking",
"np.sum(expVec) def softmax_derivative(src): # Get size of input vector rows, cols = src.shape",
"np.zeros((cols, cols)) for i in range(cols): if src[0, i] < 0.0: retMat[i, i]",
"rows > 1\") # Find softmax derivative tmpVec = softmax(src) retMat = np.zeros((cols,",
"1\") # Find relu retVec = np.zeros((1, cols)) for i in range(cols): retVec[0,",
"= np.exp(src) return expVec / np.sum(expVec) def softmax_derivative(src): # Get size of input",
"src[0, i] < 0.0: retMat[i, i] = 0 else: retMat[i, i] = 1",
"== j)) - tmpVec[0, j]) return retMat def relu(src): # Get size of",
"softmax_derivative(src): # Get size of input vector rows, cols = src.shape # Checking",
"tmpVec = softmax(src) retMat = np.zeros((cols, cols)) for i in range(cols): for j",
"expVec / np.sum(expVec) def softmax_derivative(src): # Get size of input vector rows, cols",
"cols)) for i in range(cols): retVec[0, i] = max(src[0, i], 0.0) return retVec",
"> 1\") # Find softmax expVec = np.exp(src) return expVec / np.sum(expVec) def",
"Exception(\"Input rows > 1\") # Find softmax expVec = np.exp(src) return expVec /",
"retVec def relu_derivative(src): # Get size of input vector rows, cols = src.shape",
"= tmpVec[0, i] * (float((i == j)) - tmpVec[0, j]) return retMat def",
"size of input vector rows, cols = src.shape # Checking if rows >",
"rows > 1\") # Find relu retVec = np.zeros((1, cols)) for i in",
"* (float((i == j)) - tmpVec[0, j]) return retMat def relu(src): # Get",
"1\") # Find relu derivative retMat = np.zeros((cols, cols)) for i in range(cols):",
"return retVec def relu_derivative(src): # Get size of input vector rows, cols =",
"raise Exception(\"Input rows > 1\") # Find softmax expVec = np.exp(src) return expVec",
"np.zeros((1, cols)) for i in range(cols): retVec[0, i] = max(src[0, i], 0.0) return",
"Find softmax expVec = np.exp(src) return expVec / np.sum(expVec) def softmax_derivative(src): # Get",
"range(cols): for j in range(cols): retMat[i, j] = tmpVec[0, i] * (float((i ==",
"i in range(cols): for j in range(cols): retMat[i, j] = tmpVec[0, i] *",
"src.shape # Checking if rows > 1: raise Exception(\"Input rows > 1\") #",
"Find softmax derivative tmpVec = softmax(src) retMat = np.zeros((cols, cols)) for i in",
"import numpy as np import math def softmax(src): # Get size of input",
"0.0) return retVec def relu_derivative(src): # Get size of input vector rows, cols",
"retMat[i, j] = tmpVec[0, i] * (float((i == j)) - tmpVec[0, j]) return",
"j] = tmpVec[0, i] * (float((i == j)) - tmpVec[0, j]) return retMat",
"j]) return retMat def relu(src): # Get size of input vector rows, cols",
"i in range(cols): retVec[0, i] = max(src[0, i], 0.0) return retVec def relu_derivative(src):",
"i], 0.0) return retVec def relu_derivative(src): # Get size of input vector rows,",
"retMat = np.zeros((cols, cols)) for i in range(cols): if src[0, i] < 0.0:",
"in range(cols): retVec[0, i] = max(src[0, i], 0.0) return retVec def relu_derivative(src): #",
"np.zeros((cols, cols)) for i in range(cols): for j in range(cols): retMat[i, j] =",
"1: raise Exception(\"Input rows > 1\") # Find softmax expVec = np.exp(src) return",
"cols)) for i in range(cols): for j in range(cols): retMat[i, j] = tmpVec[0,",
"> 1: raise Exception(\"Input rows > 1\") # Find relu retVec = np.zeros((1,",
"i in range(cols): if src[0, i] < 0.0: retMat[i, i] = 0 else:",
"# Checking if rows > 1: raise Exception(\"Input rows > 1\") # Find",
"for j in range(cols): retMat[i, j] = tmpVec[0, i] * (float((i == j))",
"if rows > 1: raise Exception(\"Input rows > 1\") # Find relu derivative",
"Exception(\"Input rows > 1\") # Find softmax derivative tmpVec = softmax(src) retMat =",
"retMat def relu(src): # Get size of input vector rows, cols = src.shape",
"math def softmax(src): # Get size of input vector rows, cols = src.shape",
"> 1: raise Exception(\"Input rows > 1\") # Find softmax expVec = np.exp(src)",
"softmax expVec = np.exp(src) return expVec / np.sum(expVec) def softmax_derivative(src): # Get size",
"raise Exception(\"Input rows > 1\") # Find softmax derivative tmpVec = softmax(src) retMat",
"relu derivative retMat = np.zeros((cols, cols)) for i in range(cols): if src[0, i]",
"retMat = np.zeros((cols, cols)) for i in range(cols): for j in range(cols): retMat[i,",
"i] < 0.0: retMat[i, i] = 0 else: retMat[i, i] = 1 return",
"retVec[0, i] = max(src[0, i], 0.0) return retVec def relu_derivative(src): # Get size",
"softmax derivative tmpVec = softmax(src) retMat = np.zeros((cols, cols)) for i in range(cols):",
"# Find relu derivative retMat = np.zeros((cols, cols)) for i in range(cols): if",
"rows > 1: raise Exception(\"Input rows > 1\") # Find relu retVec =",
"> 1\") # Find relu derivative retMat = np.zeros((cols, cols)) for i in",
"Checking if rows > 1: raise Exception(\"Input rows > 1\") # Find softmax",
"tmpVec[0, j]) return retMat def relu(src): # Get size of input vector rows,",
"relu retVec = np.zeros((1, cols)) for i in range(cols): retVec[0, i] = max(src[0,",
"retVec = np.zeros((1, cols)) for i in range(cols): retVec[0, i] = max(src[0, i],",
"= np.zeros((cols, cols)) for i in range(cols): for j in range(cols): retMat[i, j]",
"i] * (float((i == j)) - tmpVec[0, j]) return retMat def relu(src): #",
"relu(src): # Get size of input vector rows, cols = src.shape # Checking",
"# Find relu retVec = np.zeros((1, cols)) for i in range(cols): retVec[0, i]",
"> 1: raise Exception(\"Input rows > 1\") # Find softmax derivative tmpVec =",
"vector rows, cols = src.shape # Checking if rows > 1: raise Exception(\"Input",
"expVec = np.exp(src) return expVec / np.sum(expVec) def softmax_derivative(src): # Get size of",
"for i in range(cols): if src[0, i] < 0.0: retMat[i, i] = 0",
"in range(cols): retMat[i, j] = tmpVec[0, i] * (float((i == j)) - tmpVec[0,",
"def softmax_derivative(src): # Get size of input vector rows, cols = src.shape #",
"= np.zeros((cols, cols)) for i in range(cols): if src[0, i] < 0.0: retMat[i,",
"softmax(src) retMat = np.zeros((cols, cols)) for i in range(cols): for j in range(cols):",
"= max(src[0, i], 0.0) return retVec def relu_derivative(src): # Get size of input",
"1: raise Exception(\"Input rows > 1\") # Find softmax derivative tmpVec = softmax(src)",
"return expVec / np.sum(expVec) def softmax_derivative(src): # Get size of input vector rows,",
"np.exp(src) return expVec / np.sum(expVec) def softmax_derivative(src): # Get size of input vector",
"raise Exception(\"Input rows > 1\") # Find relu derivative retMat = np.zeros((cols, cols))",
"numpy as np import math def softmax(src): # Get size of input vector",
"1\") # Find softmax expVec = np.exp(src) return expVec / np.sum(expVec) def softmax_derivative(src):",
"np import math def softmax(src): # Get size of input vector rows, cols"
] |
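A quick numerical sanity check for the activation helpers above; it assumes the four functions are in scope (for example, pasted below the module) and verifies the softmax Jacobian, J[i, j] = s_i * (delta_ij - s_j), against a central finite difference.

import numpy as np

x = np.array([[1.0, 2.0, -0.5]])

s = softmax(x)
print(s, s.sum())                     # probabilities summing to 1.0

J = softmax_derivative(x)             # J[i, j] = s_i * (delta_ij - s_j)
eps = 1e-6
xp, xm = x.copy(), x.copy()
xp[0, 0] += eps
xm[0, 0] -= eps
fd = (softmax(xp) - softmax(xm)) / (2 * eps)       # numerical d softmax / d x_0
print(np.allclose(J[:, 0], fd[0, :], atol=1e-6))   # True

print(relu(x))                        # [[1. 2. 0.]] -- negatives clamped
print(np.diag(relu_derivative(x)))    # [1. 1. 0.] -- 1 where x >= 0, else 0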
[
"x.startswith(\".\")]: dirs.remove(dotted) dirs.sort() files.sort() for filename in files: if (filename.endswith(ANY_JS)): fullpath = os.path.join(dirname,",
"[] for dirname, dirs, files in os.walk(self.testroot): for dotted in [x for x",
"strip the root and replace it with our # local root. script =",
"found in the LICENSE file. import os import re from testrunner.local import testsuite",
"in os.walk(self.testroot): for dotted in [x for x in dirs if x.startswith(\".\")]: dirs.remove(dotted)",
"source code is governed by a BSD-style license that can be # found",
"of this source code is governed by a BSD-style license that can be",
"relpath = fullpath[len(self.testroot) + 1 : -len(ANY_JS)] testname = relpath.replace(os.path.sep, \"/\") test =",
"root. script = os.path.join(self.suite.testroot, script[len(WPT_ROOT):]) elif not script.startswith(\"/\"): # Matched a relative path,",
"= os.path.join(dirname, filename) relpath = fullpath[len(self.testroot) + 1 : -len(ANY_JS)] testname = relpath.replace(os.path.sep,",
"file. import os import re from testrunner.local import testsuite from testrunner.objects import testcase",
"path for script: \\\"%s\\\"\" % script); files.append(script) files.extend([ self._get_source_path(), os.path.join(self.suite.root, \"testharness-after.js\") ]) return",
"os.path.dirname(self._get_source_path()) script = os.path.join(thisdir, script) else: raise Exception(\"Unexpected absolute path for script: \\\"%s\\\"\"",
"LICENSE file. import os import re from testrunner.local import testsuite from testrunner.objects import",
"dirs.sort() files.sort() for filename in files: if (filename.endswith(ANY_JS)): fullpath = os.path.join(dirname, filename) relpath",
"dirs if x.startswith(\".\")]: dirs.remove(dotted) dirs.sort() files.sort() for filename in files: if (filename.endswith(ANY_JS)): fullpath",
"for script in META_SCRIPT_REGEXP.findall(source): if script.startswith(WPT_ROOT): # Matched an absolute path, strip the",
"os.path.join(dirname, filename) relpath = fullpath[len(self.testroot) + 1 : -len(ANY_JS)] testname = relpath.replace(os.path.sep, \"/\")",
"script: \\\"%s\\\"\" % script); files.append(script) files.extend([ self._get_source_path(), os.path.join(self.suite.root, \"testharness-after.js\") ]) return files def",
"script.startswith(\"/\"): # Matched a relative path, prepend this test's directory. thisdir = os.path.dirname(self._get_source_path())",
"WPT_ROOT = \"/wasm/jsapi/\" META_SCRIPT_REGEXP = re.compile(r\"META:\\s*script=(.*)\") class TestSuite(testsuite.TestSuite): def __init__(self, *args, **kwargs): super(TestSuite,",
"**kwargs) self.testroot = os.path.join(self.root, \"data\", \"test\", \"js-api\") self.mjsunit_js = os.path.join(os.path.dirname(self.root), \"mjsunit\", \"mjsunit.js\") def",
"(filename.endswith(ANY_JS)): fullpath = os.path.join(dirname, filename) relpath = fullpath[len(self.testroot) + 1 : -len(ANY_JS)] testname",
"os.path.join(os.path.dirname(self.root), \"mjsunit\", \"mjsunit.js\") def ListTests(self): tests = [] for dirname, dirs, files in",
"script[len(WPT_ROOT):]) elif not script.startswith(\"/\"): # Matched a relative path, prepend this test's directory.",
"filename) relpath = fullpath[len(self.testroot) + 1 : -len(ANY_JS)] testname = relpath.replace(os.path.sep, \"/\") test",
"source = self.get_source() for script in META_SCRIPT_REGEXP.findall(source): if script.startswith(WPT_ROOT): # Matched an absolute",
"Copyright 2018 the V8 project authors. All rights reserved. # Use of this",
"this source code is governed by a BSD-style license that can be #",
"Matched an absolute path, strip the root and replace it with our #",
"*args, **kwargs): super(TestSuite, self).__init__(*args, **kwargs) self.testroot = os.path.join(self.root, \"data\", \"test\", \"js-api\") self.mjsunit_js =",
"files = [os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root, \"testharness.js\")] source = self.get_source() for script in META_SCRIPT_REGEXP.findall(source): if",
"\"/wasm/jsapi/\" META_SCRIPT_REGEXP = re.compile(r\"META:\\s*script=(.*)\") class TestSuite(testsuite.TestSuite): def __init__(self, *args, **kwargs): super(TestSuite, self).__init__(*args, **kwargs)",
"in the LICENSE file. import os import re from testrunner.local import testsuite from",
"+ 1 : -len(ANY_JS)] testname = relpath.replace(os.path.sep, \"/\") test = self._create_test(testname) tests.append(test) return",
"dirname, dirs, files in os.walk(self.testroot): for dotted in [x for x in dirs",
"for script: \\\"%s\\\"\" % script); files.append(script) files.extend([ self._get_source_path(), os.path.join(self.suite.root, \"testharness-after.js\") ]) return files",
"tests are named `path/name.any.js` return os.path.join(self.suite.testroot, self.path + ANY_JS) def GetSuite(*args, **kwargs): return",
"testsuite from testrunner.objects import testcase ANY_JS = \".any.js\" WPT_ROOT = \"/wasm/jsapi/\" META_SCRIPT_REGEXP =",
"replace it with our # local root. script = os.path.join(self.suite.testroot, script[len(WPT_ROOT):]) elif not",
"**kwargs): super(TestSuite, self).__init__(*args, **kwargs) self.testroot = os.path.join(self.root, \"data\", \"test\", \"js-api\") self.mjsunit_js = os.path.join(os.path.dirname(self.root),",
"dirs.remove(dotted) dirs.sort() files.sort() for filename in files: if (filename.endswith(ANY_JS)): fullpath = os.path.join(dirname, filename)",
"def _get_source_path(self): # All tests are named `path/name.any.js` return os.path.join(self.suite.testroot, self.path + ANY_JS)",
"= re.compile(r\"META:\\s*script=(.*)\") class TestSuite(testsuite.TestSuite): def __init__(self, *args, **kwargs): super(TestSuite, self).__init__(*args, **kwargs) self.testroot =",
"for filename in files: if (filename.endswith(ANY_JS)): fullpath = os.path.join(dirname, filename) relpath = fullpath[len(self.testroot)",
"in [x for x in dirs if x.startswith(\".\")]: dirs.remove(dotted) dirs.sort() files.sort() for filename",
"absolute path, strip the root and replace it with our # local root.",
"path, prepend this test's directory. thisdir = os.path.dirname(self._get_source_path()) script = os.path.join(thisdir, script) else:",
"def __init__(self, *args, **kwargs): super(TestSuite, self).__init__(*args, **kwargs) self.testroot = os.path.join(self.root, \"data\", \"test\", \"js-api\")",
"absolute path for script: \\\"%s\\\"\" % script); files.append(script) files.extend([ self._get_source_path(), os.path.join(self.suite.root, \"testharness-after.js\") ])",
"# Copyright 2018 the V8 project authors. All rights reserved. # Use of",
"return tests def _test_class(self): return TestCase class TestCase(testcase.D8TestCase): def _get_files_params(self): files = [os.path.join(self.suite.mjsunit_js),",
"def _test_class(self): return TestCase class TestCase(testcase.D8TestCase): def _get_files_params(self): files = [os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root, \"testharness.js\")]",
"a BSD-style license that can be # found in the LICENSE file. import",
"testcase ANY_JS = \".any.js\" WPT_ROOT = \"/wasm/jsapi/\" META_SCRIPT_REGEXP = re.compile(r\"META:\\s*script=(.*)\") class TestSuite(testsuite.TestSuite): def",
"dirs, files in os.walk(self.testroot): for dotted in [x for x in dirs if",
"\"js-api\") self.mjsunit_js = os.path.join(os.path.dirname(self.root), \"mjsunit\", \"mjsunit.js\") def ListTests(self): tests = [] for dirname,",
"Exception(\"Unexpected absolute path for script: \\\"%s\\\"\" % script); files.append(script) files.extend([ self._get_source_path(), os.path.join(self.suite.root, \"testharness-after.js\")",
"re from testrunner.local import testsuite from testrunner.objects import testcase ANY_JS = \".any.js\" WPT_ROOT",
"TestCase(testcase.D8TestCase): def _get_files_params(self): files = [os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root, \"testharness.js\")] source = self.get_source() for script",
"project authors. All rights reserved. # Use of this source code is governed",
"local root. script = os.path.join(self.suite.testroot, script[len(WPT_ROOT):]) elif not script.startswith(\"/\"): # Matched a relative",
"raise Exception(\"Unexpected absolute path for script: \\\"%s\\\"\" % script); files.append(script) files.extend([ self._get_source_path(), os.path.join(self.suite.root,",
"directory. thisdir = os.path.dirname(self._get_source_path()) script = os.path.join(thisdir, script) else: raise Exception(\"Unexpected absolute path",
"\"testharness.js\")] source = self.get_source() for script in META_SCRIPT_REGEXP.findall(source): if script.startswith(WPT_ROOT): # Matched an",
"fullpath = os.path.join(dirname, filename) relpath = fullpath[len(self.testroot) + 1 : -len(ANY_JS)] testname =",
"testrunner.objects import testcase ANY_JS = \".any.js\" WPT_ROOT = \"/wasm/jsapi/\" META_SCRIPT_REGEXP = re.compile(r\"META:\\s*script=(.*)\") class",
"def _get_files_params(self): files = [os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root, \"testharness.js\")] source = self.get_source() for script in",
"ListTests(self): tests = [] for dirname, dirs, files in os.walk(self.testroot): for dotted in",
"else: raise Exception(\"Unexpected absolute path for script: \\\"%s\\\"\" % script); files.append(script) files.extend([ self._get_source_path(),",
"_get_files_params(self): files = [os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root, \"testharness.js\")] source = self.get_source() for script in META_SCRIPT_REGEXP.findall(source):",
"= relpath.replace(os.path.sep, \"/\") test = self._create_test(testname) tests.append(test) return tests def _test_class(self): return TestCase",
"script = os.path.join(thisdir, script) else: raise Exception(\"Unexpected absolute path for script: \\\"%s\\\"\" %",
"tests = [] for dirname, dirs, files in os.walk(self.testroot): for dotted in [x",
"a relative path, prepend this test's directory. thisdir = os.path.dirname(self._get_source_path()) script = os.path.join(thisdir,",
"authors. All rights reserved. # Use of this source code is governed by",
"not script.startswith(\"/\"): # Matched a relative path, prepend this test's directory. thisdir =",
"return files def _get_source_path(self): # All tests are named `path/name.any.js` return os.path.join(self.suite.testroot, self.path",
"\"data\", \"test\", \"js-api\") self.mjsunit_js = os.path.join(os.path.dirname(self.root), \"mjsunit\", \"mjsunit.js\") def ListTests(self): tests = []",
"files in os.walk(self.testroot): for dotted in [x for x in dirs if x.startswith(\".\")]:",
"return TestCase class TestCase(testcase.D8TestCase): def _get_files_params(self): files = [os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root, \"testharness.js\")] source =",
"V8 project authors. All rights reserved. # Use of this source code is",
"Use of this source code is governed by a BSD-style license that can",
"an absolute path, strip the root and replace it with our # local",
"be # found in the LICENSE file. import os import re from testrunner.local",
"os.path.join(self.root, \"data\", \"test\", \"js-api\") self.mjsunit_js = os.path.join(os.path.dirname(self.root), \"mjsunit\", \"mjsunit.js\") def ListTests(self): tests =",
"import re from testrunner.local import testsuite from testrunner.objects import testcase ANY_JS = \".any.js\"",
"\"testharness-after.js\") ]) return files def _get_source_path(self): # All tests are named `path/name.any.js` return",
"import testcase ANY_JS = \".any.js\" WPT_ROOT = \"/wasm/jsapi/\" META_SCRIPT_REGEXP = re.compile(r\"META:\\s*script=(.*)\") class TestSuite(testsuite.TestSuite):",
"BSD-style license that can be # found in the LICENSE file. import os",
"\".any.js\" WPT_ROOT = \"/wasm/jsapi/\" META_SCRIPT_REGEXP = re.compile(r\"META:\\s*script=(.*)\") class TestSuite(testsuite.TestSuite): def __init__(self, *args, **kwargs):",
"self.get_source() for script in META_SCRIPT_REGEXP.findall(source): if script.startswith(WPT_ROOT): # Matched an absolute path, strip",
"named `path/name.any.js` return os.path.join(self.suite.testroot, self.path + ANY_JS) def GetSuite(*args, **kwargs): return TestSuite(*args, **kwargs)",
"if x.startswith(\".\")]: dirs.remove(dotted) dirs.sort() files.sort() for filename in files: if (filename.endswith(ANY_JS)): fullpath =",
"= [] for dirname, dirs, files in os.walk(self.testroot): for dotted in [x for",
"ANY_JS = \".any.js\" WPT_ROOT = \"/wasm/jsapi/\" META_SCRIPT_REGEXP = re.compile(r\"META:\\s*script=(.*)\") class TestSuite(testsuite.TestSuite): def __init__(self,",
"% script); files.append(script) files.extend([ self._get_source_path(), os.path.join(self.suite.root, \"testharness-after.js\") ]) return files def _get_source_path(self): #",
"= os.path.join(self.suite.testroot, script[len(WPT_ROOT):]) elif not script.startswith(\"/\"): # Matched a relative path, prepend this",
"= \"/wasm/jsapi/\" META_SCRIPT_REGEXP = re.compile(r\"META:\\s*script=(.*)\") class TestSuite(testsuite.TestSuite): def __init__(self, *args, **kwargs): super(TestSuite, self).__init__(*args,",
"class TestSuite(testsuite.TestSuite): def __init__(self, *args, **kwargs): super(TestSuite, self).__init__(*args, **kwargs) self.testroot = os.path.join(self.root, \"data\",",
"code is governed by a BSD-style license that can be # found in",
"<reponame>katemihalikova/test262<gh_stars>1000+ # Copyright 2018 the V8 project authors. All rights reserved. # Use",
"import os import re from testrunner.local import testsuite from testrunner.objects import testcase ANY_JS",
"import testsuite from testrunner.objects import testcase ANY_JS = \".any.js\" WPT_ROOT = \"/wasm/jsapi/\" META_SCRIPT_REGEXP",
"= self.get_source() for script in META_SCRIPT_REGEXP.findall(source): if script.startswith(WPT_ROOT): # Matched an absolute path,",
"relative path, prepend this test's directory. thisdir = os.path.dirname(self._get_source_path()) script = os.path.join(thisdir, script)",
"for dotted in [x for x in dirs if x.startswith(\".\")]: dirs.remove(dotted) dirs.sort() files.sort()",
"and replace it with our # local root. script = os.path.join(self.suite.testroot, script[len(WPT_ROOT):]) elif",
"rights reserved. # Use of this source code is governed by a BSD-style",
"it with our # local root. script = os.path.join(self.suite.testroot, script[len(WPT_ROOT):]) elif not script.startswith(\"/\"):",
"os.path.join(thisdir, script) else: raise Exception(\"Unexpected absolute path for script: \\\"%s\\\"\" % script); files.append(script)",
"self._create_test(testname) tests.append(test) return tests def _test_class(self): return TestCase class TestCase(testcase.D8TestCase): def _get_files_params(self): files",
"fullpath[len(self.testroot) + 1 : -len(ANY_JS)] testname = relpath.replace(os.path.sep, \"/\") test = self._create_test(testname) tests.append(test)",
"script) else: raise Exception(\"Unexpected absolute path for script: \\\"%s\\\"\" % script); files.append(script) files.extend([",
"1 : -len(ANY_JS)] testname = relpath.replace(os.path.sep, \"/\") test = self._create_test(testname) tests.append(test) return tests",
"def ListTests(self): tests = [] for dirname, dirs, files in os.walk(self.testroot): for dotted",
"for dirname, dirs, files in os.walk(self.testroot): for dotted in [x for x in",
"-len(ANY_JS)] testname = relpath.replace(os.path.sep, \"/\") test = self._create_test(testname) tests.append(test) return tests def _test_class(self):",
"script); files.append(script) files.extend([ self._get_source_path(), os.path.join(self.suite.root, \"testharness-after.js\") ]) return files def _get_source_path(self): # All",
"super(TestSuite, self).__init__(*args, **kwargs) self.testroot = os.path.join(self.root, \"data\", \"test\", \"js-api\") self.mjsunit_js = os.path.join(os.path.dirname(self.root), \"mjsunit\",",
"files.extend([ self._get_source_path(), os.path.join(self.suite.root, \"testharness-after.js\") ]) return files def _get_source_path(self): # All tests are",
"= os.path.join(os.path.dirname(self.root), \"mjsunit\", \"mjsunit.js\") def ListTests(self): tests = [] for dirname, dirs, files",
"script = os.path.join(self.suite.testroot, script[len(WPT_ROOT):]) elif not script.startswith(\"/\"): # Matched a relative path, prepend",
"2018 the V8 project authors. All rights reserved. # Use of this source",
"testrunner.local import testsuite from testrunner.objects import testcase ANY_JS = \".any.js\" WPT_ROOT = \"/wasm/jsapi/\"",
"root and replace it with our # local root. script = os.path.join(self.suite.testroot, script[len(WPT_ROOT):])",
"\"mjsunit.js\") def ListTests(self): tests = [] for dirname, dirs, files in os.walk(self.testroot): for",
"# Matched an absolute path, strip the root and replace it with our",
"if (filename.endswith(ANY_JS)): fullpath = os.path.join(dirname, filename) relpath = fullpath[len(self.testroot) + 1 : -len(ANY_JS)]",
"\"test\", \"js-api\") self.mjsunit_js = os.path.join(os.path.dirname(self.root), \"mjsunit\", \"mjsunit.js\") def ListTests(self): tests = [] for",
"All rights reserved. # Use of this source code is governed by a",
"re.compile(r\"META:\\s*script=(.*)\") class TestSuite(testsuite.TestSuite): def __init__(self, *args, **kwargs): super(TestSuite, self).__init__(*args, **kwargs) self.testroot = os.path.join(self.root,",
": -len(ANY_JS)] testname = relpath.replace(os.path.sep, \"/\") test = self._create_test(testname) tests.append(test) return tests def",
"class TestCase(testcase.D8TestCase): def _get_files_params(self): files = [os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root, \"testharness.js\")] source = self.get_source() for",
"by a BSD-style license that can be # found in the LICENSE file.",
"= [os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root, \"testharness.js\")] source = self.get_source() for script in META_SCRIPT_REGEXP.findall(source): if script.startswith(WPT_ROOT):",
"= os.path.join(thisdir, script) else: raise Exception(\"Unexpected absolute path for script: \\\"%s\\\"\" % script);",
"All tests are named `path/name.any.js` return os.path.join(self.suite.testroot, self.path + ANY_JS) def GetSuite(*args, **kwargs):",
"_get_source_path(self): # All tests are named `path/name.any.js` return os.path.join(self.suite.testroot, self.path + ANY_JS) def",
"= os.path.dirname(self._get_source_path()) script = os.path.join(thisdir, script) else: raise Exception(\"Unexpected absolute path for script:",
"# All tests are named `path/name.any.js` return os.path.join(self.suite.testroot, self.path + ANY_JS) def GetSuite(*args,",
"testname = relpath.replace(os.path.sep, \"/\") test = self._create_test(testname) tests.append(test) return tests def _test_class(self): return",
"are named `path/name.any.js` return os.path.join(self.suite.testroot, self.path + ANY_JS) def GetSuite(*args, **kwargs): return TestSuite(*args,",
"is governed by a BSD-style license that can be # found in the",
"the root and replace it with our # local root. script = os.path.join(self.suite.testroot,",
"os.path.join(self.suite.testroot, script[len(WPT_ROOT):]) elif not script.startswith(\"/\"): # Matched a relative path, prepend this test's",
"this test's directory. thisdir = os.path.dirname(self._get_source_path()) script = os.path.join(thisdir, script) else: raise Exception(\"Unexpected",
"files def _get_source_path(self): # All tests are named `path/name.any.js` return os.path.join(self.suite.testroot, self.path +",
"test = self._create_test(testname) tests.append(test) return tests def _test_class(self): return TestCase class TestCase(testcase.D8TestCase): def",
"path, strip the root and replace it with our # local root. script",
"files.append(script) files.extend([ self._get_source_path(), os.path.join(self.suite.root, \"testharness-after.js\") ]) return files def _get_source_path(self): # All tests",
"with our # local root. script = os.path.join(self.suite.testroot, script[len(WPT_ROOT):]) elif not script.startswith(\"/\"): #",
"elif not script.startswith(\"/\"): # Matched a relative path, prepend this test's directory. thisdir",
"governed by a BSD-style license that can be # found in the LICENSE",
"prepend this test's directory. thisdir = os.path.dirname(self._get_source_path()) script = os.path.join(thisdir, script) else: raise",
"= \".any.js\" WPT_ROOT = \"/wasm/jsapi/\" META_SCRIPT_REGEXP = re.compile(r\"META:\\s*script=(.*)\") class TestSuite(testsuite.TestSuite): def __init__(self, *args,",
"self).__init__(*args, **kwargs) self.testroot = os.path.join(self.root, \"data\", \"test\", \"js-api\") self.mjsunit_js = os.path.join(os.path.dirname(self.root), \"mjsunit\", \"mjsunit.js\")",
"x in dirs if x.startswith(\".\")]: dirs.remove(dotted) dirs.sort() files.sort() for filename in files: if",
"can be # found in the LICENSE file. import os import re from",
"reserved. # Use of this source code is governed by a BSD-style license",
"self.mjsunit_js = os.path.join(os.path.dirname(self.root), \"mjsunit\", \"mjsunit.js\") def ListTests(self): tests = [] for dirname, dirs,",
"# Use of this source code is governed by a BSD-style license that",
"\"mjsunit\", \"mjsunit.js\") def ListTests(self): tests = [] for dirname, dirs, files in os.walk(self.testroot):",
"filename in files: if (filename.endswith(ANY_JS)): fullpath = os.path.join(dirname, filename) relpath = fullpath[len(self.testroot) +",
"]) return files def _get_source_path(self): # All tests are named `path/name.any.js` return os.path.join(self.suite.testroot,",
"\"/\") test = self._create_test(testname) tests.append(test) return tests def _test_class(self): return TestCase class TestCase(testcase.D8TestCase):",
"TestCase class TestCase(testcase.D8TestCase): def _get_files_params(self): files = [os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root, \"testharness.js\")] source = self.get_source()",
"that can be # found in the LICENSE file. import os import re",
"script in META_SCRIPT_REGEXP.findall(source): if script.startswith(WPT_ROOT): # Matched an absolute path, strip the root",
"files.sort() for filename in files: if (filename.endswith(ANY_JS)): fullpath = os.path.join(dirname, filename) relpath =",
"os.walk(self.testroot): for dotted in [x for x in dirs if x.startswith(\".\")]: dirs.remove(dotted) dirs.sort()",
"test's directory. thisdir = os.path.dirname(self._get_source_path()) script = os.path.join(thisdir, script) else: raise Exception(\"Unexpected absolute",
"tests.append(test) return tests def _test_class(self): return TestCase class TestCase(testcase.D8TestCase): def _get_files_params(self): files =",
"script.startswith(WPT_ROOT): # Matched an absolute path, strip the root and replace it with",
"# found in the LICENSE file. import os import re from testrunner.local import",
"[x for x in dirs if x.startswith(\".\")]: dirs.remove(dotted) dirs.sort() files.sort() for filename in",
"our # local root. script = os.path.join(self.suite.testroot, script[len(WPT_ROOT):]) elif not script.startswith(\"/\"): # Matched",
"= os.path.join(self.root, \"data\", \"test\", \"js-api\") self.mjsunit_js = os.path.join(os.path.dirname(self.root), \"mjsunit\", \"mjsunit.js\") def ListTests(self): tests",
"if script.startswith(WPT_ROOT): # Matched an absolute path, strip the root and replace it",
"in dirs if x.startswith(\".\")]: dirs.remove(dotted) dirs.sort() files.sort() for filename in files: if (filename.endswith(ANY_JS)):",
"tests def _test_class(self): return TestCase class TestCase(testcase.D8TestCase): def _get_files_params(self): files = [os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root,",
"\\\"%s\\\"\" % script); files.append(script) files.extend([ self._get_source_path(), os.path.join(self.suite.root, \"testharness-after.js\") ]) return files def _get_source_path(self):",
"= self._create_test(testname) tests.append(test) return tests def _test_class(self): return TestCase class TestCase(testcase.D8TestCase): def _get_files_params(self):",
"in META_SCRIPT_REGEXP.findall(source): if script.startswith(WPT_ROOT): # Matched an absolute path, strip the root and",
"# Matched a relative path, prepend this test's directory. thisdir = os.path.dirname(self._get_source_path()) script",
"thisdir = os.path.dirname(self._get_source_path()) script = os.path.join(thisdir, script) else: raise Exception(\"Unexpected absolute path for",
"Matched a relative path, prepend this test's directory. thisdir = os.path.dirname(self._get_source_path()) script =",
"in files: if (filename.endswith(ANY_JS)): fullpath = os.path.join(dirname, filename) relpath = fullpath[len(self.testroot) + 1",
"self.testroot = os.path.join(self.root, \"data\", \"test\", \"js-api\") self.mjsunit_js = os.path.join(os.path.dirname(self.root), \"mjsunit\", \"mjsunit.js\") def ListTests(self):",
"META_SCRIPT_REGEXP.findall(source): if script.startswith(WPT_ROOT): # Matched an absolute path, strip the root and replace",
"license that can be # found in the LICENSE file. import os import",
"from testrunner.local import testsuite from testrunner.objects import testcase ANY_JS = \".any.js\" WPT_ROOT =",
"TestSuite(testsuite.TestSuite): def __init__(self, *args, **kwargs): super(TestSuite, self).__init__(*args, **kwargs) self.testroot = os.path.join(self.root, \"data\", \"test\",",
"os.path.join(self.suite.root, \"testharness.js\")] source = self.get_source() for script in META_SCRIPT_REGEXP.findall(source): if script.startswith(WPT_ROOT): # Matched",
"dotted in [x for x in dirs if x.startswith(\".\")]: dirs.remove(dotted) dirs.sort() files.sort() for",
"for x in dirs if x.startswith(\".\")]: dirs.remove(dotted) dirs.sort() files.sort() for filename in files:",
"relpath.replace(os.path.sep, \"/\") test = self._create_test(testname) tests.append(test) return tests def _test_class(self): return TestCase class",
"# local root. script = os.path.join(self.suite.testroot, script[len(WPT_ROOT):]) elif not script.startswith(\"/\"): # Matched a",
"from testrunner.objects import testcase ANY_JS = \".any.js\" WPT_ROOT = \"/wasm/jsapi/\" META_SCRIPT_REGEXP = re.compile(r\"META:\\s*script=(.*)\")",
"[os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root, \"testharness.js\")] source = self.get_source() for script in META_SCRIPT_REGEXP.findall(source): if script.startswith(WPT_ROOT): #",
"META_SCRIPT_REGEXP = re.compile(r\"META:\\s*script=(.*)\") class TestSuite(testsuite.TestSuite): def __init__(self, *args, **kwargs): super(TestSuite, self).__init__(*args, **kwargs) self.testroot",
"os.path.join(self.suite.root, \"testharness-after.js\") ]) return files def _get_source_path(self): # All tests are named `path/name.any.js`",
"= fullpath[len(self.testroot) + 1 : -len(ANY_JS)] testname = relpath.replace(os.path.sep, \"/\") test = self._create_test(testname)",
"the LICENSE file. import os import re from testrunner.local import testsuite from testrunner.objects",
"os import re from testrunner.local import testsuite from testrunner.objects import testcase ANY_JS =",
"the V8 project authors. All rights reserved. # Use of this source code",
"self._get_source_path(), os.path.join(self.suite.root, \"testharness-after.js\") ]) return files def _get_source_path(self): # All tests are named",
"_test_class(self): return TestCase class TestCase(testcase.D8TestCase): def _get_files_params(self): files = [os.path.join(self.suite.mjsunit_js), os.path.join(self.suite.root, \"testharness.js\")] source",
"files: if (filename.endswith(ANY_JS)): fullpath = os.path.join(dirname, filename) relpath = fullpath[len(self.testroot) + 1 :",
"__init__(self, *args, **kwargs): super(TestSuite, self).__init__(*args, **kwargs) self.testroot = os.path.join(self.root, \"data\", \"test\", \"js-api\") self.mjsunit_js"
] |
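META_SCRIPT_REGEXP drives the whole file-list assembly above: WPT-style `.any.js` tests declare their script dependencies in `// META: script=...` comment lines, and _get_files_params splices the referenced files between the harness prelude and the test source. A minimal sketch of what the regexp extracts; the sample source text here is invented for illustration, not taken from a real test:

import re

META_SCRIPT_REGEXP = re.compile(r"META:\s*script=(.*)")

# Hypothetical test source; real .any.js files carry META lines like these.
source = ("// META: script=/wasm/jsapi/wasm-module-builder.js\n"
          "// META: script=assertions.js\n")
print(META_SCRIPT_REGEXP.findall(source))
# ['/wasm/jsapi/wasm-module-builder.js', 'assertions.js']

The first match starts with WPT_ROOT and would be remapped under self.suite.testroot; the second is relative and would be resolved against the test's own directory.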
# test/test_multi_args.py
import unittest

from paiargparse import PAIArgumentParser, RequiredArgumentError
from test.dataclasse_setup import Level1b, Level2, Level1, Level2a


class TestPAIParser(unittest.TestCase):
    def test_three_dc(self):
        parser = PAIArgumentParser()
        parser.add_root_argument("arg0", Level1b)
        parser.add_root_argument("arg1", Level2)
        parser.add_root_argument("arg2", Level2a)
        parser.add_root_argument("arg3", Level1)
        args = parser.parse_args(args=["--arg0.p1", "0", "--arg2.p1a", "0.5"])
        self.assertIsInstance(args.arg0, Level1b)
        self.assertIsInstance(args.arg1, Level2)
        self.assertIsInstance(args.arg2, Level2a)
        self.assertIsInstance(args.arg3, Level1)

    def test_three_dc_required(self):
        parser = PAIArgumentParser()
        parser.add_root_argument("arg0", Level1b)
        parser.add_root_argument("arg1", Level2)
        parser.add_root_argument("arg2", Level2a)
        parser.add_root_argument("arg3", Level1)
        with self.assertRaises(RequiredArgumentError):
            parser.parse_args(args=["--arg3.p1", "0", "--arg2.p1a", "0.5"])


if __name__ == "__main__":
    unittest.main()
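The tests import Level1, Level1b, Level2 and Level2a from test.dataclasse_setup, which is not reproduced here. Below is a minimal stand-in that would make both tests behave as written, inferred only from the flags used above (--arg0.p1, --arg3.p1, --arg2.p1a): every field name, type and default is an assumption, and the real module may use paiargparse-specific decorators rather than plain dataclasses.

# Hypothetical sketch of test/dataclasse_setup.py, not the real file.
from dataclasses import dataclass


@dataclass
class Level1:
    p1: int = 0       # defaulted, so --arg3.p1 is optional in test_three_dc


@dataclass
class Level1b:
    p1: int           # no default: omitting --arg0.p1, as in
                      # test_three_dc_required, should raise


@dataclass
class Level2:
    pass              # no fields are set on the command line in these tests


@dataclass
class Level2a:
    p1a: float = 0.0  # set via --arg2.p1a in both tests

Under these assumptions, test_three_dc succeeds because the only required field (Level1b.p1) is supplied, while test_three_dc_required omits it and trips RequiredArgumentError.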
# -*- coding: utf-8 -*-
"""
Class implementation for solving of the wave equation
u_tt = (c**2*u_x)_x + f(x,t) with t in [0,T] and x in (0,L).
We have u=U_0 or du/dn=0 on x=0, and u=u_L or du/dn=0 on
x = L. For simplicity, we use a constant c here and compare
with a known exact solution.
"""
import time, glob, shutil, os
import numpy as np


class Parameters(object):
    def __init__(self):
        """
        Subclasses must initialize self.prm with parameters and
        default values, self.type with the corresponding types,
        and self.help with the corresponding descriptions of
        parameters. self.type and self.help are optional, but
        self.prms must be complete and contain all parameters.
        """
        pass

    def ok(self):
        """Check if attributes prm, type, and help are defined."""
        if hasattr(self, 'prm') and \
           isinstance(self.prm, dict) and \
           hasattr(self, 'type') and \
           isinstance(self.type, dict) and \
           hasattr(self, 'help') and \
           isinstance(self.help, dict):
            return True
        else:
            raise ValueError(
                'The constructor in class %s does not '\
                'initialize the\ndictionaries '\
                'self.prm, self.type, self.help!' %
                self.__class__.__name__)

    def _illegal_parameter(self, name):
        """Raise exception about illegal parameter name."""
        raise ValueError(
            'parameter "%s" is not registered.\nLegal parameters are\n%s' %
            (name, ' '.join(list(self.prm.keys()))))

    def set(self, **parameters):
        """Set one or more parameters."""
        for name in parameters:
            if name in self.prm:
                self.prm[name] = parameters[name]
            else:
                self._illegal_parameter(name)

    def get(self, name):
        """Get one or more parameter values."""
        if isinstance(name, (list,tuple)):   # get many?
            for n in name:
                if n not in self.prm:
                    self._illegal_parameter(name)
            return [self.prm[n] for n in name]
        else:
            if name not in self.prm:
                self._illegal_parameter(name)
            return self.prm[name]

    def __getitem__(self, name):
        """Allow obj[name] indexing to look up a parameter."""
        return self.get(name)

    def __setitem__(self, name, value):
        """
        Allow obj[name] = value syntax to assign a parameter's value.
        """
        # Build the keyword dynamically; self.set(name=value) would set
        # the parameter literally called "name".
        return self.set(**{name: value})

    def define_command_line_options(self, parser=None):
        self.ok()
        if parser is None:
            import argparse
            parser = argparse.ArgumentParser()

        for name in self.prm:
            tp = self.type[name] if name in self.type else str
            help = self.help[name] if name in self.help else None
            parser.add_argument(
                '--' + name, default=self.get(name), metavar=name,
                type=tp, help=help)
        return parser

    def init_from_command_line(self, args):
        for name in self.prm:
            self.prm[name] = getattr(args, name)


class Problem(Parameters):
    """
    Physical parameters for the wave equation
    u_tt = (c**2*u_x)_x + f(x,t) with t in [0,T] and x in (0,L).
    The problem definition is implied by the method of
    manufactured solution, choosing u(x,t)=x(L-x)(1+t/2) as our
    solution. This solution should be exactly reproduced when
    c is const.
    """
    def __init__(self):
        self.prm  = dict(L=2.5, c=1.5, T=18)
        self.type = dict(L=float, c=float, T=float)
        self.help = dict(L='1D domain',
                         c='coefficient (wave velocity) in PDE',
                         T='end time of simulation')

    def u_exact(self, x, t):
        L = self['L']
        return x*(L-x)*(1+0.5*t)

    def I(self, x):
        return self.u_exact(x, 0)

    def V(self, x):
        return 0.5*self.u_exact(x, 0)

    def f(self, x, t):
        c = self['c']
        return 2*(1+0.5*t)*c**2

    def U_0(self, t):
        return self.u_exact(0, t)

    U_L = None


class Solver(Parameters):
    """
    Numerical parameters for solving the wave equation
    u_tt = (c**2*u_x)_x + f(x,t) with t in [0,T] and x in (0,L),
    choosing u(x,t)=x(L-x)(1+t/2) as our solution. This solution
    should be exactly reproduced, provided c is const. We
    simulate in [0, L/2] and apply a symmetry condition at the
    end x=L/2.
    """
    def __init__(self, problem):
        self.problem = problem
        self.prm  = dict(C = 0.75, Nx=3, stability_safety_factor=1.0)
        self.type = dict(C=float, Nx=int, stability_safety_factor=float)
        self.help = dict(C='Courant number',
                         Nx='No of spatial mesh points',
                         stability_safety_factor='stability factor')

        from UniformFDMesh import Mesh, Function
        # introduce some local help variables to ease reading
        L_end = self.problem['L']
        dx = (L_end/2)/float(self['Nx'])
        t_interval = self.problem['T']
        dt = dx*self['stability_safety_factor']*self['C']/ \
             float(self.problem['c'])
        self.m = Mesh(L=[0,L_end/2], d=[dx],
                      Nt = int(round(t_interval/float(dt))),
                      T=t_interval)
        # The mesh function f will, after solving, contain
        # the solution for the whole domain and all time steps.
        self.f = Function(self.m, num_comp=1, space_only=False)

    def solve(self, user_action=None, version='scalar'):
        # ...use local variables to ease reading
        L, c, T = self.problem['L c T'.split()]
        L = L/2  # only half the domain used (symmetry)
        C, Nx, stability_safety_factor = self[
            'C Nx stability_safety_factor'.split()]
        dx = self.m.d[0]
        I = self.problem.I
        V = self.problem.V
        f = self.problem.f
        U_0 = self.problem.U_0
        U_L = self.problem.U_L
        Nt = self.m.Nt
        t = np.linspace(0, T, Nt+1)   # Mesh points in time
        x = np.linspace(0, L, Nx+1)   # Mesh points in space
        # Make sure dx and dt are compatible with x and t
        dx = x[1] - x[0]
        dt = t[1] - t[0]

        # Treat c(x) as array
        if isinstance(c, (float,int)):
            c = np.zeros(x.shape) + c
        elif callable(c):
            # Call c(x) and fill array c
            c_ = np.zeros(x.shape)
            for i in range(Nx+1):
                c_[i] = c(x[i])
            c = c_

        q = c**2
        C2 = (dt/dx)**2; dt2 = dt*dt  # Help variables in the scheme

        # Wrap user-given f, I, V, U_0, U_L if None or 0
        if f is None or f == 0:
            f = (lambda x, t: 0) if version == 'scalar' else \
                lambda x, t: np.zeros(x.shape)
        if I is None or I == 0:
            I = (lambda x: 0) if version == 'scalar' else \
                lambda x: np.zeros(x.shape)
        if V is None or V == 0:
            V = (lambda x: 0) if version == 'scalar' else \
                lambda x: np.zeros(x.shape)
        if U_0 is not None:
            if isinstance(U_0, (float,int)) and U_0 == 0:
                U_0 = lambda t: 0
        if U_L is not None:
            if isinstance(U_L, (float,int)) and U_L == 0:
                U_L = lambda t: 0

        # Make hash of all input data
        import hashlib, inspect
        data = inspect.getsource(I) + '_' + inspect.getsource(V) + \
               '_' + inspect.getsource(f) + '_' + str(c) + '_' + \
               ('None' if U_0 is None else inspect.getsource(U_0)) + \
               ('None' if U_L is None else inspect.getsource(U_L)) + \
               '_' + str(L) + str(dt) + '_' + str(C) + '_' + str(T) + \
               '_' + str(stability_safety_factor)
        hashed_input = hashlib.sha1(data).hexdigest()
        if os.path.isfile('.' + hashed_input + '_archive.npz'):
            # Simulation is already run
            return -1, hashed_input

        # use local variables to make code closer to mathematical
        # notation in computational scheme
        u_1 = self.f.u[0,:]
        u   = self.f.u[1,:]

        import time;  t0 = time.clock()  # CPU time measurement
        Ix = range(0, Nx+1)
        It = range(0, Nt+1)

        # Load initial condition into u_1
        for i in range(0,Nx+1):
            u_1[i] = I(x[i])

        if user_action is not None:
            user_action(u_1, x, t, 0)

        # Special formula for the first step
        for i in Ix[1:-1]:
            u[i] = u_1[i] + dt*V(x[i]) + \
            0.5*C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \
                    0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \
            0.5*dt2*f(x[i], t[0])

        i = Ix[0]
        if U_0 is None:
            # Set boundary values (x=0: i-1 -> i+1 since u[i-1]=u[i+1]
            # when du/dn = 0, on x=L: i+1 -> i-1 since u[i+1]=u[i-1])
            ip1 = i+1
            im1 = ip1  # i-1 -> i+1
            u[i] = u_1[i] + dt*V(x[i]) + \
                   0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \
                           0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \
                   0.5*dt2*f(x[i], t[0])
        else:
            u[i] = U_0(dt)

        i = Ix[-1]
        if U_L is None:
            im1 = i-1
            ip1 = im1  # i+1 -> i-1
            u[i] = u_1[i] + dt*V(x[i]) + \
                   0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \
                           0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \
                   0.5*dt2*f(x[i], t[0])
        else:
            u[i] = U_L(dt)

        if user_action is not None:
            user_action(u, x, t, 1)

        for n in It[1:-1]:
            # u corresponds to u^{n+1} in the mathematical scheme
            u_2 = self.f.u[n-1,:]
            u_1 = self.f.u[n,:]
            u   = self.f.u[n+1,:]

            # Update all inner points
            if version == 'scalar':
                for i in Ix[1:-1]:
                    u[i] = - u_2[i] + 2*u_1[i] + \
                        C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \
                            0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \
                        dt2*f(x[i], t[n])
            elif version == 'vectorized':
                u[1:-1] = - u_2[1:-1] + 2*u_1[1:-1] + \
                    C2*(0.5*(q[1:-1] + q[2:])*(u_1[2:] - u_1[1:-1]) -
                        0.5*(q[1:-1] + q[:-2])*(u_1[1:-1] - u_1[:-2])) + \
                    dt2*f(x[1:-1], t[n])
            else:
                raise ValueError('version=%s' % version)

            # Insert boundary conditions
            i = Ix[0]
            if U_0 is None:
                # Set boundary values
                # x=0: i-1 -> i+1 since u[i-1]=u[i+1] when du/dn=0
                # x=L: i+1 -> i-1 since u[i+1]=u[i-1] when du/dn=0
                ip1 = i+1
                im1 = ip1
                u[i] = - u_2[i] + 2*u_1[i] + \
                    C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \
                        0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \
                    dt2*f(x[i], t[n])
            else:
                u[i] = U_0(t[n+1])

            i = Ix[-1]
            if U_L is None:
                im1 = i-1
                ip1 = im1
                u[i] = - u_2[i] + 2*u_1[i] + \
                    C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \
                        0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \
                    dt2*f(x[i], t[n])
            else:
                u[i] = U_L(t[n+1])

            if user_action is not None:
                if user_action(u, x, t, n+1):
                    break

        cpu_time = time.clock() - t0
        return cpu_time, hashed_input

    def assert_no_error(self):
        """Run through mesh and check error"""
        Nx = self['Nx']
        Nt = self.m.Nt
        L, T = self.problem['L T'.split()]
        L = L/2  # only half the domain used (symmetry)
        x = np.linspace(0, L, Nx+1)  # Mesh points in space
        t = np.linspace(0, T, Nt+1)  # Mesh points in time
        for n in range(len(t)):
            u_e = self.problem.u_exact(x, t[n])
            diff = np.abs(self.f.u[n,:] - u_e).max()
            print 'diff:', diff
            tol = 1E-13
            assert diff < tol


def test_quadratic_with_classes():
    """
    Check the scalar and vectorized versions for a quadratic
    u(x,t)=x(L-x)(1+t/2) that is exactly reproduced, provided
    c(x) is constant. We simulate in [0, L/2] and apply a
    symmetry condition at the end x=L/2.
    """
    problem = Problem()
    solver = Solver(problem)
    # Read input from the command line
    parser = problem.define_command_line_options()
    parser = solver.define_command_line_options(parser)
    args = parser.parse_args()
    problem.init_from_command_line(args)
    solver.init_from_command_line(args)
    print parser.parse_args()  # parameters ok?
    solver.solve()
    print 'Check error.........................'
    solver.assert_no_error()
"= Function(self.m, num_comp=1, space_only=False) def solve(self, user_action=None, version='scalar'): # ...use local variables to",
"i in Ix[1:-1]: u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[i+1])*(u_1[i+1]",
"type=tp, help=help) return parser def init_from_command_line(self, args): for name in self.prm: self.prm[name] =",
"[0, L/2] and apply a symmetry condition at the end x=L/2. \"\"\" problem",
"in range(len(t)): u_e = self.problem.u_exact(x, t[n]) diff = np.abs(self.f.u[n,:] - u_e).max() print 'diff:',",
"+ '_' + str(C) + '_' + str(T) + \\ '_' + str(stability_safety_factor)",
"the corresponding types, and self.help with the corresponding descriptions of parameters. self.type and",
"self.prm[name] = parameters[name] else: self._illegal_parameter(name) def get(self, name): \"\"\"Get one or more parameter",
"self.f.u[1,:] import time; t0 = time.clock() # CPU time measurement Ix = range(0,",
"def __init__(self, problem): self.problem = problem self.prm = dict(C = 0.75, Nx=3, stability_safety_factor=1.0)",
"u(x,t)=x(L-x)(1+t/2) that is exactly reproduced, provided c(x) is constant. We simulate in [0,",
"if U_L is None: im1 = i-1 ip1 = im1 u[i] = -",
"if U_L is not None: if isinstance(U_L, (float,int)) and U_L == 0: U_L",
"\"\"\"Raise exception about illegal parameter name.\"\"\" raise ValueError( 'parameter \"%s\" is not registered.\\nLegal",
"registered.\\nLegal '\\ 'parameters are\\n%s' % (name, ' '.join(list(self.prm.keys())))) def set(self, **parameters): \"\"\"Set one",
"0: I = (lambda x: 0) if version == 'scalar' else \\ lambda",
"= range(0, Nx+1) It = range(0, Nt+1) # Load initial condition into u_1",
"i-1 -> i+1 since u[i-1]=u[i+1] when du/dn=0 # x=L: i+1 -> i-1 since",
"parameters for the wave equation u_tt = (c**2*u_x)_x + f(x,t) with t in",
"Problem(Parameters): \"\"\" Physical parameters for the wave equation u_tt = (c**2*u_x)_x + f(x,t)",
"-1, hashed_input # use local variables to make code closer to mathematical #",
"x=L/2. \"\"\" problem = Problem() solver = Solver(problem) # Read input from the",
"return x*(L-x)*(1+0.5*t) def I(self, x): return self.u_exact(x, 0) def V(self, x): return 0.5*self.u_exact(x,",
"\\ lambda x, t: np.zeros(x.shape) if I is None or I == 0:",
"t: np.zeros(x.shape) if I is None or I == 0: I = (lambda",
"t in [0,T] and x in (0,L). The problem definition is implied by",
"= - u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) -",
"one or more parameter values.\"\"\" if isinstance(name, (list,tuple)): # get many? for n",
"self.m.Nt t = np.linspace(0, T, Nt+1) # Mesh points in time x =",
"None or V == 0: V = (lambda x: 0) if version ==",
"in space t = np.linspace(0, T, Nt+1) # Mesh points in time for",
"raise ValueError( 'parameter \"%s\" is not registered.\\nLegal '\\ 'parameters are\\n%s' % (name, '",
"fill array c c_ = np.zeros(x.shape) for i in range(Nx+1): c_[i] = c(x[i])",
"u_e).max() print 'diff:', diff tol = 1E-13 assert diff < tol def test_quadratic_with_classes():",
"data import hashlib, inspect data = inspect.getsource(I) + '_' + inspect.getsource(V) + \\",
"V == 0: V = (lambda x: 0) if version == 'scalar' else",
"= self.f.u[n,:] u = self.f.u[n+1,:] # Update all inner points if version ==",
"+ dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \\ 0.5*(q[i] +",
"t[0]) i = Ix[0] if U_0 is None: # Set boundary values (x=0:",
"self.f = Function(self.m, num_comp=1, space_only=False) def solve(self, user_action=None, version='scalar'): # ...use local variables",
"C2 = (dt/dx)**2; dt2 = dt*dt # Help variables in the scheme #",
"x=0: i-1 -> i+1 since u[i-1]=u[i+1] when du/dn=0 # x=L: i+1 -> i-1",
"[0, L/2] and apply a symmetry condition at the end x=L/2. \"\"\" def",
"parser is None: import argparse parser = argparse.ArgumentParser() for name in self.prm: tp",
"types, and self.help with the corresponding descriptions of parameters. self.type and self.help are",
"print parser.parse_args() # parameters ok? solver.solve() print 'Check error.........................' solver.assert_no_error() if __name__ ==",
"= range(0, Nt+1) # Load initial condition into u_1 for i in range(0,Nx+1):",
"str help = self.help[name] if name in self.help else None parser.add_argument( '--' +",
"the scheme # Wrap user-given f, I, V, U_0, U_L if None or",
"0.5*dt2*f(x[i], t[0]) else: u[i] = U_0(dt) i = Ix[-1] if U_L is None:",
"+ '_' + str(T) + \\ '_' + str(stability_safety_factor) hashed_input = hashlib.sha1(data).hexdigest() if",
"the scalar and vectorized versions for a quadratic u(x,t)=x(L-x)(1+t/2) that is exactly reproduced,",
"\"\"\" pass def ok(self): \"\"\"Check if attr. prm, type, and help are defined.\"\"\"",
"self.help!' % self.__class__.__name__) def _illegal_parameter(self, name): \"\"\"Raise exception about illegal parameter name.\"\"\" raise",
"c here and compare with a known exact solution. \"\"\" import time, glob,",
"parameters. \"\"\" pass def ok(self): \"\"\"Check if attr. prm, type, and help are",
"q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \\ 0.5*dt2*f(x[i],",
"assert_no_error(self): \"\"\"Run through mesh and check error\"\"\" Nx = self['Nx'] Nt = self.m.Nt",
"== 0: f = (lambda x, t: 0) if version == 'scalar' else",
"lambda x: np.zeros(x.shape) if V is None or V == 0: V =",
"Subclasses must initialize self.prm with parameters and default values, self.type with the corresponding",
"= Ix[0] if U_0 is None: # Set boundary values # x=0: i-1",
"= time.clock() # CPU time measurement Ix = range(0, Nx+1) It = range(0,",
"as np class Parameters(object): def __init__(self): \"\"\" Subclasses must initialize self.prm with parameters",
"class Solver(Parameters): \"\"\" Numerical parameters for solving the wave equation u_tt = (c**2*u_x)_x",
"\"\"\" Physical parameters for the wave equation u_tt = (c**2*u_x)_x + f(x,t) with",
"dict(L='1D domain', c='coefficient (wave velocity) in PDE', T='end time of simulation') def u_exact(self,",
"Nx, stability_safety_factor = self[ 'C Nx stability_safety_factor'.split()] dx = self.m.d[0] I = self.problem.I",
"- u_1[i]) - \\ 0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \\ 0.5*dt2*f(x[i], t[0])",
"name in self.prm: tp = self.type[name] if name in self.type else str help",
"= I(x[i]) if user_action is not None: user_action(u_1, x, t, 0) # Special",
"for n in It[1:-1]: # u corresponds to u^{n+1} in the mathematical scheme",
"step for i in Ix[1:-1]: u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i]",
"dt2*f(x[i], t[n]) elif version == 'vectorized': u[1:-1] = - u_2[1:-1] + 2*u_1[1:-1] +",
"constant c here and compare with a known exact solution. \"\"\" import time,",
"(name, ' '.join(list(self.prm.keys())))) def set(self, **parameters): \"\"\"Set one or more parameters.\"\"\" for name",
"in self.prm: self._illegal_parameter(name) return [self.prm[n] for n in name] else: if name not",
"= dict(L=2.5, c=1.5, T=18) self.type = dict(L=float, c=float, T=float) self.help = dict(L='1D domain',",
"'diff:', diff tol = 1E-13 assert diff < tol def test_quadratic_with_classes(): \"\"\" Check",
"UniformFDMesh import Mesh, Function # introduce some local help variables to ease reading",
"0, on x=L: i+1 -> i-1 since u[i+1]=u[i-1]) ip1 = i+1 im1 =",
"simulation') def u_exact(self, x, t): L = self['L'] return x*(L-x)*(1+0.5*t) def I(self, x):",
"None or f == 0: f = (lambda x, t: 0) if version",
"'self.prm, self.type, self.help!' % self.__class__.__name__) def _illegal_parameter(self, name): \"\"\"Raise exception about illegal parameter",
"\"\"\" Subclasses must initialize self.prm with parameters and default values, self.type with the",
"name in self.type else str help = self.help[name] if name in self.help else",
"name] else: if name not in self.prm: self._illegal_parameter(name) return self.prm[name] def __getitem__(self, name):",
"lambda t: 0 if U_L is not None: if isinstance(U_L, (float,int)) and U_L",
"None: user_action(u, x, t, 1) for n in It[1:-1]: # u corresponds to",
"one or more parameters.\"\"\" for name in parameters: if name in self.prm: self.prm[name]",
"Mesh points in time for n in range(len(t)): u_e = self.problem.u_exact(x, t[n]) diff",
"0) # Special formula for the first step for i in Ix[1:-1]: u[i]",
"name not in self.prm: self._illegal_parameter(name) return self.prm[name] def __getitem__(self, name): \"\"\"Allow obj[name] indexing",
"self.type = dict(C=float, Nx=int, stability_safety_factor=float) self.help = dict(C='Courant number', Nx='No of spatial mesh",
"is not None: user_action(u, x, t, 1) for n in It[1:-1]: # u",
"L = L/2 # only half the domain used (symmetry) x = np.linspace(0,",
"isinstance(self.help, dict): return True else: raise ValueError( 'The constructor in class %s does",
"more parameters.\"\"\" for name in parameters: if name in self.prm: self.prm[name] = parameters[name]",
"name): \"\"\"Get one or more parameter values.\"\"\" if isinstance(name, (list,tuple)): # get many?",
"u_1 = self.f.u[0,:] u = self.f.u[1,:] import time; t0 = time.clock() # CPU",
"c = np.zeros(x.shape) + c elif callable(c): # Call c(x) and fill array",
"+ str(c) + '_' + \\ ('None' if U_0 is None else inspect.getsource(U_0))",
"u(x,t)=x(L-x)(1+t/2) as our solution. This solution should be exactly reproduced when c is",
"for i in range(0,Nx+1): u_1[i] = I(x[i]) if user_action is not None: user_action(u_1,",
"I(x[i]) if user_action is not None: user_action(u_1, x, t, 0) # Special formula",
"i+1 im1 = ip1 # i-1 -> i+1 u[i] = u_1[i] + dt*V(x[i])",
"x = L. For simplicity, we use a constant c here and compare",
"points in time for n in range(len(t)): u_e = self.problem.u_exact(x, t[n]) diff =",
"isinstance(U_L, (float,int)) and U_L == 0: U_L = lambda t: 0 # Make",
"exception about illegal parameter name.\"\"\" raise ValueError( 'parameter \"%s\" is not registered.\\nLegal '\\",
"u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \\ 0.5*(q[i]",
"self.help are optional, but self.prms must be complete and contain all parameters. \"\"\"",
"os.path.isfile('.' + hashed_input + '_archive.npz'): # Simulation is already run return -1, hashed_input",
"Solver(problem) # Read input from the command line parser = problem.define_command_line_options() parser =",
"and contain all parameters. \"\"\" pass def ok(self): \"\"\"Check if attr. prm, type,",
"('None' if U_L is None else inspect.getsource(U_L)) + \\ '_' + str(L) +",
"\\ dt2*f(x[i], t[n]) else: u[i] = U_L(t[n+1]) if user_action is not None: if",
"+ '_' + \\ ('None' if U_0 is None else inspect.getsource(U_0)) + \\",
"is None: import argparse parser = argparse.ArgumentParser() for name in self.prm: tp =",
"t[n]) else: raise ValueError('version=%s' % version) # Insert boundary conditions i = Ix[0]",
"(0,L). We have u=U_0 or du/dn=0 on x=0, and u=u_L or du/dn=0 on",
"is None or I == 0: I = (lambda x: 0) if version",
"with the corresponding types, and self.help with the corresponding descriptions of parameters. self.type",
"= self.problem.U_L Nt = self.m.Nt t = np.linspace(0, T, Nt+1) # Mesh points",
"- \\ 0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \\ 0.5*dt2*f(x[i], t[0]) else: u[i]",
"and dt are compatible with x and t dx = x[1] - x[0]",
"dict(L=2.5, c=1.5, T=18) self.type = dict(L=float, c=float, T=float) self.help = dict(L='1D domain', c='coefficient",
"return 0.5*self.u_exact(x, 0) def f(self, x, t): c = self['c'] return 2*(1+0.5*t)*c**2 def",
"f(x,t) with t in [0,T] and x in (0,L). The problem definition is",
"= self.m.d[0] I = self.problem.I V = self.problem.V f = self.problem.f U_0 =",
"2*(1+0.5*t)*c**2 def U_0(self, t): return self.u_exact(0, t) U_L = None class Solver(Parameters): \"\"\"",
"- \\ 0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \\ dt2*f(x[i], t[n]) else: u[i]",
"+ '_' + str(c) + '_' + \\ ('None' if U_0 is None",
"not None: if isinstance(U_L, (float,int)) and U_L == 0: U_L = lambda t:",
"solution should be exactly reproduced when c is const. \"\"\" def __init__(self): self.prm",
"0.75, Nx=3, stability_safety_factor=1.0) self.type = dict(C=float, Nx=int, stability_safety_factor=float) self.help = dict(C='Courant number', Nx='No",
"T = self.problem['L c T'.split()] L = L/2 # compute with half the",
"U_L(t[n+1]) if user_action is not None: if user_action(u, x, t, n+1): break cpu_time",
"Ix[-1] if U_L is None: im1 = i-1 ip1 = im1 u[i] =",
"but self.prms must be complete and contain all parameters. \"\"\" pass def ok(self):",
"shutil, os import numpy as np class Parameters(object): def __init__(self): \"\"\" Subclasses must",
"im1 # i+1 -> i-1 u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i]",
"isinstance(name, (list,tuple)): # get many? for n in name: if n not in",
"= dt*dt # Help variables in the scheme # Wrap user-given f, I,",
"parser = problem.define_command_line_options() parser = solver. define_command_line_options(parser) args = parser.parse_args() problem.init_from_command_line(args) solver. init_from_command_line(args)",
"t): return self.u_exact(0, t) U_L = None class Solver(Parameters): \"\"\" Numerical parameters for",
"= np.linspace(0, L, Nx+1) # Mesh points in space t = np.linspace(0, T,",
"U_0 is None: # Set boundary values (x=0: i-1 -> i+1 since u[i-1]=u[i+1]",
"const. \"\"\" def __init__(self): self.prm = dict(L=2.5, c=1.5, T=18) self.type = dict(L=float, c=float,",
"'_' + str(C) + '_' + str(T) + \\ '_' + str(stability_safety_factor) hashed_input",
"0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \\ 0.5*dt2*f(x[i], t[0]) i = Ix[0] if",
"Nt = int(round(t_interval/float(dt))), T=t_interval) # The mesh function f will, after solving, contain",
"array if isinstance(c, (float,int)): c = np.zeros(x.shape) + c elif callable(c): # Call",
"x, t: 0) if version == 'scalar' else \\ lambda x, t: np.zeros(x.shape)",
"+ 2*u_1[1:-1] + \\ C2*(0.5*(q[1:-1] + q[2:])*(u_1[2:] - u_1[1:-1]) - 0.5*(q[1:-1] + q[:-2])*(u_1[1:-1]",
"init_from_command_line(args) print parser.parse_args() # parameters ok? solver.solve() print 'Check error.........................' solver.assert_no_error() if __name__",
"in name] else: if name not in self.prm: self._illegal_parameter(name) return self.prm[name] def __getitem__(self,",
"with half the domain only (symmetry) C, Nx, stability_safety_factor = self[ 'C Nx",
"solution. This solution should be exactly reproduced, provided c is const. We simulate",
"with t in [0,T] and x in (0,L). The problem definition is implied",
"- u_1[i]) - \\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \\ dt2*f(x[i], t[n])",
"descriptions of parameters. self.type and self.help are optional, but self.prms must be complete",
"since u[i+1]=u[i-1] when du/dn=0 ip1 = i+1 im1 = ip1 u[i] = -",
"Load initial condition into u_1 for i in range(0,Nx+1): u_1[i] = I(x[i]) if",
"+ str(C) + '_' + str(T) + \\ '_' + str(stability_safety_factor) hashed_input =",
"u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i]",
"else: raise ValueError('version=%s' % version) # Insert boundary conditions i = Ix[0] if",
"For simplicity, we use a constant c here and compare with a known",
"It = range(0, Nt+1) # Load initial condition into u_1 for i in",
"exact solution. \"\"\" import time, glob, shutil, os import numpy as np class",
"c(x[i]) c = c_ q = c**2 C2 = (dt/dx)**2; dt2 = dt*dt",
"dict(C=float, Nx=int, stability_safety_factor=float) self.help = dict(C='Courant number', Nx='No of spatial mesh points', stability_safety_factor='stability",
"+ q[i-1])*(u_1[i] - u_1[i-1])) + \\ dt2*f(x[i], t[n]) elif version == 'vectorized': u[1:-1]",
"coding: utf-8 -*- \"\"\" Class implementation for solving of the wave equation u_tt",
"x: np.zeros(x.shape) if U_0 is not None: if isinstance(U_0, (float,int)) and U_0 ==",
"solving, contain # the solution for the whole domain and all time steps.",
"None else inspect.getsource(U_L)) + \\ '_' + str(L) + str(dt) + '_' +",
"c = self['c'] return 2*(1+0.5*t)*c**2 def U_0(self, t): return self.u_exact(0, t) U_L =",
"%s does not '\\ 'initialize the\\ndictionaries '\\ 'self.prm, self.type, self.help!' % self.__class__.__name__) def",
"formula for the first step for i in Ix[1:-1]: u[i] = u_1[i] +",
"define_command_line_options(parser) args = parser.parse_args() problem.init_from_command_line(args) solver. init_from_command_line(args) print parser.parse_args() # parameters ok? solver.solve()",
"c(x) as array if isinstance(c, (float,int)): c = np.zeros(x.shape) + c elif callable(c):",
"initialize self.prm with parameters and default values, self.type with the corresponding types, and",
"are defined.\"\"\" if hasattr(self, 'prm') and \\ isinstance(self.prm, dict) and \\ hasattr(self, 'type')",
"0.5*self.u_exact(x, 0) def f(self, x, t): c = self['c'] return 2*(1+0.5*t)*c**2 def U_0(self,",
"u_tt = (c**2*u_x)_x + f(x,t) with t in [0,T] and x in (0,L).",
"- \\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \\ dt2*f(x[i], t[n]) elif version",
"for name in self.prm: tp = self.type[name] if name in self.type else str",
"'_' + str(c) + '_' + \\ ('None' if U_0 is None else",
"def __init__(self): \"\"\" Subclasses must initialize self.prm with parameters and default values, self.type",
"'scalar' else \\ lambda x: np.zeros(x.shape) if V is None or V ==",
"T'.split()] L = L/2 # only half the domain used (symmetry) x =",
"u=u_L or du/dn=0 on x = L. For simplicity, we use a constant",
"\"\"\"Check if attr. prm, type, and help are defined.\"\"\" if hasattr(self, 'prm') and",
"# Make sure dx and dt are compatible with x and t dx",
"num_comp=1, space_only=False) def solve(self, user_action=None, version='scalar'): # ...use local variables to ease reading",
"our solution. This solution should be exactly reproduced, provided c is const. We",
"getattr(args, name) class Problem(Parameters): \"\"\" Physical parameters for the wave equation u_tt =",
"to make code closer to mathematical # notation in computational scheme u_1 =",
"self._illegal_parameter(name) return self.prm[name] def __getitem__(self, name): \"\"\"Allow obj[name] indexing to look up a",
"+ str(T) + \\ '_' + str(stability_safety_factor) hashed_input = hashlib.sha1(data).hexdigest() if os.path.isfile('.' +",
"Simulation is already run return -1, hashed_input # use local variables to make",
"L_end = self.problem['L'] dx = (L_end/2)/float(self['Nx']) t_interval = self.problem['T'] dt = dx*self['stability_safety_factor']*self['C']/ \\",
"self.problem.U_0 U_L = self.problem.U_L Nt = self.m.Nt t = np.linspace(0, T, Nt+1) #",
"Ix = range(0, Nx+1) It = range(0, Nt+1) # Load initial condition into",
"ip1 # i-1 -> i+1 u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i]",
"f, I, V, U_0, U_L if None or 0 if f is None",
"time, glob, shutil, os import numpy as np class Parameters(object): def __init__(self): \"\"\"",
"must initialize self.prm with parameters and default values, self.type with the corresponding types,",
"self.type else str help = self.help[name] if name in self.help else None parser.add_argument(",
"parameters.\"\"\" for name in parameters: if name in self.prm: self.prm[name] = parameters[name] else:",
"C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) +",
"np.zeros(x.shape) if I is None or I == 0: I = (lambda x:",
"= self['c'] return 2*(1+0.5*t)*c**2 def U_0(self, t): return self.u_exact(0, t) U_L = None",
"numpy as np class Parameters(object): def __init__(self): \"\"\" Subclasses must initialize self.prm with",
"(symmetry) C, Nx, stability_safety_factor = self[ 'C Nx stability_safety_factor'.split()] dx = self.m.d[0] I",
"'parameters are\\n%s' % (name, ' '.join(list(self.prm.keys())))) def set(self, **parameters): \"\"\"Set one or more",
"= Ix[0] if U_0 is None: # Set boundary values (x=0: i-1 ->",
"+ f(x,t) with t in [0,T] and x in (0,L). We have u=U_0",
"parameters and default values, self.type with the corresponding types, and self.help with the",
"\\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \\ 0.5*dt2*f(x[i], t[0]) i = Ix[0]",
"attr. prm, type, and help are defined.\"\"\" if hasattr(self, 'prm') and \\ isinstance(self.prm,",
"and x in (0,L). We have u=U_0 or du/dn=0 on x=0, and u=u_L",
"# Mesh points in space t = np.linspace(0, T, Nt+1) # Mesh points",
"i+1 u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i])",
"[0,T] and x in (0,L). We have u=U_0 or du/dn=0 on x=0, and",
"Nt+1) # Mesh points in time for n in range(len(t)): u_e = self.problem.u_exact(x,",
"L/2 # only half the domain used (symmetry) x = np.linspace(0, L, Nx+1)",
"+ q[im1])*(u_1[i] - u_1[im1])) + \\ dt2*f(x[i], t[n]) else: u[i] = U_L(t[n+1]) if",
"return parser def init_from_command_line(self, args): for name in self.prm: self.prm[name] = getattr(args, name)",
"if V is None or V == 0: V = (lambda x: 0)",
"velocity) in PDE', T='end time of simulation') def u_exact(self, x, t): L =",
"lambda x, t: np.zeros(x.shape) if I is None or I == 0: I",
"dict(C = 0.75, Nx=3, stability_safety_factor=1.0) self.type = dict(C=float, Nx=int, stability_safety_factor=float) self.help = dict(C='Courant",
"the mathematical scheme u_2 = self.f.u[n-1,:] u_1 = self.f.u[n,:] u = self.f.u[n+1,:] #",
"(symmetry) x = np.linspace(0, L, Nx+1) # Mesh points in space t =",
"'\\ 'initialize the\\ndictionaries '\\ 'self.prm, self.type, self.help!' % self.__class__.__name__) def _illegal_parameter(self, name): \"\"\"Raise",
"to look up a parameter.\"\"\" return self.get(name) def __setitem__(self, name, value): \"\"\" Allow",
"i = Ix[0] if U_0 is None: # Set boundary values # x=0:",
"ValueError( 'parameter \"%s\" is not registered.\\nLegal '\\ 'parameters are\\n%s' % (name, ' '.join(list(self.prm.keys()))))",
"Function(self.m, num_comp=1, space_only=False) def solve(self, user_action=None, version='scalar'): # ...use local variables to ease",
"= c**2 C2 = (dt/dx)**2; dt2 = dt*dt # Help variables in the",
"and x in (0,L). The problem definition is implied by the method of",
"when du/dn = 0, on x=L: i+1 -> i-1 since u[i+1]=u[i-1]) ip1 =",
"when du/dn=0 # x=L: i+1 -> i-1 since u[i+1]=u[i-1] when du/dn=0 ip1 =",
"# Read input from the command line parser = problem.define_command_line_options() parser = solver.",
"inspect.getsource(V) + \\ '_' + inspect.getsource(f) + '_' + str(c) + '_' +",
"self.problem = problem self.prm = dict(C = 0.75, Nx=3, stability_safety_factor=1.0) self.type = dict(C=float,",
"np.linspace(0, T, Nt+1) # Mesh points in time x = np.linspace(0, L, Nx+1)",
"else \\ lambda x: np.zeros(x.shape) if V is None or V == 0:",
"(float,int)): c = np.zeros(x.shape) + c elif callable(c): # Call c(x) and fill",
"version == 'scalar' else \\ lambda x: np.zeros(x.shape) if U_0 is not None:",
"\"\"\" Numerical parameters for solving the wave equation u_tt = (c**2*u_x)_x + f(x,t)",
"# Help variables in the scheme # Wrap user-given f, I, V, U_0,",
"f is None or f == 0: f = (lambda x, t: 0)",
"t) U_L = None class Solver(Parameters): \"\"\" Numerical parameters for solving the wave",
"0: U_L = lambda t: 0 # Make hash of all input data",
"and \\ isinstance(self.type, dict) and \\ hasattr(self, 'help') and \\ isinstance(self.help, dict): return",
"\\ isinstance(self.type, dict) and \\ hasattr(self, 'help') and \\ isinstance(self.help, dict): return True",
"- u_e).max() print 'diff:', diff tol = 1E-13 assert diff < tol def",
"self.problem['T'] dt = dx*self['stability_safety_factor']*self['C']/ \\ float(self.problem['c']) self.m = Mesh(L=[0,L_end/2], d=[dx], Nt = int(round(t_interval/float(dt))),",
"[0,T] and x in (0,L). The problem definition is implied by the method",
"- 0.5*(q[1:-1] + q[:-2])*(u_1[1:-1] - u_1[:-2])) + \\ dt2*f(x[1:-1], t[n]) else: raise ValueError('version=%s'",
"import Mesh, Function # introduce some local help variables to ease reading L_end",
"self.prm[name] = getattr(args, name) class Problem(Parameters): \"\"\" Physical parameters for the wave equation",
"name, default=self.get(name), metavar=name, type=tp, help=help) return parser def init_from_command_line(self, args): for name in",
"values # x=0: i-1 -> i+1 since u[i-1]=u[i+1] when du/dn=0 # x=L: i+1",
"np class Parameters(object): def __init__(self): \"\"\" Subclasses must initialize self.prm with parameters and",
"None: # Set boundary values # x=0: i-1 -> i+1 since u[i-1]=u[i+1] when",
"Nt = self.m.Nt L, T = self.problem['L T'.split()] L = L/2 # only",
"user_action(u, x, t, n+1): break cpu_time = time.clock() - t0 return cpu_time, hashed_input",
"be complete and contain all parameters. \"\"\" pass def ok(self): \"\"\"Check if attr.",
"Ix[0] if U_0 is None: # Set boundary values (x=0: i-1 -> i+1",
"t[n]) diff = np.abs(self.f.u[n,:] - u_e).max() print 'diff:', diff tol = 1E-13 assert",
"scheme # Wrap user-given f, I, V, U_0, U_L if None or 0",
"error\"\"\" Nx = self['Nx'] Nt = self.m.Nt L, T = self.problem['L T'.split()] L",
"inspect.getsource(f) + '_' + str(c) + '_' + \\ ('None' if U_0 is",
"inspect.getsource(I) + '_' + inspect.getsource(V) + \\ '_' + inspect.getsource(f) + '_' +",
"as array if isinstance(c, (float,int)): c = np.zeros(x.shape) + c elif callable(c): #",
"return 2*(1+0.5*t)*c**2 def U_0(self, t): return self.u_exact(0, t) U_L = None class Solver(Parameters):",
"i+1 since u[i-1]=u[i+1] # when du/dn = 0, on x=L: i+1 -> i-1",
"= L. For simplicity, we use a constant c here and compare with",
"(dt/dx)**2; dt2 = dt*dt # Help variables in the scheme # Wrap user-given",
"\"\"\" return self.set(name=value) def define_command_line_options(self, parser=None): self.ok() if parser is None: import argparse",
"u_1[im1])) + \\ 0.5*dt2*f(x[i], t[0]) else: u[i] = U_0(dt) i = Ix[-1] if",
"+ \\ ('None' if U_0 is None else inspect.getsource(U_0)) + \\ ('None' if",
"parameters for solving the wave equation u_tt = (c**2*u_x)_x + f(x,t) with t",
"% self.__class__.__name__) def _illegal_parameter(self, name): \"\"\"Raise exception about illegal parameter name.\"\"\" raise ValueError(",
"2*u_1[1:-1] + \\ C2*(0.5*(q[1:-1] + q[2:])*(u_1[2:] - u_1[1:-1]) - 0.5*(q[1:-1] + q[:-2])*(u_1[1:-1] -",
"definition is implied by the method of manufactured solution, choosing u(x,t)=x(L-x)(1+t/2) as our",
"number', Nx='No of spatial mesh points', stability_safety_factor='stability factor') from UniformFDMesh import Mesh, Function",
"L = L/2 # compute with half the domain only (symmetry) C, Nx,",
"a constant c here and compare with a known exact solution. \"\"\" import",
"t[0] # Treat c(x) as array if isinstance(c, (float,int)): c = np.zeros(x.shape) +",
"in [0, L/2] and apply a symmetry condition at the end x=L/2. \"\"\"",
"def ok(self): \"\"\"Check if attr. prm, type, and help are defined.\"\"\" if hasattr(self,",
"'C Nx stability_safety_factor'.split()] dx = self.m.d[0] I = self.problem.I V = self.problem.V f",
"if version == 'scalar' else \\ lambda x, t: np.zeros(x.shape) if I is",
"\\ ('None' if U_0 is None else inspect.getsource(U_0)) + \\ ('None' if U_L",
"domain only (symmetry) C, Nx, stability_safety_factor = self[ 'C Nx stability_safety_factor'.split()] dx =",
"du/dn = 0, on x=L: i+1 -> i-1 since u[i+1]=u[i-1]) ip1 = i+1",
"return -1, hashed_input # use local variables to make code closer to mathematical",
"or du/dn=0 on x = L. For simplicity, we use a constant c",
"ease reading L, c, T = self.problem['L c T'.split()] L = L/2 #",
"np.abs(self.f.u[n,:] - u_e).max() print 'diff:', diff tol = 1E-13 assert diff < tol",
"or du/dn=0 on x=0, and u=u_L or du/dn=0 on x = L. For",
"is None or V == 0: V = (lambda x: 0) if version",
"U_L = lambda t: 0 # Make hash of all input data import",
"- u_1[i-1])) + \\ dt2*f(x[i], t[n]) elif version == 'vectorized': u[1:-1] = -",
"t0 return cpu_time, hashed_input def assert_no_error(self): \"\"\"Run through mesh and check error\"\"\" Nx",
"(0,L). The problem definition is implied by the method of manufactured solution, choosing",
"problem definition is implied by the method of manufactured solution, choosing u(x,t)=x(L-x)(1+t/2) as",
"lambda t: 0 # Make hash of all input data import hashlib, inspect",
"x, t: np.zeros(x.shape) if I is None or I == 0: I =",
"0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \\ 0.5*dt2*f(x[i], t[0]) else: u[i] = U_L(dt)",
"raise ValueError('version=%s' % version) # Insert boundary conditions i = Ix[0] if U_0",
"and fill array c c_ = np.zeros(x.shape) for i in range(Nx+1): c_[i] =",
"not None: if user_action(u, x, t, n+1): break cpu_time = time.clock() - t0",
"Nx stability_safety_factor'.split()] dx = self.m.d[0] I = self.problem.I V = self.problem.V f =",
"version) # Insert boundary conditions i = Ix[0] if U_0 is None: #",
"not registered.\\nLegal '\\ 'parameters are\\n%s' % (name, ' '.join(list(self.prm.keys())))) def set(self, **parameters): \"\"\"Set",
"u = self.f.u[1,:] import time; t0 = time.clock() # CPU time measurement Ix",
"+ \\ '_' + str(stability_safety_factor) hashed_input = hashlib.sha1(data).hexdigest() if os.path.isfile('.' + hashed_input +",
"half the domain only (symmetry) C, Nx, stability_safety_factor = self[ 'C Nx stability_safety_factor'.split()]",
"- u_1[i]) - \\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \\ 0.5*dt2*f(x[i], t[0])",
"Wrap user-given f, I, V, U_0, U_L if None or 0 if f",
"# use local variables to make code closer to mathematical # notation in",
"and u=u_L or du/dn=0 on x = L. For simplicity, we use a",
"t[1] - t[0] # Treat c(x) as array if isinstance(c, (float,int)): c =",
"def test_quadratic_with_classes(): \"\"\" Check the scalar and vectorized versions for a quadratic u(x,t)=x(L-x)(1+t/2)",
"the corresponding descriptions of parameters. self.type and self.help are optional, but self.prms must",
"in Ix[1:-1]: u[i] = - u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] + q[i+1])*(u_1[i+1]",
"+ \\ 0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i] + q[im1])*(u_1[i] -",
"im1 = ip1 u[i] = - u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] +",
"self.u_exact(0, t) U_L = None class Solver(Parameters): \"\"\" Numerical parameters for solving the",
"= argparse.ArgumentParser() for name in self.prm: tp = self.type[name] if name in self.type",
"u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) -",
"__init__(self): \"\"\" Subclasses must initialize self.prm with parameters and default values, self.type with",
"i in range(Nx+1): c_[i] = c(x[i]) c = c_ q = c**2 C2",
"dx*self['stability_safety_factor']*self['C']/ \\ float(self.problem['c']) self.m = Mesh(L=[0,L_end/2], d=[dx], Nt = int(round(t_interval/float(dt))), T=t_interval) # The",
"solver. init_from_command_line(args) print parser.parse_args() # parameters ok? solver.solve() print 'Check error.........................' solver.assert_no_error() if",
"parser.parse_args() problem.init_from_command_line(args) solver. init_from_command_line(args) print parser.parse_args() # parameters ok? solver.solve() print 'Check error.........................'",
"Nt+1) # Mesh points in time x = np.linspace(0, L, Nx+1) # Mesh",
"parameters. self.type and self.help are optional, but self.prms must be complete and contain",
"a symmetry condition at the end x=L/2. \"\"\" problem = Problem() solver =",
"reproduced, provided c is const. We simulate in [0, L/2] and apply a",
"def __getitem__(self, name): \"\"\"Allow obj[name] indexing to look up a parameter.\"\"\" return self.get(name)",
"u[i-1]=u[i+1] when du/dn=0 # x=L: i+1 -> i-1 since u[i+1]=u[i-1] when du/dn=0 ip1",
"= hashlib.sha1(data).hexdigest() if os.path.isfile('.' + hashed_input + '_archive.npz'): # Simulation is already run",
"exactly reproduced, provided c(x) is constant. We simulate in [0, L/2] and apply",
"self.help with the corresponding descriptions of parameters. self.type and self.help are optional, but",
"= None class Solver(Parameters): \"\"\" Numerical parameters for solving the wave equation u_tt",
"solver. define_command_line_options(parser) args = parser.parse_args() problem.init_from_command_line(args) solver. init_from_command_line(args) print parser.parse_args() # parameters ok?",
"= 0, on x=L: i+1 -> i-1 since u[i+1]=u[i-1]) ip1 = i+1 im1",
"= (c**2*u_x)_x + f(x,t) with t in [0,T] and x in (0,L). The",
"u_1[im1])) + \\ dt2*f(x[i], t[n]) else: u[i] = U_L(t[n+1]) if user_action is not",
"solution, choosing u(x,t)=x(L-x)(1+t/2) as our solution. This solution should be exactly reproduced when",
"name in parameters: if name in self.prm: self.prm[name] = parameters[name] else: self._illegal_parameter(name) def",
"first step for i in Ix[1:-1]: u[i] = u_1[i] + dt*V(x[i]) + \\",
"stability_safety_factor'.split()] dx = self.m.d[0] I = self.problem.I V = self.problem.V f = self.problem.f",
"...use local variables to ease reading L, c, T = self.problem['L c T'.split()]",
"+ \\ C2*(0.5*(q[1:-1] + q[2:])*(u_1[2:] - u_1[1:-1]) - 0.5*(q[1:-1] + q[:-2])*(u_1[1:-1] - u_1[:-2]))",
"simulate in [0, L/2] and apply a symmetry condition at the end x=L/2.",
"return self.prm[name] def __getitem__(self, name): \"\"\"Allow obj[name] indexing to look up a parameter.\"\"\"",
"# u corresponds to u^{n+1} in the mathematical scheme u_2 = self.f.u[n-1,:] u_1",
"= self.help[name] if name in self.help else None parser.add_argument( '--' + name, default=self.get(name),",
"def f(self, x, t): c = self['c'] return 2*(1+0.5*t)*c**2 def U_0(self, t): return",
"and check error\"\"\" Nx = self['Nx'] Nt = self.m.Nt L, T = self.problem['L",
"U_L is None: im1 = i-1 ip1 = im1 u[i] = - u_2[i]",
"= np.linspace(0, L, Nx+1) # Mesh points in space # Make sure dx",
"q = c**2 C2 = (dt/dx)**2; dt2 = dt*dt # Help variables in",
"defined.\"\"\" if hasattr(self, 'prm') and \\ isinstance(self.prm, dict) and \\ hasattr(self, 'type') and",
"# introduce some local help variables to ease reading L_end = self.problem['L'] dx",
"when c is const. \"\"\" def __init__(self): self.prm = dict(L=2.5, c=1.5, T=18) self.type",
"since u[i-1]=u[i+1] # when du/dn = 0, on x=L: i+1 -> i-1 since",
"in time x = np.linspace(0, L, Nx+1) # Mesh points in space #",
"by the method of manufactured solution, choosing u(x,t)=x(L-x)(1+t/2) as our solution. This solution",
"= int(round(t_interval/float(dt))), T=t_interval) # The mesh function f will, after solving, contain #",
"the end x=L/2. \"\"\" problem = Problem() solver = Solver(problem) # Read input",
"set(self, **parameters): \"\"\"Set one or more parameters.\"\"\" for name in parameters: if name",
"dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i] + q[im1])*(u_1[i]",
"self.help = dict(L='1D domain', c='coefficient (wave velocity) in PDE', T='end time of simulation')",
"parameter name.\"\"\" raise ValueError( 'parameter \"%s\" is not registered.\\nLegal '\\ 'parameters are\\n%s' %",
"import argparse parser = argparse.ArgumentParser() for name in self.prm: tp = self.type[name] if",
"solving of the wave equation u_tt = (c**2*u_x)_x + f(x,t) with t in",
"= dict(L=float, c=float, T=float) self.help = dict(L='1D domain', c='coefficient (wave velocity) in PDE',",
"self.problem.U_L Nt = self.m.Nt t = np.linspace(0, T, Nt+1) # Mesh points in",
"boundary conditions i = Ix[0] if U_0 is None: # Set boundary values",
"= dx*self['stability_safety_factor']*self['C']/ \\ float(self.problem['c']) self.m = Mesh(L=[0,L_end/2], d=[dx], Nt = int(round(t_interval/float(dt))), T=t_interval) #",
"== 0: V = (lambda x: 0) if version == 'scalar' else \\",
"'\\ 'parameters are\\n%s' % (name, ' '.join(list(self.prm.keys())))) def set(self, **parameters): \"\"\"Set one or",
"closer to mathematical # notation in computational scheme u_1 = self.f.u[0,:] u =",
"or I == 0: I = (lambda x: 0) if version == 'scalar'",
"test_quadratic_with_classes(): \"\"\" Check the scalar and vectorized versions for a quadratic u(x,t)=x(L-x)(1+t/2) that",
"= (L_end/2)/float(self['Nx']) t_interval = self.problem['T'] dt = dx*self['stability_safety_factor']*self['C']/ \\ float(self.problem['c']) self.m = Mesh(L=[0,L_end/2],",
"manufactured solution, choosing u(x,t)=x(L-x)(1+t/2) as our solution. This solution should be exactly reproduced",
"stability_safety_factor = self[ 'C Nx stability_safety_factor'.split()] dx = self.m.d[0] I = self.problem.I V",
"'.join(list(self.prm.keys())))) def set(self, **parameters): \"\"\"Set one or more parameters.\"\"\" for name in parameters:",
"self.problem.V f = self.problem.f U_0 = self.problem.U_0 U_L = self.problem.U_L Nt = self.m.Nt",
"q[im1])*(u_1[i] - u_1[im1])) + \\ dt2*f(x[i], t[n]) else: u[i] = U_L(t[n+1]) if user_action",
"u_e = self.problem.u_exact(x, t[n]) diff = np.abs(self.f.u[n,:] - u_e).max() print 'diff:', diff tol",
"complete and contain all parameters. \"\"\" pass def ok(self): \"\"\"Check if attr. prm,",
"will, after solving, contain # the solution for the whole domain and all",
"if user_action is not None: user_action(u_1, x, t, 0) # Special formula for",
"(c**2*u_x)_x + f(x,t) with t in [0,T] and x in (0,L). We have",
"U_L = self.problem.U_L Nt = self.m.Nt t = np.linspace(0, T, Nt+1) # Mesh",
"= parser.parse_args() problem.init_from_command_line(args) solver. init_from_command_line(args) print parser.parse_args() # parameters ok? solver.solve() print 'Check",
"U_0 is None else inspect.getsource(U_0)) + \\ ('None' if U_L is None else",
"get many? for n in name: if n not in self.prm: self._illegal_parameter(name) return",
"f will, after solving, contain # the solution for the whole domain and",
"corresponding descriptions of parameters. self.type and self.help are optional, but self.prms must be",
"not in self.prm: self._illegal_parameter(name) return self.prm[name] def __getitem__(self, name): \"\"\"Allow obj[name] indexing to",
"condition at the end x=L/2. \"\"\" def __init__(self, problem): self.problem = problem self.prm",
"% (name, ' '.join(list(self.prm.keys())))) def set(self, **parameters): \"\"\"Set one or more parameters.\"\"\" for",
"domain used (symmetry) x = np.linspace(0, L, Nx+1) # Mesh points in space",
"i+1 -> i-1 u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1]",
"for name in parameters: if name in self.prm: self.prm[name] = parameters[name] else: self._illegal_parameter(name)",
"'scalar': for i in Ix[1:-1]: u[i] = - u_2[i] + 2*u_1[i] + \\",
"(wave velocity) in PDE', T='end time of simulation') def u_exact(self, x, t): L",
"the\\ndictionaries '\\ 'self.prm, self.type, self.help!' % self.__class__.__name__) def _illegal_parameter(self, name): \"\"\"Raise exception about",
"range(0, Nt+1) # Load initial condition into u_1 for i in range(0,Nx+1): u_1[i]",
"equation u_tt = (c**2*u_x)_x + f(x,t) with t in [0,T] and x in",
"dt = dx*self['stability_safety_factor']*self['C']/ \\ float(self.problem['c']) self.m = Mesh(L=[0,L_end/2], d=[dx], Nt = int(round(t_interval/float(dt))), T=t_interval)",
"ease reading L_end = self.problem['L'] dx = (L_end/2)/float(self['Nx']) t_interval = self.problem['T'] dt =",
"solution, choosing u(x,t)=x(L-x)(1+t/2) as our solution. This solution should be exactly reproduced, provided",
"return self.set(name=value) def define_command_line_options(self, parser=None): self.ok() if parser is None: import argparse parser",
"\\ 0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1]))",
"up a parameter.\"\"\" return self.get(name) def __setitem__(self, name, value): \"\"\" Allow obj[name] =",
"init_from_command_line(self, args): for name in self.prm: self.prm[name] = getattr(args, name) class Problem(Parameters): \"\"\"",
"if isinstance(c, (float,int)): c = np.zeros(x.shape) + c elif callable(c): # Call c(x)",
"in self.prm: self._illegal_parameter(name) return self.prm[name] def __getitem__(self, name): \"\"\"Allow obj[name] indexing to look",
"U_L == 0: U_L = lambda t: 0 # Make hash of all",
"self.f.u[0,:] u = self.f.u[1,:] import time; t0 = time.clock() # CPU time measurement",
"import time, glob, shutil, os import numpy as np class Parameters(object): def __init__(self):",
"x, t, 0) # Special formula for the first step for i in",
"dt2*f(x[1:-1], t[n]) else: raise ValueError('version=%s' % version) # Insert boundary conditions i =",
"The problem definition is implied by the method of manufactured solution, choosing u(x,t)=x(L-x)(1+t/2)",
"boundary values (x=0: i-1 -> i+1 since u[i-1]=u[i+1] # when du/dn = 0,",
"+ q[im1])*(u_1[i] - u_1[im1])) + \\ dt2*f(x[i], t[n]) else: u[i] = U_0(t[n+1]) i",
"space t = np.linspace(0, T, Nt+1) # Mesh points in time for n",
"Special formula for the first step for i in Ix[1:-1]: u[i] = u_1[i]",
"+ \\ 0.5*dt2*f(x[i], t[0]) else: u[i] = U_L(dt) if user_action is not None:",
"if isinstance(name, (list,tuple)): # get many? for n in name: if n not",
"function f will, after solving, contain # the solution for the whole domain",
"is None else inspect.getsource(U_0)) + \\ ('None' if U_L is None else inspect.getsource(U_L))",
"points if version == 'scalar': for i in Ix[1:-1]: u[i] = - u_2[i]",
"-> i-1 since u[i+1]=u[i-1]) ip1 = i+1 im1 = ip1 # i-1 ->",
"run return -1, hashed_input # use local variables to make code closer to",
"+ \\ ('None' if U_L is None else inspect.getsource(U_L)) + \\ '_' +",
"t, n+1): break cpu_time = time.clock() - t0 return cpu_time, hashed_input def assert_no_error(self):",
"I = (lambda x: 0) if version == 'scalar' else \\ lambda x:",
"c is const. We simulate in [0, L/2] and apply a symmetry condition",
"float(self.problem['c']) self.m = Mesh(L=[0,L_end/2], d=[dx], Nt = int(round(t_interval/float(dt))), T=t_interval) # The mesh function",
"in self.prm: tp = self.type[name] if name in self.type else str help =",
"- u_1[i]) - \\ 0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \\ dt2*f(x[i], t[n])",
"Ix[1:-1]: u[i] = - u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] -",
"-> i+1 since u[i-1]=u[i+1] when du/dn=0 # x=L: i+1 -> i-1 since u[i+1]=u[i-1]",
"a known exact solution. \"\"\" import time, glob, shutil, os import numpy as",
"\\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \\ dt2*f(x[i], t[n]) elif version ==",
"0) if version == 'scalar' else \\ lambda x: np.zeros(x.shape) if V is",
"== 'scalar' else \\ lambda x: np.zeros(x.shape) if V is None or V",
"input from the command line parser = problem.define_command_line_options() parser = solver. define_command_line_options(parser) args",
"to assign a parameter's value. \"\"\" return self.set(name=value) def define_command_line_options(self, parser=None): self.ok() if",
"i in range(0,Nx+1): u_1[i] = I(x[i]) if user_action is not None: user_action(u_1, x,",
"str(dt) + '_' + str(C) + '_' + str(T) + \\ '_' +",
"if name in self.help else None parser.add_argument( '--' + name, default=self.get(name), metavar=name, type=tp,",
"= np.linspace(0, T, Nt+1) # Mesh points in time x = np.linspace(0, L,",
"= self['Nx'] Nt = self.m.Nt L, T = self.problem['L T'.split()] L = L/2",
"at the end x=L/2. \"\"\" problem = Problem() solver = Solver(problem) # Read",
"# The mesh function f will, after solving, contain # the solution for",
"T=t_interval) # The mesh function f will, after solving, contain # the solution",
"+ \\ dt2*f(x[i], t[n]) elif version == 'vectorized': u[1:-1] = - u_2[1:-1] +",
"# Set boundary values (x=0: i-1 -> i+1 since u[i-1]=u[i+1] # when du/dn",
"introduce some local help variables to ease reading L_end = self.problem['L'] dx =",
"values.\"\"\" if isinstance(name, (list,tuple)): # get many? for n in name: if n",
"1E-13 assert diff < tol def test_quadratic_with_classes(): \"\"\" Check the scalar and vectorized",
"and U_L == 0: U_L = lambda t: 0 # Make hash of",
"in time for n in range(len(t)): u_e = self.problem.u_exact(x, t[n]) diff = np.abs(self.f.u[n,:]",
"and self.help with the corresponding descriptions of parameters. self.type and self.help are optional,",
"a parameter's value. \"\"\" return self.set(name=value) def define_command_line_options(self, parser=None): self.ok() if parser is",
"0.5*dt2*f(x[i], t[0]) i = Ix[0] if U_0 is None: # Set boundary values",
"version == 'scalar' else \\ lambda x: np.zeros(x.shape) if V is None or",
"raise ValueError( 'The constructor in class %s does not '\\ 'initialize the\\ndictionaries '\\",
"isinstance(self.prm, dict) and \\ hasattr(self, 'type') and \\ isinstance(self.type, dict) and \\ hasattr(self,",
"I, V, U_0, U_L if None or 0 if f is None or",
"points in space # Make sure dx and dt are compatible with x",
"if user_action(u, x, t, n+1): break cpu_time = time.clock() - t0 return cpu_time,",
"if I is None or I == 0: I = (lambda x: 0)",
"help=help) return parser def init_from_command_line(self, args): for name in self.prm: self.prm[name] = getattr(args,",
"U_0(t[n+1]) i = Ix[-1] if U_L is None: im1 = i-1 ip1 =",
"if U_0 is not None: if isinstance(U_0, (float,int)) and U_0 == 0: U_0",
"if name not in self.prm: self._illegal_parameter(name) return self.prm[name] def __getitem__(self, name): \"\"\"Allow obj[name]",
"def __setitem__(self, name, value): \"\"\" Allow obj[name] = value syntax to assign a",
"f = (lambda x, t: 0) if version == 'scalar' else \\ lambda",
"t): L = self['L'] return x*(L-x)*(1+0.5*t) def I(self, x): return self.u_exact(x, 0) def",
"= c(x[i]) c = c_ q = c**2 C2 = (dt/dx)**2; dt2 =",
"is already run return -1, hashed_input # use local variables to make code",
"time of simulation') def u_exact(self, x, t): L = self['L'] return x*(L-x)*(1+0.5*t) def",
"Nx = self['Nx'] Nt = self.m.Nt L, T = self.problem['L T'.split()] L =",
"solution. \"\"\" import time, glob, shutil, os import numpy as np class Parameters(object):",
"exactly reproduced when c is const. \"\"\" def __init__(self): self.prm = dict(L=2.5, c=1.5,",
"ip1 u[i] = - u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] -",
"help are defined.\"\"\" if hasattr(self, 'prm') and \\ isinstance(self.prm, dict) and \\ hasattr(self,",
"# Load initial condition into u_1 for i in range(0,Nx+1): u_1[i] = I(x[i])",
"are optional, but self.prms must be complete and contain all parameters. \"\"\" pass",
"is exactly reproduced, provided c(x) is constant. We simulate in [0, L/2] and",
"0 if f is None or f == 0: f = (lambda x,",
"None or 0 if f is None or f == 0: f =",
"self.prm = dict(C = 0.75, Nx=3, stability_safety_factor=1.0) self.type = dict(C=float, Nx=int, stability_safety_factor=float) self.help",
"self.problem.u_exact(x, t[n]) diff = np.abs(self.f.u[n,:] - u_e).max() print 'diff:', diff tol = 1E-13",
"x=L: i+1 -> i-1 since u[i+1]=u[i-1]) ip1 = i+1 im1 = ip1 #",
"'_' + \\ ('None' if U_0 is None else inspect.getsource(U_0)) + \\ ('None'",
"def _illegal_parameter(self, name): \"\"\"Raise exception about illegal parameter name.\"\"\" raise ValueError( 'parameter \"%s\"",
"Nt = self.m.Nt t = np.linspace(0, T, Nt+1) # Mesh points in time",
"u_1[im1])) + \\ dt2*f(x[i], t[n]) else: u[i] = U_0(t[n+1]) i = Ix[-1] if",
"Set boundary values (x=0: i-1 -> i+1 since u[i-1]=u[i+1] # when du/dn =",
"time for n in range(len(t)): u_e = self.problem.u_exact(x, t[n]) diff = np.abs(self.f.u[n,:] -",
"self.type = dict(L=float, c=float, T=float) self.help = dict(L='1D domain', c='coefficient (wave velocity) in",
"c T'.split()] L = L/2 # compute with half the domain only (symmetry)",
"cpu_time, hashed_input def assert_no_error(self): \"\"\"Run through mesh and check error\"\"\" Nx = self['Nx']",
"conditions i = Ix[0] if U_0 is None: # Set boundary values #",
"'_' + str(L) + str(dt) + '_' + str(C) + '_' + str(T)",
"I(self, x): return self.u_exact(x, 0) def V(self, x): return 0.5*self.u_exact(x, 0) def f(self,",
"ok(self): \"\"\"Check if attr. prm, type, and help are defined.\"\"\" if hasattr(self, 'prm')",
"solution for the whole domain and all time steps. self.f = Function(self.m, num_comp=1,",
"utf-8 -*- \"\"\" Class implementation for solving of the wave equation u_tt =",
"str(C) + '_' + str(T) + \\ '_' + str(stability_safety_factor) hashed_input = hashlib.sha1(data).hexdigest()",
"self.problem['L c T'.split()] L = L/2 # compute with half the domain only",
"V is None or V == 0: V = (lambda x: 0) if",
"if U_0 is None: # Set boundary values (x=0: i-1 -> i+1 since",
"= self.problem.U_0 U_L = self.problem.U_L Nt = self.m.Nt t = np.linspace(0, T, Nt+1)",
"assign a parameter's value. \"\"\" return self.set(name=value) def define_command_line_options(self, parser=None): self.ok() if parser",
"return [self.prm[n] for n in name] else: if name not in self.prm: self._illegal_parameter(name)",
"- u_1[im1])) + \\ dt2*f(x[i], t[n]) else: u[i] = U_0(t[n+1]) i = Ix[-1]",
"= i-1 ip1 = im1 # i+1 -> i-1 u[i] = u_1[i] +",
"c is const. \"\"\" def __init__(self): self.prm = dict(L=2.5, c=1.5, T=18) self.type =",
"+ \\ 0.5*C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \\ 0.5*(q[i] + q[i-1])*(u_1[i] -",
"not None: user_action(u_1, x, t, 0) # Special formula for the first step",
"+ \\ dt2*f(x[i], t[n]) else: u[i] = U_L(t[n+1]) if user_action is not None:",
"illegal parameter name.\"\"\" raise ValueError( 'parameter \"%s\" is not registered.\\nLegal '\\ 'parameters are\\n%s'",
"of spatial mesh points', stability_safety_factor='stability factor') from UniformFDMesh import Mesh, Function # introduce",
"time; t0 = time.clock() # CPU time measurement Ix = range(0, Nx+1) It",
"= np.zeros(x.shape) + c elif callable(c): # Call c(x) and fill array c",
"initial condition into u_1 for i in range(0,Nx+1): u_1[i] = I(x[i]) if user_action",
"+ q[i+1])*(u_1[i+1] - u_1[i]) - \\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \\",
"PDE', T='end time of simulation') def u_exact(self, x, t): L = self['L'] return",
"not None: user_action(u, x, t, 1) for n in It[1:-1]: # u corresponds",
"Check the scalar and vectorized versions for a quadratic u(x,t)=x(L-x)(1+t/2) that is exactly",
"with parameters and default values, self.type with the corresponding types, and self.help with",
"self['L'] return x*(L-x)*(1+0.5*t) def I(self, x): return self.u_exact(x, 0) def V(self, x): return",
"= self.problem.u_exact(x, t[n]) diff = np.abs(self.f.u[n,:] - u_e).max() print 'diff:', diff tol =",
"(float,int)) and U_0 == 0: U_0 = lambda t: 0 if U_L is",
"are\\n%s' % (name, ' '.join(list(self.prm.keys())))) def set(self, **parameters): \"\"\"Set one or more parameters.\"\"\"",
"is None: im1 = i-1 ip1 = im1 # i+1 -> i-1 u[i]",
"get(self, name): \"\"\"Get one or more parameter values.\"\"\" if isinstance(name, (list,tuple)): # get",
"mesh function f will, after solving, contain # the solution for the whole",
"== 'scalar' else \\ lambda x, t: np.zeros(x.shape) if I is None or",
"dx and dt are compatible with x and t dx = x[1] -",
"variables to make code closer to mathematical # notation in computational scheme u_1",
"0.5*C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) +",
"have u=U_0 or du/dn=0 on x=0, and u=u_L or du/dn=0 on x =",
"and \\ isinstance(self.help, dict): return True else: raise ValueError( 'The constructor in class",
"self.help else None parser.add_argument( '--' + name, default=self.get(name), metavar=name, type=tp, help=help) return parser",
"0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) +",
"end x=L/2. \"\"\" problem = Problem() solver = Solver(problem) # Read input from",
"t_interval = self.problem['T'] dt = dx*self['stability_safety_factor']*self['C']/ \\ float(self.problem['c']) self.m = Mesh(L=[0,L_end/2], d=[dx], Nt",
"Problem() solver = Solver(problem) # Read input from the command line parser =",
"i+1 -> i-1 since u[i+1]=u[i-1]) ip1 = i+1 im1 = ip1 # i-1",
"since u[i-1]=u[i+1] when du/dn=0 # x=L: i+1 -> i-1 since u[i+1]=u[i-1] when du/dn=0",
"L/2 # compute with half the domain only (symmetry) C, Nx, stability_safety_factor =",
"if os.path.isfile('.' + hashed_input + '_archive.npz'): # Simulation is already run return -1,",
"u_1[1:-1]) - 0.5*(q[1:-1] + q[:-2])*(u_1[1:-1] - u_1[:-2])) + \\ dt2*f(x[1:-1], t[n]) else: raise",
"inspect.getsource(U_0)) + \\ ('None' if U_L is None else inspect.getsource(U_L)) + \\ '_'",
"= - u_2[1:-1] + 2*u_1[1:-1] + \\ C2*(0.5*(q[1:-1] + q[2:])*(u_1[2:] - u_1[1:-1]) -",
"self.prm: self._illegal_parameter(name) return [self.prm[n] for n in name] else: if name not in",
"x, t): L = self['L'] return x*(L-x)*(1+0.5*t) def I(self, x): return self.u_exact(x, 0)",
"and \\ isinstance(self.prm, dict) and \\ hasattr(self, 'type') and \\ isinstance(self.type, dict) and",
"stability_safety_factor='stability factor') from UniformFDMesh import Mesh, Function # introduce some local help variables",
"user-given f, I, V, U_0, U_L if None or 0 if f is",
"u[i+1]=u[i-1] when du/dn=0 ip1 = i+1 im1 = ip1 u[i] = - u_2[i]",
"Mesh, Function # introduce some local help variables to ease reading L_end =",
"Nx='No of spatial mesh points', stability_safety_factor='stability factor') from UniformFDMesh import Mesh, Function #",
"str(T) + \\ '_' + str(stability_safety_factor) hashed_input = hashlib.sha1(data).hexdigest() if os.path.isfile('.' + hashed_input",
"name.\"\"\" raise ValueError( 'parameter \"%s\" is not registered.\\nLegal '\\ 'parameters are\\n%s' % (name,",
"T=18) self.type = dict(L=float, c=float, T=float) self.help = dict(L='1D domain', c='coefficient (wave velocity)",
"array c c_ = np.zeros(x.shape) for i in range(Nx+1): c_[i] = c(x[i]) c",
"if user_action is not None: if user_action(u, x, t, n+1): break cpu_time =",
"import hashlib, inspect data = inspect.getsource(I) + '_' + inspect.getsource(V) + \\ '_'",
"user_action is not None: user_action(u, x, t, 1) for n in It[1:-1]: #",
"def get(self, name): \"\"\"Get one or more parameter values.\"\"\" if isinstance(name, (list,tuple)): #",
"n not in self.prm: self._illegal_parameter(name) return [self.prm[n] for n in name] else: if",
"and \\ hasattr(self, 'help') and \\ isinstance(self.help, dict): return True else: raise ValueError(",
"u_2 = self.f.u[n-1,:] u_1 = self.f.u[n,:] u = self.f.u[n+1,:] # Update all inner",
"if None or 0 if f is None or f == 0: f",
"all input data import hashlib, inspect data = inspect.getsource(I) + '_' + inspect.getsource(V)",
"None: if isinstance(U_0, (float,int)) and U_0 == 0: U_0 = lambda t: 0",
"code closer to mathematical # notation in computational scheme u_1 = self.f.u[0,:] u",
"if f is None or f == 0: f = (lambda x, t:",
"= Ix[-1] if U_L is None: im1 = i-1 ip1 = im1 #",
"else: u[i] = U_0(t[n+1]) i = Ix[-1] if U_L is None: im1 =",
"self.u_exact(x, 0) def V(self, x): return 0.5*self.u_exact(x, 0) def f(self, x, t): c",
"= Solver(problem) # Read input from the command line parser = problem.define_command_line_options() parser",
"C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) +",
"0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \\ 0.5*dt2*f(x[i], t[0]) else: u[i] = U_0(dt)",
"lambda x: np.zeros(x.shape) if U_0 is not None: if isinstance(U_0, (float,int)) and U_0",
"t: 0 # Make hash of all input data import hashlib, inspect data",
"# parameters ok? solver.solve() print 'Check error.........................' solver.assert_no_error() if __name__ == '__main__': test_quadratic_with_classes()",
"t in [0,T] and x in (0,L). We have u=U_0 or du/dn=0 on",
"None: if user_action(u, x, t, n+1): break cpu_time = time.clock() - t0 return",
"I == 0: I = (lambda x: 0) if version == 'scalar' else",
"= im1 # i+1 -> i-1 u[i] = u_1[i] + dt*V(x[i]) + \\",
"tol = 1E-13 assert diff < tol def test_quadratic_with_classes(): \"\"\" Check the scalar",
"i-1 ip1 = im1 u[i] = - u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i]",
"= problem.define_command_line_options() parser = solver. define_command_line_options(parser) args = parser.parse_args() problem.init_from_command_line(args) solver. init_from_command_line(args) print",
"name in self.prm: self.prm[name] = getattr(args, name) class Problem(Parameters): \"\"\" Physical parameters for",
"n in range(len(t)): u_e = self.problem.u_exact(x, t[n]) diff = np.abs(self.f.u[n,:] - u_e).max() print",
"u[1:-1] = - u_2[1:-1] + 2*u_1[1:-1] + \\ C2*(0.5*(q[1:-1] + q[2:])*(u_1[2:] - u_1[1:-1])",
"== 0: U_L = lambda t: 0 # Make hash of all input",
"c=1.5, T=18) self.type = dict(L=float, c=float, T=float) self.help = dict(L='1D domain', c='coefficient (wave",
"hashlib, inspect data = inspect.getsource(I) + '_' + inspect.getsource(V) + \\ '_' +",
"t[n]) elif version == 'vectorized': u[1:-1] = - u_2[1:-1] + 2*u_1[1:-1] + \\",
"method of manufactured solution, choosing u(x,t)=x(L-x)(1+t/2) as our solution. This solution should be",
"at the end x=L/2. \"\"\" def __init__(self, problem): self.problem = problem self.prm =",
"(c**2*u_x)_x + f(x,t) with t in [0,T] and x in (0,L). The problem",
"# i-1 -> i+1 u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] +",
"- t0 return cpu_time, hashed_input def assert_no_error(self): \"\"\"Run through mesh and check error\"\"\"",
"np.linspace(0, T, Nt+1) # Mesh points in time for n in range(len(t)): u_e",
"Update all inner points if version == 'scalar': for i in Ix[1:-1]: u[i]",
"reproduced, provided c(x) is constant. We simulate in [0, L/2] and apply a",
"def init_from_command_line(self, args): for name in self.prm: self.prm[name] = getattr(args, name) class Problem(Parameters):",
"type, and help are defined.\"\"\" if hasattr(self, 'prm') and \\ isinstance(self.prm, dict) and",
"im1 = i-1 ip1 = im1 # i+1 -> i-1 u[i] = u_1[i]",
"= im1 u[i] = - u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] + q[ip1])*(u_1[ip1]",
"U_0 is None: # Set boundary values # x=0: i-1 -> i+1 since",
"\\ dt2*f(x[1:-1], t[n]) else: raise ValueError('version=%s' % version) # Insert boundary conditions i",
"else None parser.add_argument( '--' + name, default=self.get(name), metavar=name, type=tp, help=help) return parser def",
"(x=0: i-1 -> i+1 since u[i-1]=u[i+1] # when du/dn = 0, on x=L:",
"and t dx = x[1] - x[0] dt = t[1] - t[0] #",
"0.5*(q[1:-1] + q[:-2])*(u_1[1:-1] - u_1[:-2])) + \\ dt2*f(x[1:-1], t[n]) else: raise ValueError('version=%s' %",
"for n in name: if n not in self.prm: self._illegal_parameter(name) return [self.prm[n] for",
"ValueError('version=%s' % version) # Insert boundary conditions i = Ix[0] if U_0 is",
"return cpu_time, hashed_input def assert_no_error(self): \"\"\"Run through mesh and check error\"\"\" Nx =",
"\\ hasattr(self, 'type') and \\ isinstance(self.type, dict) and \\ hasattr(self, 'help') and \\",
"# the solution for the whole domain and all time steps. self.f =",
"+ q[:-2])*(u_1[1:-1] - u_1[:-2])) + \\ dt2*f(x[1:-1], t[n]) else: raise ValueError('version=%s' % version)",
"reading L, c, T = self.problem['L c T'.split()] L = L/2 # compute",
"indexing to look up a parameter.\"\"\" return self.get(name) def __setitem__(self, name, value): \"\"\"",
"T=float) self.help = dict(L='1D domain', c='coefficient (wave velocity) in PDE', T='end time of",
"x=0, and u=u_L or du/dn=0 on x = L. For simplicity, we use",
"q[i-1])*(u_1[i] - u_1[i-1])) + \\ dt2*f(x[i], t[n]) elif version == 'vectorized': u[1:-1] =",
"(L_end/2)/float(self['Nx']) t_interval = self.problem['T'] dt = dx*self['stability_safety_factor']*self['C']/ \\ float(self.problem['c']) self.m = Mesh(L=[0,L_end/2], d=[dx],",
"+ c elif callable(c): # Call c(x) and fill array c c_ =",
"None: if isinstance(U_L, (float,int)) and U_L == 0: U_L = lambda t: 0",
"in PDE', T='end time of simulation') def u_exact(self, x, t): L = self['L']",
"versions for a quadratic u(x,t)=x(L-x)(1+t/2) that is exactly reproduced, provided c(x) is constant.",
"hashlib.sha1(data).hexdigest() if os.path.isfile('.' + hashed_input + '_archive.npz'): # Simulation is already run return",
"+ name, default=self.get(name), metavar=name, type=tp, help=help) return parser def init_from_command_line(self, args): for name",
"time measurement Ix = range(0, Nx+1) It = range(0, Nt+1) # Load initial",
"of all input data import hashlib, inspect data = inspect.getsource(I) + '_' +",
"-> i-1 since u[i+1]=u[i-1] when du/dn=0 ip1 = i+1 im1 = ip1 u[i]",
"# compute with half the domain only (symmetry) C, Nx, stability_safety_factor = self[",
"Set boundary values # x=0: i-1 -> i+1 since u[i-1]=u[i+1] when du/dn=0 #",
"== 'scalar': for i in Ix[1:-1]: u[i] = - u_2[i] + 2*u_1[i] +",
"f(x,t) with t in [0,T] and x in (0,L). We have u=U_0 or",
"q[i+1])*(u_1[i+1] - u_1[i]) - \\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \\ 0.5*dt2*f(x[i],",
"\\ 0.5*dt2*f(x[i], t[0]) i = Ix[0] if U_0 is None: # Set boundary",
"def V(self, x): return 0.5*self.u_exact(x, 0) def f(self, x, t): c = self['c']",
"spatial mesh points', stability_safety_factor='stability factor') from UniformFDMesh import Mesh, Function # introduce some",
"else: u[i] = U_L(dt) if user_action is not None: user_action(u, x, t, 1)",
"'_' + inspect.getsource(f) + '_' + str(c) + '_' + \\ ('None' if",
"x): return 0.5*self.u_exact(x, 0) def f(self, x, t): c = self['c'] return 2*(1+0.5*t)*c**2",
"'_' + inspect.getsource(V) + \\ '_' + inspect.getsource(f) + '_' + str(c) +",
"\\ 0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \\ 0.5*dt2*f(x[i], t[0]) else: u[i] =",
"solve(self, user_action=None, version='scalar'): # ...use local variables to ease reading L, c, T",
"be exactly reproduced, provided c is const. We simulate in [0, L/2] and",
"provided c is const. We simulate in [0, L/2] and apply a symmetry",
"x: np.zeros(x.shape) if V is None or V == 0: V = (lambda",
"Mesh(L=[0,L_end/2], d=[dx], Nt = int(round(t_interval/float(dt))), T=t_interval) # The mesh function f will, after",
"0.5*dt2*f(x[i], t[0]) else: u[i] = U_L(dt) if user_action is not None: user_action(u, x,",
"all inner points if version == 'scalar': for i in Ix[1:-1]: u[i] =",
"solution. This solution should be exactly reproduced when c is const. \"\"\" def",
"+ 2*u_1[i] + \\ C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \\ 0.5*(q[i] +",
"if U_0 is None else inspect.getsource(U_0)) + \\ ('None' if U_L is None",
"'parameter \"%s\" is not registered.\\nLegal '\\ 'parameters are\\n%s' % (name, ' '.join(list(self.prm.keys())))) def",
"if U_0 is None: # Set boundary values # x=0: i-1 -> i+1",
"else inspect.getsource(U_0)) + \\ ('None' if U_L is None else inspect.getsource(U_L)) + \\",
"-*- coding: utf-8 -*- \"\"\" Class implementation for solving of the wave equation",
"for the whole domain and all time steps. self.f = Function(self.m, num_comp=1, space_only=False)",
"+ q[i-1])*(u_1[i] - u_1[i-1])) + \\ 0.5*dt2*f(x[i], t[0]) i = Ix[0] if U_0",
"The mesh function f will, after solving, contain # the solution for the",
"We have u=U_0 or du/dn=0 on x=0, and u=u_L or du/dn=0 on x",
"callable(c): # Call c(x) and fill array c c_ = np.zeros(x.shape) for i",
"self.f.u[n+1,:] # Update all inner points if version == 'scalar': for i in",
"Make sure dx and dt are compatible with x and t dx =",
"= Problem() solver = Solver(problem) # Read input from the command line parser",
"= self['L'] return x*(L-x)*(1+0.5*t) def I(self, x): return self.u_exact(x, 0) def V(self, x):",
"+ f(x,t) with t in [0,T] and x in (0,L). The problem definition",
"Nx=3, stability_safety_factor=1.0) self.type = dict(C=float, Nx=int, stability_safety_factor=float) self.help = dict(C='Courant number', Nx='No of",
"on x=L: i+1 -> i-1 since u[i+1]=u[i-1]) ip1 = i+1 im1 = ip1",
"+ '_' + inspect.getsource(V) + \\ '_' + inspect.getsource(f) + '_' + str(c)",
"self._illegal_parameter(name) def get(self, name): \"\"\"Get one or more parameter values.\"\"\" if isinstance(name, (list,tuple)):",
"parser.parse_args() # parameters ok? solver.solve() print 'Check error.........................' solver.assert_no_error() if __name__ == '__main__':",
"space # Make sure dx and dt are compatible with x and t",
"reproduced when c is const. \"\"\" def __init__(self): self.prm = dict(L=2.5, c=1.5, T=18)",
"time x = np.linspace(0, L, Nx+1) # Mesh points in space # Make",
"t0 = time.clock() # CPU time measurement Ix = range(0, Nx+1) It =",
"if hasattr(self, 'prm') and \\ isinstance(self.prm, dict) and \\ hasattr(self, 'type') and \\",
"constant. We simulate in [0, L/2] and apply a symmetry condition at the",
"[self.prm[n] for n in name] else: if name not in self.prm: self._illegal_parameter(name) return",
"for n in range(len(t)): u_e = self.problem.u_exact(x, t[n]) diff = np.abs(self.f.u[n,:] - u_e).max()",
"U_L is None: im1 = i-1 ip1 = im1 # i+1 -> i-1",
"# Call c(x) and fill array c c_ = np.zeros(x.shape) for i in",
"hashed_input def assert_no_error(self): \"\"\"Run through mesh and check error\"\"\" Nx = self['Nx'] Nt",
"on x = L. For simplicity, we use a constant c here and",
"U_0 = self.problem.U_0 U_L = self.problem.U_L Nt = self.m.Nt t = np.linspace(0, T,",
"dt = t[1] - t[0] # Treat c(x) as array if isinstance(c, (float,int)):",
"dict(C='Courant number', Nx='No of spatial mesh points', stability_safety_factor='stability factor') from UniformFDMesh import Mesh,",
"= U_L(dt) if user_action is not None: user_action(u, x, t, 1) for n",
"dict) and \\ hasattr(self, 'help') and \\ isinstance(self.help, dict): return True else: raise",
"mesh points', stability_safety_factor='stability factor') from UniformFDMesh import Mesh, Function # introduce some local",
"version='scalar'): # ...use local variables to ease reading L, c, T = self.problem['L",
"scalar and vectorized versions for a quadratic u(x,t)=x(L-x)(1+t/2) that is exactly reproduced, provided",
"values, self.type with the corresponding types, and self.help with the corresponding descriptions of",
"**parameters): \"\"\"Set one or more parameters.\"\"\" for name in parameters: if name in",
"'--' + name, default=self.get(name), metavar=name, type=tp, help=help) return parser def init_from_command_line(self, args): for",
"in computational scheme u_1 = self.f.u[0,:] u = self.f.u[1,:] import time; t0 =",
"d=[dx], Nt = int(round(t_interval/float(dt))), T=t_interval) # The mesh function f will, after solving,",
"V = self.problem.V f = self.problem.f U_0 = self.problem.U_0 U_L = self.problem.U_L Nt",
"= L/2 # compute with half the domain only (symmetry) C, Nx, stability_safety_factor",
"hasattr(self, 'type') and \\ isinstance(self.type, dict) and \\ hasattr(self, 'help') and \\ isinstance(self.help,",
"U_0, U_L if None or 0 if f is None or f ==",
"np.linspace(0, L, Nx+1) # Mesh points in space # Make sure dx and",
"q[im1])*(u_1[i] - u_1[im1])) + \\ 0.5*dt2*f(x[i], t[0]) else: u[i] = U_L(dt) if user_action",
"Treat c(x) as array if isinstance(c, (float,int)): c = np.zeros(x.shape) + c elif",
"C2*(0.5*(q[1:-1] + q[2:])*(u_1[2:] - u_1[1:-1]) - 0.5*(q[1:-1] + q[:-2])*(u_1[1:-1] - u_1[:-2])) + \\",
"are compatible with x and t dx = x[1] - x[0] dt =",
"+ '_archive.npz'): # Simulation is already run return -1, hashed_input # use local",
"if version == 'scalar' else \\ lambda x: np.zeros(x.shape) if U_0 is not",
"u[i+1]=u[i-1]) ip1 = i+1 im1 = ip1 # i-1 -> i+1 u[i] =",
"self.f.u[n-1,:] u_1 = self.f.u[n,:] u = self.f.u[n+1,:] # Update all inner points if",
"def define_command_line_options(self, parser=None): self.ok() if parser is None: import argparse parser = argparse.ArgumentParser()",
"+ q[2:])*(u_1[2:] - u_1[1:-1]) - 0.5*(q[1:-1] + q[:-2])*(u_1[1:-1] - u_1[:-2])) + \\ dt2*f(x[1:-1],",
"0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \\ dt2*f(x[i], t[n]) else: u[i] = U_0(t[n+1])",
"domain and all time steps. self.f = Function(self.m, num_comp=1, space_only=False) def solve(self, user_action=None,",
"+ q[im1])*(u_1[i] - u_1[im1])) + \\ 0.5*dt2*f(x[i], t[0]) else: u[i] = U_L(dt) if",
"if name in self.prm: self.prm[name] = parameters[name] else: self._illegal_parameter(name) def get(self, name): \"\"\"Get",
"_illegal_parameter(self, name): \"\"\"Raise exception about illegal parameter name.\"\"\" raise ValueError( 'parameter \"%s\" is",
"name in self.prm: self.prm[name] = parameters[name] else: self._illegal_parameter(name) def get(self, name): \"\"\"Get one",
"'_' + str(stability_safety_factor) hashed_input = hashlib.sha1(data).hexdigest() if os.path.isfile('.' + hashed_input + '_archive.npz'): #",
"# Mesh points in time x = np.linspace(0, L, Nx+1) # Mesh points",
"-*- \"\"\" Class implementation for solving of the wave equation u_tt = (c**2*u_x)_x",
"dx = (L_end/2)/float(self['Nx']) t_interval = self.problem['T'] dt = dx*self['stability_safety_factor']*self['C']/ \\ float(self.problem['c']) self.m =",
"quadratic u(x,t)=x(L-x)(1+t/2) that is exactly reproduced, provided c(x) is constant. We simulate in",
"t[0]) else: u[i] = U_0(dt) i = Ix[-1] if U_L is None: im1",
"the domain only (symmetry) C, Nx, stability_safety_factor = self[ 'C Nx stability_safety_factor'.split()] dx",
"u[i-1]=u[i+1] # when du/dn = 0, on x=L: i+1 -> i-1 since u[i+1]=u[i-1])",
"T='end time of simulation') def u_exact(self, x, t): L = self['L'] return x*(L-x)*(1+0.5*t)",
"implied by the method of manufactured solution, choosing u(x,t)=x(L-x)(1+t/2) as our solution. This",
"= ip1 # i-1 -> i+1 u[i] = u_1[i] + dt*V(x[i]) + \\",
"solution should be exactly reproduced, provided c is const. We simulate in [0,",
"# ...use local variables to ease reading L, c, T = self.problem['L c",
"= getattr(args, name) class Problem(Parameters): \"\"\" Physical parameters for the wave equation u_tt",
"x): return self.u_exact(x, 0) def V(self, x): return 0.5*self.u_exact(x, 0) def f(self, x,",
"else: if name not in self.prm: self._illegal_parameter(name) return self.prm[name] def __getitem__(self, name): \"\"\"Allow",
"hashed_input = hashlib.sha1(data).hexdigest() if os.path.isfile('.' + hashed_input + '_archive.npz'): # Simulation is already",
"c='coefficient (wave velocity) in PDE', T='end time of simulation') def u_exact(self, x, t):",
"i-1 u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i])",
"0: U_0 = lambda t: 0 if U_L is not None: if isinstance(U_L,",
"self.help = dict(C='Courant number', Nx='No of spatial mesh points', stability_safety_factor='stability factor') from UniformFDMesh",
"0) if version == 'scalar' else \\ lambda x: np.zeros(x.shape) if U_0 is",
"stability_safety_factor=float) self.help = dict(C='Courant number', Nx='No of spatial mesh points', stability_safety_factor='stability factor') from",
"u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i]",
"in class %s does not '\\ 'initialize the\\ndictionaries '\\ 'self.prm, self.type, self.help!' %",
"T = self.problem['L T'.split()] L = L/2 # only half the domain used",
"apply a symmetry condition at the end x=L/2. \"\"\" def __init__(self, problem): self.problem",
"after solving, contain # the solution for the whole domain and all time",
"L, c, T = self.problem['L c T'.split()] L = L/2 # compute with",
"+ q[im1])*(u_1[i] - u_1[im1])) + \\ 0.5*dt2*f(x[i], t[0]) else: u[i] = U_0(dt) i",
"inspect.getsource(U_L)) + \\ '_' + str(L) + str(dt) + '_' + str(C) +",
"u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \\ 0.5*(q[i]",
"ip1 = i+1 im1 = ip1 # i-1 -> i+1 u[i] = u_1[i]",
"or f == 0: f = (lambda x, t: 0) if version ==",
"Solver(Parameters): \"\"\" Numerical parameters for solving the wave equation u_tt = (c**2*u_x)_x +",
"the wave equation u_tt = (c**2*u_x)_x + f(x,t) with t in [0,T] and",
"\"\"\"Get one or more parameter values.\"\"\" if isinstance(name, (list,tuple)): # get many? for",
"variables to ease reading L, c, T = self.problem['L c T'.split()] L =",
"of the wave equation u_tt = (c**2*u_x)_x + f(x,t) with t in [0,T]",
"is not None: if isinstance(U_0, (float,int)) and U_0 == 0: U_0 = lambda",
"L, T = self.problem['L T'.split()] L = L/2 # only half the domain",
"notation in computational scheme u_1 = self.f.u[0,:] u = self.f.u[1,:] import time; t0",
"if user_action is not None: user_action(u, x, t, 1) for n in It[1:-1]:",
"in parameters: if name in self.prm: self.prm[name] = parameters[name] else: self._illegal_parameter(name) def get(self,",
"\"\"\" problem = Problem() solver = Solver(problem) # Read input from the command",
"None parser.add_argument( '--' + name, default=self.get(name), metavar=name, type=tp, help=help) return parser def init_from_command_line(self,",
"= self.problem['L T'.split()] L = L/2 # only half the domain used (symmetry)",
"= L/2 # only half the domain used (symmetry) x = np.linspace(0, L,",
"u_1[i-1])) + \\ dt2*f(x[i], t[n]) elif version == 'vectorized': u[1:-1] = - u_2[1:-1]",
"c, T = self.problem['L c T'.split()] L = L/2 # compute with half",
"None: im1 = i-1 ip1 = im1 # i+1 -> i-1 u[i] =",
"help = self.help[name] if name in self.help else None parser.add_argument( '--' + name,",
"u_1[i]) - \\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \\ 0.5*dt2*f(x[i], t[0]) i",
"points in time x = np.linspace(0, L, Nx+1) # Mesh points in space",
"used (symmetry) x = np.linspace(0, L, Nx+1) # Mesh points in space t",
"\\ hasattr(self, 'help') and \\ isinstance(self.help, dict): return True else: raise ValueError( 'The",
"# Mesh points in time for n in range(len(t)): u_e = self.problem.u_exact(x, t[n])",
"np.zeros(x.shape) if V is None or V == 0: V = (lambda x:",
"u corresponds to u^{n+1} in the mathematical scheme u_2 = self.f.u[n-1,:] u_1 =",
"# Special formula for the first step for i in Ix[1:-1]: u[i] =",
"the first step for i in Ix[1:-1]: u[i] = u_1[i] + dt*V(x[i]) +",
"apply a symmetry condition at the end x=L/2. \"\"\" problem = Problem() solver",
"hasattr(self, 'help') and \\ isinstance(self.help, dict): return True else: raise ValueError( 'The constructor",
"u_1[:-2])) + \\ dt2*f(x[1:-1], t[n]) else: raise ValueError('version=%s' % version) # Insert boundary",
"Nx=int, stability_safety_factor=float) self.help = dict(C='Courant number', Nx='No of spatial mesh points', stability_safety_factor='stability factor')",
"= np.linspace(0, T, Nt+1) # Mesh points in time for n in range(len(t)):",
"or more parameter values.\"\"\" if isinstance(name, (list,tuple)): # get many? for n in",
"range(0, Nx+1) It = range(0, Nt+1) # Load initial condition into u_1 for",
"default=self.get(name), metavar=name, type=tp, help=help) return parser def init_from_command_line(self, args): for name in self.prm:",
"compatible with x and t dx = x[1] - x[0] dt = t[1]",
"tol def test_quadratic_with_classes(): \"\"\" Check the scalar and vectorized versions for a quadratic",
"\\ C2*(0.5*(q[1:-1] + q[2:])*(u_1[2:] - u_1[1:-1]) - 0.5*(q[1:-1] + q[:-2])*(u_1[1:-1] - u_1[:-2])) +",
"x=L: i+1 -> i-1 since u[i+1]=u[i-1] when du/dn=0 ip1 = i+1 im1 =",
"- x[0] dt = t[1] - t[0] # Treat c(x) as array if",
"q[im1])*(u_1[i] - u_1[im1])) + \\ dt2*f(x[i], t[n]) else: u[i] = U_0(t[n+1]) i =",
"parameters: if name in self.prm: self.prm[name] = parameters[name] else: self._illegal_parameter(name) def get(self, name):",
"= self.problem['L'] dx = (L_end/2)/float(self['Nx']) t_interval = self.problem['T'] dt = dx*self['stability_safety_factor']*self['C']/ \\ float(self.problem['c'])",
"x[1] - x[0] dt = t[1] - t[0] # Treat c(x) as array",
"t: 0) if version == 'scalar' else \\ lambda x, t: np.zeros(x.shape) if",
"self.help[name] if name in self.help else None parser.add_argument( '--' + name, default=self.get(name), metavar=name,",
"- u_1[im1])) + \\ 0.5*dt2*f(x[i], t[0]) else: u[i] = U_L(dt) if user_action is",
"== 'vectorized': u[1:-1] = - u_2[1:-1] + 2*u_1[1:-1] + \\ C2*(0.5*(q[1:-1] + q[2:])*(u_1[2:]",
"isinstance(c, (float,int)): c = np.zeros(x.shape) + c elif callable(c): # Call c(x) and",
"\"\"\" Class implementation for solving of the wave equation u_tt = (c**2*u_x)_x +",
"domain', c='coefficient (wave velocity) in PDE', T='end time of simulation') def u_exact(self, x,",
"argparse parser = argparse.ArgumentParser() for name in self.prm: tp = self.type[name] if name",
"constructor in class %s does not '\\ 'initialize the\\ndictionaries '\\ 'self.prm, self.type, self.help!'",
"version == 'vectorized': u[1:-1] = - u_2[1:-1] + 2*u_1[1:-1] + \\ C2*(0.5*(q[1:-1] +",
"im1 = i-1 ip1 = im1 u[i] = - u_2[i] + 2*u_1[i] +",
"x = np.linspace(0, L, Nx+1) # Mesh points in space t = np.linspace(0,",
"U_0(self, t): return self.u_exact(0, t) U_L = None class Solver(Parameters): \"\"\" Numerical parameters",
"and \\ hasattr(self, 'type') and \\ isinstance(self.type, dict) and \\ hasattr(self, 'help') and",
"c_ = np.zeros(x.shape) for i in range(Nx+1): c_[i] = c(x[i]) c = c_",
"None: # Set boundary values (x=0: i-1 -> i+1 since u[i-1]=u[i+1] # when",
"u[i] = - u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i])",
"'_' + str(T) + \\ '_' + str(stability_safety_factor) hashed_input = hashlib.sha1(data).hexdigest() if os.path.isfile('.'",
"Numerical parameters for solving the wave equation u_tt = (c**2*u_x)_x + f(x,t) with",
"= self.m.Nt t = np.linspace(0, T, Nt+1) # Mesh points in time x",
"# i+1 -> i-1 u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] +",
"i-1 since u[i+1]=u[i-1]) ip1 = i+1 im1 = ip1 # i-1 -> i+1",
"\\ ('None' if U_L is None else inspect.getsource(U_L)) + \\ '_' + str(L)",
"should be exactly reproduced when c is const. \"\"\" def __init__(self): self.prm =",
"contain all parameters. \"\"\" pass def ok(self): \"\"\"Check if attr. prm, type, and",
"of simulation') def u_exact(self, x, t): L = self['L'] return x*(L-x)*(1+0.5*t) def I(self,",
"input data import hashlib, inspect data = inspect.getsource(I) + '_' + inspect.getsource(V) +",
"= self.problem.f U_0 = self.problem.U_0 U_L = self.problem.U_L Nt = self.m.Nt t =",
"Physical parameters for the wave equation u_tt = (c**2*u_x)_x + f(x,t) with t",
"steps. self.f = Function(self.m, num_comp=1, space_only=False) def solve(self, user_action=None, version='scalar'): # ...use local",
"= self.problem['L c T'.split()] L = L/2 # compute with half the domain",
"q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \\ dt2*f(x[i],",
"name in self.help else None parser.add_argument( '--' + name, default=self.get(name), metavar=name, type=tp, help=help)",
"Mesh points in space t = np.linspace(0, T, Nt+1) # Mesh points in",
"\"\"\"Set one or more parameters.\"\"\" for name in parameters: if name in self.prm:",
"self.problem.I V = self.problem.V f = self.problem.f U_0 = self.problem.U_0 U_L = self.problem.U_L",
"scheme u_1 = self.f.u[0,:] u = self.f.u[1,:] import time; t0 = time.clock() #",
"x[0] dt = t[1] - t[0] # Treat c(x) as array if isinstance(c,",
"on x=0, and u=u_L or du/dn=0 on x = L. For simplicity, we",
"\"\"\" def __init__(self, problem): self.problem = problem self.prm = dict(C = 0.75, Nx=3,",
"and apply a symmetry condition at the end x=L/2. \"\"\" def __init__(self, problem):",
"__getitem__(self, name): \"\"\"Allow obj[name] indexing to look up a parameter.\"\"\" return self.get(name) def",
"self.type, self.help!' % self.__class__.__name__) def _illegal_parameter(self, name): \"\"\"Raise exception about illegal parameter name.\"\"\"",
"in (0,L). We have u=U_0 or du/dn=0 on x=0, and u=u_L or du/dn=0",
"\\ '_' + str(L) + str(dt) + '_' + str(C) + '_' +",
"if isinstance(U_L, (float,int)) and U_L == 0: U_L = lambda t: 0 #",
"user_action(u, x, t, 1) for n in It[1:-1]: # u corresponds to u^{n+1}",
"compare with a known exact solution. \"\"\" import time, glob, shutil, os import",
"+ inspect.getsource(V) + \\ '_' + inspect.getsource(f) + '_' + str(c) + '_'",
"i = Ix[0] if U_0 is None: # Set boundary values (x=0: i-1",
"i-1 -> i+1 since u[i-1]=u[i+1] # when du/dn = 0, on x=L: i+1",
"mesh and check error\"\"\" Nx = self['Nx'] Nt = self.m.Nt L, T =",
"is not registered.\\nLegal '\\ 'parameters are\\n%s' % (name, ' '.join(list(self.prm.keys())))) def set(self, **parameters):",
"im1 u[i] = - u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] -",
"some local help variables to ease reading L_end = self.problem['L'] dx = (L_end/2)/float(self['Nx'])",
"in [0,T] and x in (0,L). We have u=U_0 or du/dn=0 on x=0,",
"Help variables in the scheme # Wrap user-given f, I, V, U_0, U_L",
"in the scheme # Wrap user-given f, I, V, U_0, U_L if None",
"in self.prm: self.prm[name] = getattr(args, name) class Problem(Parameters): \"\"\" Physical parameters for the",
"U_L = None class Solver(Parameters): \"\"\" Numerical parameters for solving the wave equation",
"+ dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i] +",
"dt2*f(x[i], t[n]) else: u[i] = U_0(t[n+1]) i = Ix[-1] if U_L is None:",
"class Problem(Parameters): \"\"\" Physical parameters for the wave equation u_tt = (c**2*u_x)_x +",
"= dict(L='1D domain', c='coefficient (wave velocity) in PDE', T='end time of simulation') def",
"a symmetry condition at the end x=L/2. \"\"\" def __init__(self, problem): self.problem =",
"dx = x[1] - x[0] dt = t[1] - t[0] # Treat c(x)",
"# only half the domain used (symmetry) x = np.linspace(0, L, Nx+1) #",
"for n in name] else: if name not in self.prm: self._illegal_parameter(name) return self.prm[name]",
"This solution should be exactly reproduced when c is const. \"\"\" def __init__(self):",
"T, Nt+1) # Mesh points in time x = np.linspace(0, L, Nx+1) #",
"= self.problem.I V = self.problem.V f = self.problem.f U_0 = self.problem.U_0 U_L =",
"inspect data = inspect.getsource(I) + '_' + inspect.getsource(V) + \\ '_' + inspect.getsource(f)",
"u_1 = self.f.u[n,:] u = self.f.u[n+1,:] # Update all inner points if version",
"n in It[1:-1]: # u corresponds to u^{n+1} in the mathematical scheme u_2",
"for solving the wave equation u_tt = (c**2*u_x)_x + f(x,t) with t in",
"u[i] = U_0(t[n+1]) i = Ix[-1] if U_L is None: im1 = i-1",
"condition at the end x=L/2. \"\"\" problem = Problem() solver = Solver(problem) #",
"i in Ix[1:-1]: u[i] = - u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] +",
"# Insert boundary conditions i = Ix[0] if U_0 is None: # Set",
"problem): self.problem = problem self.prm = dict(C = 0.75, Nx=3, stability_safety_factor=1.0) self.type =",
"= x[1] - x[0] dt = t[1] - t[0] # Treat c(x) as",
"syntax to assign a parameter's value. \"\"\" return self.set(name=value) def define_command_line_options(self, parser=None): self.ok()",
"implementation for solving of the wave equation u_tt = (c**2*u_x)_x + f(x,t) with",
"and default values, self.type with the corresponding types, and self.help with the corresponding",
"self.prm with parameters and default values, self.type with the corresponding types, and self.help",
"self.type with the corresponding types, and self.help with the corresponding descriptions of parameters.",
"dt are compatible with x and t dx = x[1] - x[0] dt",
"ValueError( 'The constructor in class %s does not '\\ 'initialize the\\ndictionaries '\\ 'self.prm,",
"u[i] = - u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i])",
"in name: if n not in self.prm: self._illegal_parameter(name) return [self.prm[n] for n in",
"# notation in computational scheme u_1 = self.f.u[0,:] u = self.f.u[1,:] import time;",
"0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1])) + \\ dt2*f(x[i], t[n]) else: u[i] = U_L(t[n+1])",
"and vectorized versions for a quadratic u(x,t)=x(L-x)(1+t/2) that is exactly reproduced, provided c(x)",
"line parser = problem.define_command_line_options() parser = solver. define_command_line_options(parser) args = parser.parse_args() problem.init_from_command_line(args) solver.",
"'The constructor in class %s does not '\\ 'initialize the\\ndictionaries '\\ 'self.prm, self.type,",
"V, U_0, U_L if None or 0 if f is None or f",
"boundary values # x=0: i-1 -> i+1 since u[i-1]=u[i+1] when du/dn=0 # x=L:",
"= i-1 ip1 = im1 u[i] = - u_2[i] + 2*u_1[i] + \\",
"\"%s\" is not registered.\\nLegal '\\ 'parameters are\\n%s' % (name, ' '.join(list(self.prm.keys())))) def set(self,",
"value): \"\"\" Allow obj[name] = value syntax to assign a parameter's value. \"\"\"",
"t, 1) for n in It[1:-1]: # u corresponds to u^{n+1} in the",
"# Wrap user-given f, I, V, U_0, U_L if None or 0 if",
"= c_ q = c**2 C2 = (dt/dx)**2; dt2 = dt*dt # Help",
"is None else inspect.getsource(U_L)) + \\ '_' + str(L) + str(dt) + '_'",
"elif version == 'vectorized': u[1:-1] = - u_2[1:-1] + 2*u_1[1:-1] + \\ C2*(0.5*(q[1:-1]",
"2*u_1[i] + \\ C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i] + q[im1])*(u_1[i]",
"u[i] = U_L(t[n+1]) if user_action is not None: if user_action(u, x, t, n+1):",
"L. For simplicity, we use a constant c here and compare with a",
"is None: # Set boundary values # x=0: i-1 -> i+1 since u[i-1]=u[i+1]",
"is constant. We simulate in [0, L/2] and apply a symmetry condition at",
"def I(self, x): return self.u_exact(x, 0) def V(self, x): return 0.5*self.u_exact(x, 0) def",
"'help') and \\ isinstance(self.help, dict): return True else: raise ValueError( 'The constructor in",
"Call c(x) and fill array c c_ = np.zeros(x.shape) for i in range(Nx+1):",
"for i in range(Nx+1): c_[i] = c(x[i]) c = c_ q = c**2",
"when du/dn=0 ip1 = i+1 im1 = ip1 u[i] = - u_2[i] +",
"choosing u(x,t)=x(L-x)(1+t/2) as our solution. This solution should be exactly reproduced, provided c",
"if parser is None: import argparse parser = argparse.ArgumentParser() for name in self.prm:",
"None else inspect.getsource(U_0)) + \\ ('None' if U_L is None else inspect.getsource(U_L)) +",
"\\ C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i] + q[im1])*(u_1[i] - u_1[im1]))",
"self.m = Mesh(L=[0,L_end/2], d=[dx], Nt = int(round(t_interval/float(dt))), T=t_interval) # The mesh function f",
"args): for name in self.prm: self.prm[name] = getattr(args, name) class Problem(Parameters): \"\"\" Physical",
"= i+1 im1 = ip1 u[i] = - u_2[i] + 2*u_1[i] + \\",
"i = Ix[-1] if U_L is None: im1 = i-1 ip1 = im1",
"self.problem['L'] dx = (L_end/2)/float(self['Nx']) t_interval = self.problem['T'] dt = dx*self['stability_safety_factor']*self['C']/ \\ float(self.problem['c']) self.m",
"problem.define_command_line_options() parser = solver. define_command_line_options(parser) args = parser.parse_args() problem.init_from_command_line(args) solver. init_from_command_line(args) print parser.parse_args()",
"\"\"\" Check the scalar and vectorized versions for a quadratic u(x,t)=x(L-x)(1+t/2) that is",
"def set(self, **parameters): \"\"\"Set one or more parameters.\"\"\" for name in parameters: if",
"in the mathematical scheme u_2 = self.f.u[n-1,:] u_1 = self.f.u[n,:] u = self.f.u[n+1,:]",
"U_0 = lambda t: 0 if U_L is not None: if isinstance(U_L, (float,int))",
"\"\"\" Allow obj[name] = value syntax to assign a parameter's value. \"\"\" return",
"0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1])) + \\ dt2*f(x[i], t[n]) elif version == 'vectorized':",
"+ \\ dt2*f(x[1:-1], t[n]) else: raise ValueError('version=%s' % version) # Insert boundary conditions",
"hashed_input # use local variables to make code closer to mathematical # notation",
"ip1 = i+1 im1 = ip1 u[i] = - u_2[i] + 2*u_1[i] +",
"c(x) and fill array c c_ = np.zeros(x.shape) for i in range(Nx+1): c_[i]",
"np.zeros(x.shape) for i in range(Nx+1): c_[i] = c(x[i]) c = c_ q =",
"L = self['L'] return x*(L-x)*(1+0.5*t) def I(self, x): return self.u_exact(x, 0) def V(self,",
"True else: raise ValueError( 'The constructor in class %s does not '\\ 'initialize",
"in self.prm: self.prm[name] = parameters[name] else: self._illegal_parameter(name) def get(self, name): \"\"\"Get one or",
"and compare with a known exact solution. \"\"\" import time, glob, shutil, os",
"import numpy as np class Parameters(object): def __init__(self): \"\"\" Subclasses must initialize self.prm",
"# Update all inner points if version == 'scalar': for i in Ix[1:-1]:",
"t = np.linspace(0, T, Nt+1) # Mesh points in time x = np.linspace(0,",
"T, Nt+1) # Mesh points in time for n in range(len(t)): u_e =",
"self.type and self.help are optional, but self.prms must be complete and contain all",
"compute with half the domain only (symmetry) C, Nx, stability_safety_factor = self[ 'C",
"q[i-1])*(u_1[i] - u_1[i-1])) + \\ 0.5*dt2*f(x[i], t[0]) i = Ix[0] if U_0 is",
"None or I == 0: I = (lambda x: 0) if version ==",
"= (dt/dx)**2; dt2 = dt*dt # Help variables in the scheme # Wrap",
"exactly reproduced, provided c is const. We simulate in [0, L/2] and apply",
"= self.m.Nt L, T = self.problem['L T'.split()] L = L/2 # only half",
"else \\ lambda x, t: np.zeros(x.shape) if I is None or I ==",
"x, t): c = self['c'] return 2*(1+0.5*t)*c**2 def U_0(self, t): return self.u_exact(0, t)",
"= problem self.prm = dict(C = 0.75, Nx=3, stability_safety_factor=1.0) self.type = dict(C=float, Nx=int,",
"many? for n in name: if n not in self.prm: self._illegal_parameter(name) return [self.prm[n]",
"self.set(name=value) def define_command_line_options(self, parser=None): self.ok() if parser is None: import argparse parser =",
"self.prm: tp = self.type[name] if name in self.type else str help = self.help[name]",
"diff = np.abs(self.f.u[n,:] - u_e).max() print 'diff:', diff tol = 1E-13 assert diff",
"corresponds to u^{n+1} in the mathematical scheme u_2 = self.f.u[n-1,:] u_1 = self.f.u[n,:]",
"\\ 0.5*C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1]))",
"parser def init_from_command_line(self, args): for name in self.prm: self.prm[name] = getattr(args, name) class",
"use a constant c here and compare with a known exact solution. \"\"\"",
"\\ '_' + inspect.getsource(f) + '_' + str(c) + '_' + \\ ('None'",
"x: 0) if version == 'scalar' else \\ lambda x: np.zeros(x.shape) if V",
"= U_0(dt) i = Ix[-1] if U_L is None: im1 = i-1 ip1",
"or more parameters.\"\"\" for name in parameters: if name in self.prm: self.prm[name] =",
"the end x=L/2. \"\"\" def __init__(self, problem): self.problem = problem self.prm = dict(C",
"or V == 0: V = (lambda x: 0) if version == 'scalar'",
"and apply a symmetry condition at the end x=L/2. \"\"\" problem = Problem()",
"mathematical scheme u_2 = self.f.u[n-1,:] u_1 = self.f.u[n,:] u = self.f.u[n+1,:] # Update",
"range(0,Nx+1): u_1[i] = I(x[i]) if user_action is not None: user_action(u_1, x, t, 0)",
"U_L is not None: if isinstance(U_L, (float,int)) and U_L == 0: U_L =",
"prm, type, and help are defined.\"\"\" if hasattr(self, 'prm') and \\ isinstance(self.prm, dict)",
"if n not in self.prm: self._illegal_parameter(name) return [self.prm[n] for n in name] else:",
"condition into u_1 for i in range(0,Nx+1): u_1[i] = I(x[i]) if user_action is",
"= dict(C='Courant number', Nx='No of spatial mesh points', stability_safety_factor='stability factor') from UniformFDMesh import",
"- u_2[i] + 2*u_1[i] + \\ C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \\",
"def assert_no_error(self): \"\"\"Run through mesh and check error\"\"\" Nx = self['Nx'] Nt =",
"\\ float(self.problem['c']) self.m = Mesh(L=[0,L_end/2], d=[dx], Nt = int(round(t_interval/float(dt))), T=t_interval) # The mesh",
"du/dn=0 ip1 = i+1 im1 = ip1 u[i] = - u_2[i] + 2*u_1[i]",
"t dx = x[1] - x[0] dt = t[1] - t[0] # Treat",
"data = inspect.getsource(I) + '_' + inspect.getsource(V) + \\ '_' + inspect.getsource(f) +",
"+ hashed_input + '_archive.npz'): # Simulation is already run return -1, hashed_input #",
"0) def V(self, x): return 0.5*self.u_exact(x, 0) def f(self, x, t): c =",
"as our solution. This solution should be exactly reproduced, provided c is const.",
"I = self.problem.I V = self.problem.V f = self.problem.f U_0 = self.problem.U_0 U_L",
"only (symmetry) C, Nx, stability_safety_factor = self[ 'C Nx stability_safety_factor'.split()] dx = self.m.d[0]",
"variables in the scheme # Wrap user-given f, I, V, U_0, U_L if",
"f(self, x, t): c = self['c'] return 2*(1+0.5*t)*c**2 def U_0(self, t): return self.u_exact(0,",
"is const. We simulate in [0, L/2] and apply a symmetry condition at",
"\"\"\" def __init__(self): self.prm = dict(L=2.5, c=1.5, T=18) self.type = dict(L=float, c=float, T=float)",
"+ str(dt) + '_' + str(C) + '_' + str(T) + \\ '_'",
"self.prm: self.prm[name] = parameters[name] else: self._illegal_parameter(name) def get(self, name): \"\"\"Get one or more",
"= self[ 'C Nx stability_safety_factor'.split()] dx = self.m.d[0] I = self.problem.I V =",
"__init__(self, problem): self.problem = problem self.prm = dict(C = 0.75, Nx=3, stability_safety_factor=1.0) self.type",
"not None: if isinstance(U_0, (float,int)) and U_0 == 0: U_0 = lambda t:",
"i+1 -> i-1 since u[i+1]=u[i-1] when du/dn=0 ip1 = i+1 im1 = ip1",
"break cpu_time = time.clock() - t0 return cpu_time, hashed_input def assert_no_error(self): \"\"\"Run through",
"= t[1] - t[0] # Treat c(x) as array if isinstance(c, (float,int)): c",
"const. We simulate in [0, L/2] and apply a symmetry condition at the",
"\\ dt2*f(x[i], t[n]) else: u[i] = U_0(t[n+1]) i = Ix[-1] if U_L is",
"- u_1[:-2])) + \\ dt2*f(x[1:-1], t[n]) else: raise ValueError('version=%s' % version) # Insert",
"Nx+1) # Mesh points in space t = np.linspace(0, T, Nt+1) # Mesh",
"x: 0) if version == 'scalar' else \\ lambda x: np.zeros(x.shape) if U_0",
"str(stability_safety_factor) hashed_input = hashlib.sha1(data).hexdigest() if os.path.isfile('.' + hashed_input + '_archive.npz'): # Simulation is",
"values (x=0: i-1 -> i+1 since u[i-1]=u[i+1] # when du/dn = 0, on",
"only half the domain used (symmetry) x = np.linspace(0, L, Nx+1) # Mesh",
"= U_0(t[n+1]) i = Ix[-1] if U_L is None: im1 = i-1 ip1",
"\\ dt2*f(x[i], t[n]) elif version == 'vectorized': u[1:-1] = - u_2[1:-1] + 2*u_1[1:-1]",
"# Mesh points in space # Make sure dx and dt are compatible",
"if attr. prm, type, and help are defined.\"\"\" if hasattr(self, 'prm') and \\",
"with a known exact solution. \"\"\" import time, glob, shutil, os import numpy",
"os import numpy as np class Parameters(object): def __init__(self): \"\"\" Subclasses must initialize",
"in Ix[1:-1]: u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] -",
"corresponding types, and self.help with the corresponding descriptions of parameters. self.type and self.help",
"all parameters. \"\"\" pass def ok(self): \"\"\"Check if attr. prm, type, and help",
"-> i-1 u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] -",
"is implied by the method of manufactured solution, choosing u(x,t)=x(L-x)(1+t/2) as our solution.",
"# Set boundary values # x=0: i-1 -> i+1 since u[i-1]=u[i+1] when du/dn=0",
"- u_1[i-1])) + \\ 0.5*dt2*f(x[i], t[0]) i = Ix[0] if U_0 is None:",
"None: user_action(u_1, x, t, 0) # Special formula for the first step for",
"dict(L=float, c=float, T=float) self.help = dict(L='1D domain', c='coefficient (wave velocity) in PDE', T='end",
"self._illegal_parameter(name) return [self.prm[n] for n in name] else: if name not in self.prm:",
"class %s does not '\\ 'initialize the\\ndictionaries '\\ 'self.prm, self.type, self.help!' % self.__class__.__name__)",
"== 0: I = (lambda x: 0) if version == 'scalar' else \\",
"diff < tol def test_quadratic_with_classes(): \"\"\" Check the scalar and vectorized versions for",
"computational scheme u_1 = self.f.u[0,:] u = self.f.u[1,:] import time; t0 = time.clock()",
"np.zeros(x.shape) if U_0 is not None: if isinstance(U_0, (float,int)) and U_0 == 0:",
"# Simulation is already run return -1, hashed_input # use local variables to",
"glob, shutil, os import numpy as np class Parameters(object): def __init__(self): \"\"\" Subclasses",
"str(c) + '_' + \\ ('None' if U_0 is None else inspect.getsource(U_0)) +",
"from UniformFDMesh import Mesh, Function # introduce some local help variables to ease",
"= lambda t: 0 # Make hash of all input data import hashlib,",
"+ inspect.getsource(f) + '_' + str(c) + '_' + \\ ('None' if U_0",
"simplicity, we use a constant c here and compare with a known exact",
"0: V = (lambda x: 0) if version == 'scalar' else \\ lambda",
"None: im1 = i-1 ip1 = im1 u[i] = - u_2[i] + 2*u_1[i]",
"= time.clock() - t0 return cpu_time, hashed_input def assert_no_error(self): \"\"\"Run through mesh and",
"since u[i+1]=u[i-1]) ip1 = i+1 im1 = ip1 # i-1 -> i+1 u[i]",
"im1 = ip1 # i-1 -> i+1 u[i] = u_1[i] + dt*V(x[i]) +",
"name): \"\"\"Raise exception about illegal parameter name.\"\"\" raise ValueError( 'parameter \"%s\" is not",
"np.zeros(x.shape) + c elif callable(c): # Call c(x) and fill array c c_",
"= (lambda x, t: 0) if version == 'scalar' else \\ lambda x,",
"half the domain used (symmetry) x = np.linspace(0, L, Nx+1) # Mesh points",
"n in name] else: if name not in self.prm: self._illegal_parameter(name) return self.prm[name] def",
"# CPU time measurement Ix = range(0, Nx+1) It = range(0, Nt+1) #",
"symmetry condition at the end x=L/2. \"\"\" def __init__(self, problem): self.problem = problem",
"self.get(name) def __setitem__(self, name, value): \"\"\" Allow obj[name] = value syntax to assign",
"+ \\ 0.5*dt2*f(x[i], t[0]) else: u[i] = U_0(dt) i = Ix[-1] if U_L",
"t[n]) else: u[i] = U_L(t[n+1]) if user_action is not None: if user_action(u, x,",
"'scalar' else \\ lambda x, t: np.zeros(x.shape) if I is None or I",
"\\ C2*(0.5*(q[i] + q[i+1])*(u_1[i+1] - u_1[i]) - \\ 0.5*(q[i] + q[i-1])*(u_1[i] - u_1[i-1]))",
"0 # Make hash of all input data import hashlib, inspect data =",
"self['Nx'] Nt = self.m.Nt L, T = self.problem['L T'.split()] L = L/2 #",
"self.problem['L T'.split()] L = L/2 # only half the domain used (symmetry) x",
"'prm') and \\ isinstance(self.prm, dict) and \\ hasattr(self, 'type') and \\ isinstance(self.type, dict)",
"n in name: if n not in self.prm: self._illegal_parameter(name) return [self.prm[n] for n",
"None: import argparse parser = argparse.ArgumentParser() for name in self.prm: tp = self.type[name]",
"= Ix[-1] if U_L is None: im1 = i-1 ip1 = im1 u[i]",
"symmetry condition at the end x=L/2. \"\"\" problem = Problem() solver = Solver(problem)",
"else: self._illegal_parameter(name) def get(self, name): \"\"\"Get one or more parameter values.\"\"\" if isinstance(name,",
"0: f = (lambda x, t: 0) if version == 'scalar' else \\",
"dict): return True else: raise ValueError( 'The constructor in class %s does not",
"x in (0,L). The problem definition is implied by the method of manufactured",
"Read input from the command line parser = problem.define_command_line_options() parser = solver. define_command_line_options(parser)",
"local variables to make code closer to mathematical # notation in computational scheme",
"more parameter values.\"\"\" if isinstance(name, (list,tuple)): # get many? for n in name:",
"+ \\ '_' + str(L) + str(dt) + '_' + str(C) + '_'",
"t): c = self['c'] return 2*(1+0.5*t)*c**2 def U_0(self, t): return self.u_exact(0, t) U_L",
"'_archive.npz'): # Simulation is already run return -1, hashed_input # use local variables",
"isinstance(self.type, dict) and \\ hasattr(self, 'help') and \\ isinstance(self.help, dict): return True else:",
"= lambda t: 0 if U_L is not None: if isinstance(U_L, (float,int)) and",
"- u_1[im1])) + \\ dt2*f(x[i], t[n]) else: u[i] = U_L(t[n+1]) if user_action is",
"isinstance(U_0, (float,int)) and U_0 == 0: U_0 = lambda t: 0 if U_L",
"in range(Nx+1): c_[i] = c(x[i]) c = c_ q = c**2 C2 =",
"points in space t = np.linspace(0, T, Nt+1) # Mesh points in time",
"L/2] and apply a symmetry condition at the end x=L/2. \"\"\" def __init__(self,",
"c=float, T=float) self.help = dict(L='1D domain', c='coefficient (wave velocity) in PDE', T='end time",
"and all time steps. self.f = Function(self.m, num_comp=1, space_only=False) def solve(self, user_action=None, version='scalar'):",
"= i+1 im1 = ip1 # i-1 -> i+1 u[i] = u_1[i] +",
"= self.f.u[0,:] u = self.f.u[1,:] import time; t0 = time.clock() # CPU time",
"our solution. This solution should be exactly reproduced when c is const. \"\"\"",
"i-1 since u[i+1]=u[i-1] when du/dn=0 ip1 = i+1 im1 = ip1 u[i] =",
"'vectorized': u[1:-1] = - u_2[1:-1] + 2*u_1[1:-1] + \\ C2*(0.5*(q[1:-1] + q[2:])*(u_1[2:] -",
"parameters[name] else: self._illegal_parameter(name) def get(self, name): \"\"\"Get one or more parameter values.\"\"\" if",
"= (lambda x: 0) if version == 'scalar' else \\ lambda x: np.zeros(x.shape)",
"c elif callable(c): # Call c(x) and fill array c c_ = np.zeros(x.shape)",
"pass def ok(self): \"\"\"Check if attr. prm, type, and help are defined.\"\"\" if",
"f = self.problem.f U_0 = self.problem.U_0 U_L = self.problem.U_L Nt = self.m.Nt t",
"x=L/2. \"\"\" def __init__(self, problem): self.problem = problem self.prm = dict(C = 0.75,",
"V = (lambda x: 0) if version == 'scalar' else \\ lambda x:",
"-> i+1 u[i] = u_1[i] + dt*V(x[i]) + \\ 0.5*C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] -",
"def __init__(self): self.prm = dict(L=2.5, c=1.5, T=18) self.type = dict(L=float, c=float, T=float) self.help",
"is const. \"\"\" def __init__(self): self.prm = dict(L=2.5, c=1.5, T=18) self.type = dict(L=float,",
"# get many? for n in name: if n not in self.prm: self._illegal_parameter(name)",
"else: u[i] = U_L(t[n+1]) if user_action is not None: if user_action(u, x, t,",
"class Parameters(object): def __init__(self): \"\"\" Subclasses must initialize self.prm with parameters and default",
"(list,tuple)): # get many? for n in name: if n not in self.prm:",
"dt2 = dt*dt # Help variables in the scheme # Wrap user-given f,",
"self.f.u[n,:] u = self.f.u[n+1,:] # Update all inner points if version == 'scalar':",
"L, Nx+1) # Mesh points in space t = np.linspace(0, T, Nt+1) #",
"+ \\ dt2*f(x[i], t[n]) else: u[i] = U_0(t[n+1]) i = Ix[-1] if U_L",
"c_ q = c**2 C2 = (dt/dx)**2; dt2 = dt*dt # Help variables",
"f == 0: f = (lambda x, t: 0) if version == 'scalar'",
"make code closer to mathematical # notation in computational scheme u_1 = self.f.u[0,:]",
"a parameter.\"\"\" return self.get(name) def __setitem__(self, name, value): \"\"\" Allow obj[name] = value",
"self['c'] return 2*(1+0.5*t)*c**2 def U_0(self, t): return self.u_exact(0, t) U_L = None class",
"\"\"\" import time, glob, shutil, os import numpy as np class Parameters(object): def",
"value. \"\"\" return self.set(name=value) def define_command_line_options(self, parser=None): self.ok() if parser is None: import",
"c(x) is constant. We simulate in [0, L/2] and apply a symmetry condition",
"should be exactly reproduced, provided c is const. We simulate in [0, L/2]",
"about illegal parameter name.\"\"\" raise ValueError( 'parameter \"%s\" is not registered.\\nLegal '\\ 'parameters",
"value syntax to assign a parameter's value. \"\"\" return self.set(name=value) def define_command_line_options(self, parser=None):",
"\\ lambda x: np.zeros(x.shape) if U_0 is not None: if isinstance(U_0, (float,int)) and",
"= self.f.u[n+1,:] # Update all inner points if version == 'scalar': for i",
"problem = Problem() solver = Solver(problem) # Read input from the command line",
"# when du/dn = 0, on x=L: i+1 -> i-1 since u[i+1]=u[i-1]) ip1",
"elif callable(c): # Call c(x) and fill array c c_ = np.zeros(x.shape) for",
"= self.f.u[n-1,:] u_1 = self.f.u[n,:] u = self.f.u[n+1,:] # Update all inner points",
"+ 2*u_1[i] + \\ C2*(0.5*(q[i] + q[ip1])*(u_1[ip1] - u_1[i]) - \\ 0.5*(q[i] +",
"x*(L-x)*(1+0.5*t) def I(self, x): return self.u_exact(x, 0) def V(self, x): return 0.5*self.u_exact(x, 0)",
"reading L_end = self.problem['L'] dx = (L_end/2)/float(self['Nx']) t_interval = self.problem['T'] dt = dx*self['stability_safety_factor']*self['C']/",
"int(round(t_interval/float(dt))), T=t_interval) # The mesh function f will, after solving, contain # the",
"choosing u(x,t)=x(L-x)(1+t/2) as our solution. This solution should be exactly reproduced when c",
"self.__class__.__name__) def _illegal_parameter(self, name): \"\"\"Raise exception about illegal parameter name.\"\"\" raise ValueError( 'parameter",
"q[2:])*(u_1[2:] - u_1[1:-1]) - 0.5*(q[1:-1] + q[:-2])*(u_1[1:-1] - u_1[:-2])) + \\ dt2*f(x[1:-1], t[n])",
"t[n]) else: u[i] = U_0(t[n+1]) i = Ix[-1] if U_L is None: im1",
"for the wave equation u_tt = (c**2*u_x)_x + f(x,t) with t in [0,T]",
"parameter's value. \"\"\" return self.set(name=value) def define_command_line_options(self, parser=None): self.ok() if parser is None:",
"T'.split()] L = L/2 # compute with half the domain only (symmetry) C,"
"= np.concatenate((gc__,ec__)) #H = ker_GcTGc #AA0 = np.hstack((Fc__,Gc_clust)) #AB1 = # # #ZZ1",
"= [] Kplus_f_test = [] KplusBcT_p = [] Bc_nonzRow = [] KplusBcT =",
"= [] KplusBcT = [] BcKplus_tmp = [] # BcK_dense = [] K_UT",
"np.dot(K[i],iK_K) # del_ = np.linalg.norm(K_iK_K - K[i] ) / np.linalg.norm(K[i]) # print(del_) #",
"print str0,i,j if (makeSymmetric): logInd = J != I; I = np.concatenate((I,J[logInd])) J",
"np.hstack((Fc_clust,Gc_clust)) Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print ( Z.shape) Ac_clust_python = np.vstack((Ac_clust_python,Z)) Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1)",
"= BcKplus_List[i] # BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) #",
"K_regD = block_diag(K_regD,K_reg[i]); # RD = block_diag(RD,R[i]); # frhs = np.concatenate((frhs,rhs[i])) # xxD",
"np.concatenate((xxD,crhs)) #Bc_g = np.hstack((Bc_g,Bc[2])) #Bc_g = np.hstack((Bc_g,Bc[2])) #BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1)",
"# # # # # # # # # # # # ##plt.spy(Fc_clust,markersize",
"%d\" % (GcTGc.nonzero()[0].shape[0])) # plt.subplot(1,3,2) # if Fc_clust.shape[0] < 100: # markersize =",
"## indBc = np.abs(Bc).sum(axis=1)>0 ## Bc_red = Bc[indBc,:] ## BcKplus = BcKplus_List[i] ##",
"= Bc[0].shape[0] # lam_p = lam_alpha_p[0:nLam] ## alpha_p = lam_alpha[nLam:] # for i",
"= [] # Lumped = [] # Lumped = [] for i in",
"= Bct_list[i].toarray() ## indBc = np.abs(Bc).sum(axis=1)>0 ## Bc_red = Bc[indBc,:] ## BcKplus =",
"gc_p = np.concatenate((gc_p,np.zeros(6))) Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc",
"Bf_g = np.hstack((Bf[0],Bf[1])) else: Bf_g = np.hstack((Bf_g,Bf[i+1])) for i in range(nSub - 1):",
"== 0): Bf_g = np.hstack((Bf[0],Bf[1])) else: Bf_g = np.hstack((Bf_g,Bf[i+1])) for i in range(nSub",
"np.zeros(nB); # #A = np.vstack((A0,Bc_Z)) # #b = np.concatenate((frhs,crhs)) # #x = np.linalg.solve(A,b)",
"##plt.spy(UR,markersize=0.1); # ##print (\"nnz = %d, nnz(reordered) = %d \") % (nnz0, nnzR)",
"[] K_reg = [] Fc = [] R = [] Rf = []",
"Bc_g = np.hstack((Bc[0],Bc[1])) else: Bc_g = np.hstack((Bc_g,Bc[i+1])) for i in range(nSub - 1):",
"# plt.subplot(1,3,2) # if Fc_clust.shape[0] < 100: # markersize = 3 # else:",
"0.7 # plt.spy(Fc_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Fc_clust.nonzero()[0].shape[0])) # plt.subplot(1,3,3) #",
"##Gc_H = np.dot(GcTGc.toarray(),ker_GcTGc) # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac = Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1) ##plt.spy(Ac_clust",
"xxD = np.concatenate((xxD,xx[i])) # for i in range(nSub - 1): if (i ==",
"tmp[i,0] = float(k[0]) tmp[i,1] = float(k[1]) tmp[i,2] = float(k[2]) if (tmp.shape[0]==1): tmp =",
"[] Gc = [] # Gf = [] Gf_p = [] Gc =",
"R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K = np.linalg.solve(K_reg[i],K[i]) #",
"ker_GcTGc #AA0 = np.hstack((Fc__,Gc_clust)) #AB1 = # # #ZZ1 = np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1",
"## ## if (i == 0): ## Gf_clust_python = np.dot(Bf,Rc) ## Gc_clust_python =",
"= %d, nnz(reordered) = %d \") % (nnz0, nnzR) # # ##plt.show() #",
"= load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc = load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf = load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf = load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac =",
"## Bf_red = Bf[indBf,:] ## ## Rc = RList[i].toarray() ## ## ## ##",
"np.linalg.norm(Gc_clust - Gc_clust_python) ## ddd1 = np.linalg.norm(Gc_clust) ## print \"|Gc_clust_python - Gc_clust_myAp|/|Gc_clust_python|\",ddd0 /",
"float) for i in range(len(f0)): line = f0[i] k = line.split() tmp[i,0] =",
"np.concatenate((I,J[logInd])) J = np.concatenate((J,I[logInd])) V = np.concatenate((V,V[logInd])) if (makeSparse): tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else:",
"load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) # # #dFc_eig = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd",
"i in range(1,14): ## Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) ) ## # ## Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1)",
"##Fc_python_List = [] # ##if 0: ## Fc_clust_python = np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0])) ## for",
"in range(nSub - 1): if (i == 0): Bc_g = np.hstack((Bc[0],Bc[1])) else: Bc_g",
"## ## ## if (i == 0): ## Gf_clust_python = np.dot(Bf,Rc) ## Gc_clust_python",
"Gf_clust_python) ## ddd1 = np.linalg.norm(Gf_clust) ## print \"|Gf_clust_python - Gf_clust_myAp|/|Gf_clust_python|\",ddd0 / ddd1 ##",
"path0 = \"../data\" if 1: K = [] K_reg = [] Fc =",
"= tmp[1::,0]-offset; J = tmp[1::,1]-offset; V = tmp[1::,2] # # print str0,i,j if",
"rhs = [] xx = [] Kplus_f_test = [] KplusBcT_p = [] Bc_nonzRow",
"100: # markersize_ = 3 # else: # markersize_ = 0.7 # plt.spy(GcTGc,",
"/ ddd1 ## ## ddd0 = np.linalg.norm(Gc_clust - Gc_clust_python) ## ddd1 = np.linalg.norm(Gc_clust)",
"= np.vstack((Ac_clust_python,Z)) Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) # test = load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) # KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD",
"Z.shape) Ac_clust_python = np.vstack((Ac_clust_python,Z)) Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) # test = load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) # KpOnes=",
"\"norm = %3.8e \\n\" % np.linalg.norm(KKpK - K[i]) # # # # #",
"# ##Fc_python_List = [] # ##if 0: ## Fc_clust_python = np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0])) ##",
"f0.pop(0) #removes the first line tmp = np.zeros((len(f0),3), dtype = float) for i",
"= [] Rf = [] Bc = [] Bf = [] BcT_dense =",
"dtype = float) for i in range(len(f0)): line = f0[i] k = line.split()",
"= V else: tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return tmp def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile = path+'/'+str(i)+'/'+str0+str(j)+'.txt'",
"Gc_clust_myAp|/|Gc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gf_clust - Gf_clust_python) ## ddd1 =",
"x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # # print alpha_p[(6*i):(6*(i+1))] # x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # #",
"= load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc = load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) # gc = load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) #",
"# ##BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) # # #K_test= [] #Kplus_K_test = [] #K_Kplus_K_test =",
"== 0): g_p = tmp_g e_p = tmp_e; else: g_p += tmp_g; e_p",
"% np.linalg.norm(x_out[i] - x_out_p[i])) Ac_clust_python = np.hstack((Fc_clust,Gc_clust)) Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print ( Z.shape)",
"R[0] #for i in range(1,nSub): # K_regD = block_diag(K_regD,K_reg[i]); # RD = block_diag(RD,R[i]);",
"else: Gf_g += Gf_p[i+1] weigth = np.loadtxt(path0+'/dump_weigth.txt') #Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # # #gc__",
"np.loadtxt(path0+'/dump_weigth.txt') #Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # # #gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ = - np.dot(RD.T,frhs)",
"#Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # # #gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ = - np.dot(RD.T,frhs) #",
"= block_diag(K_regD,K_reg[i]); # RD = block_diag(RD,R[i]); # frhs = np.concatenate((frhs,rhs[i])) # xxD =",
"plt.xlabel(\"nnz = %d\" % (Fc_clust.nonzero()[0].shape[0])) # plt.subplot(1,3,3) # if Ac_clust.shape[0] < 100: #",
"= %d\" % (Fc_clust.nonzero()[0].shape[0])) # plt.subplot(1,3,3) # if Ac_clust.shape[0] < 100: # markersize_",
"= 0.7 # plt.spy(Ac_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Ac_clust.nonzero()[0].shape[0])) # plt.show()",
"# #KKpK = np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i])) # KKpK = np.dot(K[i], np.linalg.solve(K_reg[i],K[i])) # print \"norm",
"##plt.subplot(1,2,2) ##plt.spy(Ac,markersize = 0.125) # # # ##Fc_python_List = [] # ##if 0:",
"np.linalg.norm(Fc_clust) ## print \"|Fc_clust_python - Fc_clust_myAp|/|Fc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gc_clust",
"tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return tmp def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile = path+'/'+str(i)+'/'+str0+str(j)+'.txt' # tmp =",
"range(1,14): ## Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) ) ## # ## Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) # #",
"= [] Gc = [] # Gf = [] Gf_p = [] Gc",
"tmp[1::,2] # # print str0,i,j if (makeSymmetric): logInd = J != I; I",
"ttt = Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] - GcList[ii] ## print np.linalg.norm(ttt) ## ## ## for ii",
"= [] # Lumped = [] for i in range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1))",
"= np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i])) # KKpK = np.dot(K[i], np.linalg.solve(K_reg[i],K[i])) # print \"norm = %3.8e",
"1): if (i == 0): Bf_g = np.hstack((Bf[0],Bf[1])) else: Bf_g = np.hstack((Bf_g,Bf[i+1])) for",
"load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ## Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ## Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ## Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1)",
"else: Bc_g = np.hstack((Bc_g,Bc[i+1])) for i in range(nSub - 1): if (i ==",
"Gf_p[i+1] weigth = np.loadtxt(path0+'/dump_weigth.txt') #Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # # #gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__",
"tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e = -np.dot(R[i].T,rhs[i]) if (i == 0): g_p =",
"# ## Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) # # # # ##BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #",
"##plt.spy(Ac_clust ,markersize = 2.0) ##plt.subplot(1,2,2) ##plt.spy(Ac,markersize = 0.125) # # # ##Fc_python_List =",
"plt.spy(Fc_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Fc_clust.nonzero()[0].shape[0])) # plt.subplot(1,3,3) # if Ac_clust.shape[0]",
"= np.linalg.norm(K_iK_K - K[i] ) / np.linalg.norm(K[i]) # print(del_) # tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i],",
"Gf_p[1] else: Gf_g += Gf_p[i+1] weigth = np.loadtxt(path0+'/dump_weigth.txt') #Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # #",
"test = load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) # KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD = K_reg[0] #frhs = rhs[0] #xxD",
"[] # Lumped = [] for i in range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1))",
"markersize_ = 3 # else: # markersize_ = 0.7 # plt.spy(Ac_clust, markersize=markersize_) #",
"= np.dot(Bc,Rc) ## else: ## Gf_clust_python = np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ## Gc_clust_python = np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ##",
"markersize_ = 3 # else: # markersize_ = 0.7 # plt.spy(GcTGc, markersize=markersize_) #",
"= tmp_e; else: g_p += tmp_g; e_p = np.concatenate((e_p,tmp_e)) print(' ...%d '%(i)) #",
"= np.dot(Bf,Rc) ## Gc_clust_python = np.dot(Bc,Rc) ## else: ## Gf_clust_python = np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ##",
"= np.hstack((K_regD,Bc_g.T)) # #nB = Bc_g.shape[0] #Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs = np.zeros(nB);",
"## else: ## Gf_clust_python = np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ## Gc_clust_python = np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ## indBcKplus =",
"x_out_p = [] # Lumped = [] # Lumped = [] for i",
"lam_alpha_p[0:nLam] ## alpha_p = lam_alpha[nLam:] # for i in range(nSub): # print (\"",
"== 0): Gf_g = Gf_p[0]+ Gf_p[1] else: Gf_g += Gf_p[i+1] weigth = np.loadtxt(path0+'/dump_weigth.txt')",
"the first line tmp = np.zeros((len(f0),3), dtype = float) for i in range(len(f0)):",
"## BcKplus_ = np.linalg.solve(K_reg_List[i],Bc.T) ## Fc_i = np.dot(Bc_red,BcKplus_python) ## Fc_clust_python += np.dot(Bc,BcKplus_) ##",
"##P,L,U= scipy.linalg.lu(Ac_clust) ##nnz0 = L.nonzero()[0].shape[0] + U.nonzero()[0].shape[0] ## ## ### ### ##AcR =",
"#GfTGf_ = np.zeros((GfTGf.shape[0],GfTGf.shape[0])) # # # # # # #for d in range(nSub):",
"xx[0] #RD = R[0] #for i in range(1,nSub): # K_regD = block_diag(K_regD,K_reg[i]); #",
"# xxD = np.concatenate((xxD,xx[i])) # for i in range(nSub - 1): if (i",
"np.zeros((len(f0),3), dtype = float) for i in range(len(f0)): line = f0[i] k =",
"[] Bf = [] BcT_dense = [] Gc = [] # Gf =",
"tmp = V else: tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return tmp def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile =",
"# x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # # x_out_p.append(x10 - x11 + x2) # print(",
"Bf_List[i].toarray() ## indBf = np.abs(Bf).sum(axis=1)>0 ## Bf_red = Bf[indBf,:] ## ## Rc =",
"tmp = load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return tmp path0 = \"../data\" if 1: K = []",
"##KpBcT1 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) # # #dFc_eig = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig =",
"\"|Fc_clust_python - Fc_clust_myAp|/|Fc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gc_clust - Gc_clust_python) ##",
"ddd0 = np.linalg.norm(Gc_clust - Gc_clust_python) ## ddd1 = np.linalg.norm(Gc_clust) ## print \"|Gc_clust_python -",
"= load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) # #",
"i in range(len(f0)): line = f0[i] k = line.split() tmp[i,0] = float(k[0]) tmp[i,1]",
"import scipy.sparse.linalg as spla import pylab as plt from scipy.linalg import block_diag #",
"Ac_clust_python) ## ddd1 = np.linalg.norm(Ac_clust) ## print \"|Ac_clust_python - Ac_clust_myAp|/|Ac_clust_python|\",ddd0 / ddd1 ##",
"##plt.spy(Fc_clust,markersize = .8);plt.show() # ##Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) # # # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(),",
"Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) # Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc = np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:])",
"- x11 + x2) # print( \"||x_out - x_out_p || = %e \"",
"### ### ##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1); # ##print (\"nnz",
"Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) # # # # ##BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) # # #K_test=",
"# ##Gc_H = np.dot(GcTGc.toarray(),ker_GcTGc) # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac = Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1)",
"tmp_e = -np.dot(R[i].T,rhs[i]) if (i == 0): g_p = tmp_g e_p = tmp_e;",
"i in range(nSub - 1): if (i == 0): Bc_g = np.hstack((Bc[0],Bc[1])) else:",
"np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) # xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) # KplusBcT_p",
"= float(k[1]) tmp[i,2] = float(k[2]) if (tmp.shape[0]==1): tmp = [] else: n =",
"##BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) # # #K_test= [] #Kplus_K_test = [] #K_Kplus_K_test = []",
"### ##P,L,U= scipy.linalg.lu(Ac_clust) ##nnz0 = L.nonzero()[0].shape[0] + U.nonzero()[0].shape[0] ## ## ### ### ##AcR",
"= K_reg[0] #frhs = rhs[0] #xxD = xx[0] #RD = R[0] #for i",
"- Fc_clust_python) ## ddd1 = np.linalg.norm(Fc_clust) ## print \"|Fc_clust_python - Fc_clust_myAp|/|Fc_clust_python|\",ddd0 / ddd1",
"= load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) # # #dFc_eig = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1)",
"Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) # Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc = np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:]) # Fc.append( np.dot(Bc_nonzRow[i],",
"= np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # # x_out_p.append(x10 - x11 + x2) # print( \"||x_out -",
"= Ac_clust[np.ix_(r,r)] ##PR,LR,UR = scipy.linalg.lu(AcR) ##nnzR = LR.nonzero()[0].shape[0] + UR.nonzero()[0].shape[0] ### ### ##plt.subplot(2,2,1)",
"#Bc_g = np.hstack((Bc_g,Bc[2])) #Bc_g = np.hstack((Bc_g,Bc[2])) #BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust",
"#KKpK = np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i])) # KKpK = np.dot(K[i], np.linalg.solve(K_reg[i],K[i])) # print \"norm =",
"pathToFile = path+'/'+str(i)+'/'+str0+str(j)+'.txt' # tmp = load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return tmp path0 = \"../data\" if",
"= Ac_clust.toarray() ### ##P,L,U= scipy.linalg.lu(Ac_clust) ##nnz0 = L.nonzero()[0].shape[0] + U.nonzero()[0].shape[0] ## ## ###",
"##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1); # ##print (\"nnz = %d,",
"Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ## Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ## Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ## Ac_clust",
"\"|Gc_clust_python - Gc_clust_myAp|/|Gc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gf_clust - Gf_clust_python) ##",
"1: K = [] K_reg = [] Fc = [] R = []",
"plt.xlabel(\"nnz = %d\" % (Ac_clust.nonzero()[0].shape[0])) # plt.show() # ##Bc_from_Rt = [] ##for i",
"+= tmp_g; e_p = np.concatenate((e_p,tmp_e)) print(' ...%d '%(i)) # gc_p = np.concatenate((g_p,e_p)) #",
"del_ = np.linalg.norm(K_iK_K - K[i] ) / np.linalg.norm(K[i]) # print(del_) # tmp_g =",
"[] else: n = np.int32(tmp[0,0]) m = np.int32(tmp[0,1]) I = tmp[1::,0]-offset; J =",
"# print str0,i,j if (makeSymmetric): logInd = J != I; I = np.concatenate((I,J[logInd]))",
"# print alpha_p[(6*i):(6*(i+1))] # x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # # x_out_p.append(x10 - x11 +",
"load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf = load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf = load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1)",
"#dAc_eig = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) # # #GfTGf_ = np.zeros((GfTGf.shape[0],GfTGf.shape[0])) # #",
"## print \"|Fc_clust_python - Fc_clust_myAp|/|Fc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gc_clust -",
"= # # #ZZ1 = np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1 = np.vstack((ZZ1,H)) #AA01 = np.hstack((AA0,AA1))",
"# # nSub = 2 def load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset): f0 = open(pathToFile).readlines() firstLine =",
"# # #GfTGf_ = np.zeros((GfTGf.shape[0],GfTGf.shape[0])) # # # # # # #for d",
"# BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K",
"ddd1 = np.linalg.norm(Fc_python_List[ii]) ## print \"|Fc_python - Fc_myAp|/|Fc_python|\",ddd0 / ddd1 ## ## ##",
"ddd1 ## ## ## Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ## Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ## Gf_clust",
"np.linalg.solve(Ac_clust, gc) # nLam = Bc[0].shape[0] # lam_p = lam_alpha_p[0:nLam] ## alpha_p =",
"load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile = path+'/'+str(i)+'/'+str0+str(j)+'.txt' # tmp = load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return tmp path0 = \"../data\"",
"# test = load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) # KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD = K_reg[0] #frhs = rhs[0]",
"= sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else: if (m==1): tmp = V else: tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return",
"f0[i] k = line.split() tmp[i,0] = float(k[0]) tmp[i,1] = float(k[1]) tmp[i,2] = float(k[2])",
"= rhs[0] #xxD = xx[0] #RD = R[0] #for i in range(1,nSub): #",
"weigth = np.loadtxt(path0+'/dump_weigth.txt') #Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # # #gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ =",
"= np.concatenate((I,J[logInd])) J = np.concatenate((J,I[logInd])) V = np.concatenate((V,V[logInd])) if (makeSparse): tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo()",
"print( \"||x_out - x_out_p || = %e \" % np.linalg.norm(x_out[i] - x_out_p[i])) Ac_clust_python",
"Rf = [] Bc = [] Bf = [] BcT_dense = [] Gc",
"K_reg[0] #frhs = rhs[0] #xxD = xx[0] #RD = R[0] #for i in",
"- 1): if (i == 0): Gf_g = Gf_p[0]+ Gf_p[1] else: Gf_g +=",
"Rc = RList[i].toarray() ## ## ## ## if (i == 0): ## Gf_clust_python",
"ddd1 ## ## ddd0 = np.linalg.norm(Ac_clust - Ac_clust_python) ## ddd1 = np.linalg.norm(Ac_clust) ##",
"x_out_p[i])) Ac_clust_python = np.hstack((Fc_clust,Gc_clust)) Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print ( Z.shape) Ac_clust_python = np.vstack((Ac_clust_python,Z))",
"np.linalg.solve(K_reg_List[i],Bc.T) ## Fc_i = np.dot(Bc_red,BcKplus_python) ## Fc_clust_python += np.dot(Bc,BcKplus_) ## Fc_python_List.append(Fc_i) ## ##",
"# nSub = 2 def load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset): f0 = open(pathToFile).readlines() firstLine = f0.pop(0)",
"# # #K_test= [] #Kplus_K_test = [] #K_Kplus_K_test = [] #K_reg_test = []",
"Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) # test = load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) # KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD = K_reg[0]",
"#x_test = [] # # #for i in range(4): # # K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) #",
"for i in range(nSub - 1): if (i == 0): Bf_g = np.hstack((Bf[0],Bf[1]))",
"alpha_p[(6*i):(6*(i+1))] # x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # # x_out_p.append(x10 - x11 + x2) #",
"= sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac = Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1) ##plt.spy(Ac_clust ,markersize = 2.0) ##plt.subplot(1,2,2) ##plt.spy(Ac,markersize",
"< 100: # markersize = 3 # else: # markersize = 0.7 #",
"##KpBcT0 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) # # #dFc_eig = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd =",
"\"|Fc_python - Fc_myAp|/|Fc_python|\",ddd0 / ddd1 ## ## ## Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ## Gc_clust",
"= float(k[2]) if (tmp.shape[0]==1): tmp = [] else: n = np.int32(tmp[0,0]) m =",
"- K[i] ) / np.linalg.norm(K[i]) # print(del_) # tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e",
"print alpha_p[(6*i):(6*(i+1))] # x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # # x_out_p.append(x10 - x11 + x2)",
"+ U.nonzero()[0].shape[0] ## ## ### ### ##AcR = Ac_clust[np.ix_(r,r)] ##PR,LR,UR = scipy.linalg.lu(AcR) ##nnzR",
"### ### ##AcR = Ac_clust[np.ix_(r,r)] ##PR,LR,UR = scipy.linalg.lu(AcR) ##nnzR = LR.nonzero()[0].shape[0] + UR.nonzero()[0].shape[0]",
"= np.vstack((Ac_clust_python,Z)) ## ## ## ddd0 = np.linalg.norm(Fc_clust - Fc_clust_python) ## ddd1 =",
"[] Rf = [] Bc = [] Bf = [] BcT_dense = []",
"##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1); # ##print (\"nnz = %d, nnz(reordered) = %d",
"= tmp[1::,2] # # print str0,i,j if (makeSymmetric): logInd = J != I;",
"- 1): if (i == 0): Bf_g = np.hstack((Bf[0],Bf[1])) else: Bf_g = np.hstack((Bf_g,Bf[i+1]))",
"##dFc_svd = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) # # #GfTGf_ =",
"ddd1 ## ## ddd0 = np.linalg.norm(Gf_clust - Gf_clust_python) ## ddd1 = np.linalg.norm(Gf_clust) ##",
"%d, nnz(reordered) = %d \") % (nnz0, nnzR) # # ##plt.show() # ##ker_Ac",
"in range(nSub - 1): if (i == 0): Gf_g = Gf_p[0]+ Gf_p[1] else:",
"Ac_clust.shape[0] < 100: # markersize_ = 3 # else: # markersize_ = 0.7",
"##ker_GcTGc = load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0 = load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) # ##Gc_H = np.dot(GcTGc.toarray(),ker_GcTGc) # ##r =",
"[] # BcK_dense = [] K_UT = [] # x_out = [] #",
"## ## for ii in range(nSub): ## ddd0 = np.linalg.norm(Fc_python_List[ii] - Fc_List[ii]) ##",
"np.abs(Bc).sum(axis=1)>0 ## Bc_red = Bc[indBc,:] ## BcKplus = BcKplus_List[i] ## ## Bf =",
"/ ddd1 ## ##K = [] # # # ##plt.subplot(1,2,1) ##plt.spy(Gf_clust_python,markersize=1) ##plt.subplot(1,2,2) ##plt.spy(Gf_clust,markersize=1)",
"BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K = np.linalg.solve(K_reg[i],K[i]) # K_iK_K = np.dot(K[i],iK_K) #",
"load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) # # #dFc_eig",
"np.hstack((AA0,AA1)) #A0 = np.hstack((K_regD,Bc_g.T)) # #nB = Bc_g.shape[0] #Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs",
"# # # ##BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) # # #K_test= [] #Kplus_K_test = []",
"= %d \") % (nnz0, nnzR) # # ##plt.show() # ##ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1)",
"= np.abs(Bf).sum(axis=1)>0 ## Bf_red = Bf[indBf,:] ## ## Rc = RList[i].toarray() ## ##",
"Fc = [] R = [] Rf = [] Bc = [] Bf",
"else: # markersize_ = 0.7 # plt.spy(Ac_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" %",
"(nnz0, nnzR) # # ##plt.show() # ##ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc = load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0",
"## Bf = Bf_List[i].toarray() ## indBf = np.abs(Bf).sum(axis=1)>0 ## Bf_red = Bf[indBf,:] ##",
"= np.linalg.solve(K_reg_List[i],Bc.T) ## Fc_i = np.dot(Bc_red,BcKplus_python) ## Fc_clust_python += np.dot(Bc,BcKplus_) ## Fc_python_List.append(Fc_i) ##",
"#ZZ1 = np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1 = np.vstack((ZZ1,H)) #AA01 = np.hstack((AA0,AA1)) #A0 = np.hstack((K_regD,Bc_g.T))",
"# #A = np.vstack((A0,Bc_Z)) # #b = np.concatenate((frhs,crhs)) # #x = np.linalg.solve(A,b) #",
"line tmp = np.zeros((len(f0),3), dtype = float) for i in range(len(f0)): line =",
"in range(nSub): # GfTGf_ += np.dot(Gf[d].T,Gf[d]) # # # # #if False: #",
"markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (GcTGc.nonzero()[0].shape[0])) # plt.subplot(1,3,2) # if Fc_clust.shape[0] <",
"- Fc_myAp|/|Fc_python|\",ddd0 / ddd1 ## ## ## Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ## Gc_clust =",
"## Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ## Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ## Ac_clust_python = np.hstack((Fc_clust_python,Gc_clust_python)) ##",
"= [] BcT_dense = [] Gc = [] # Gf = [] Gf_p",
"## ddd1 = np.linalg.norm(Gc_clust) ## print \"|Gc_clust_python - Gc_clust_myAp|/|Gc_clust_python|\",ddd0 / ddd1 ## ##",
"#Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc = load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf = load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf = load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac",
"KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K = np.linalg.solve(K_reg[i],K[i]) # K_iK_K",
"## ## ## ddd0 = np.linalg.norm(Fc_clust - Fc_clust_python) ## ddd1 = np.linalg.norm(Fc_clust) ##",
"ddd1 ## ##K = [] # # # ##plt.subplot(1,2,1) ##plt.spy(Gf_clust_python,markersize=1) ##plt.subplot(1,2,2) ##plt.spy(Gf_clust,markersize=1) ##plt.show()",
"! %d \" % (i)) # x10 = np.linalg.solve(K_reg[i],rhs[i]) # x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p))",
"m = np.int32(tmp[0,1]) I = tmp[1::,0]-offset; J = tmp[1::,1]-offset; V = tmp[1::,2] #",
"Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) # xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) #",
"Bc[i][indBc,:]) # Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) # xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) #",
"numpy as np from scipy import sparse import scipy.sparse.linalg as spla import pylab",
"K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) # K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) # Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) # K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) # # #KKpK = np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i]))",
"np.hstack((Bc_g,Bc[2])) #Bc_g = np.hstack((Bc_g,Bc[2])) #BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1)",
"(GcTGc.nonzero()[0].shape[0])) # plt.subplot(1,3,2) # if Fc_clust.shape[0] < 100: # markersize = 3 #",
"= load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ## Ac_clust_python = np.hstack((Fc_clust_python,Gc_clust_python)) ## ## Z = np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ## print",
"#K_reg_SF = [] #x_test = [] # # #for i in range(4): #",
"% np.linalg.norm(KKpK - K[i]) # # # # # # # # #",
"= np.int32(tmp[0,1]) I = tmp[1::,0]-offset; J = tmp[1::,1]-offset; V = tmp[1::,2] # #",
"= load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) # test = load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) # KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD = K_reg[0] #frhs",
"load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) # # #K_test= [] #Kplus_K_test = [] #K_Kplus_K_test = [] #K_reg_test =",
"sparse import scipy.sparse.linalg as spla import pylab as plt from scipy.linalg import block_diag",
"= Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] - GcList[ii] ## print np.linalg.norm(ttt) ## ## ## for ii in",
"[] KplusBcT_p = [] Bc_nonzRow = [] KplusBcT = [] BcKplus_tmp = []",
"as np from scipy import sparse import scipy.sparse.linalg as spla import pylab as",
"load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc = load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf = load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf = load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1)",
"= load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) # gc = load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) # lam_alpha = load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) # lam_alpha_p =",
"#K_test= [] #Kplus_K_test = [] #K_Kplus_K_test = [] #K_reg_test = [] #K_reg_SF =",
"= np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:]) # Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1))",
"# plt.show() # ##Bc_from_Rt = [] ##for i in range(1,14): ## Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1)",
"GcTGc.shape[0] < 100: # markersize_ = 3 # else: # markersize_ = 0.7",
"= 2 def load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset): f0 = open(pathToFile).readlines() firstLine = f0.pop(0) #removes the",
"symmetric_mode=True) ##Ac_clust = Ac_clust.toarray() ### ##P,L,U= scipy.linalg.lu(Ac_clust) ##nnz0 = L.nonzero()[0].shape[0] + U.nonzero()[0].shape[0] ##",
"load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) # # #GfTGf_ = np.zeros((GfTGf.shape[0],GfTGf.shape[0])) # # # # # # #for",
"float(k[2]) if (tmp.shape[0]==1): tmp = [] else: n = np.int32(tmp[0,0]) m = np.int32(tmp[0,1])",
"'%(i)) # gc_p = np.concatenate((g_p,e_p)) # gc_p = np.concatenate((gc_p,np.zeros(6))) Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust",
"- Ac_clust_python) ## ddd1 = np.linalg.norm(Ac_clust) ## print \"|Ac_clust_python - Ac_clust_myAp|/|Ac_clust_python|\",ddd0 / ddd1",
"if (i == 0): Bf_g = np.hstack((Bf[0],Bf[1])) else: Bf_g = np.hstack((Bf_g,Bf[i+1])) for i",
"- Gf_clust_python) ## ddd1 = np.linalg.norm(Gf_clust) ## print \"|Gf_clust_python - Gf_clust_myAp|/|Gf_clust_python|\",ddd0 / ddd1",
"2 def load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset): f0 = open(pathToFile).readlines() firstLine = f0.pop(0) #removes the first",
"(i == 0): ## Gf_clust_python = np.dot(Bf,Rc) ## Gc_clust_python = np.dot(Bc,Rc) ## else:",
"#removes the first line tmp = np.zeros((len(f0),3), dtype = float) for i in",
"= np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs = np.zeros(nB); # #A = np.vstack((A0,Bc_Z)) # #b =",
"## ## Bf = Bf_List[i].toarray() ## indBf = np.abs(Bf).sum(axis=1)>0 ## Bf_red = Bf[indBf,:]",
"# gc_p = np.concatenate((gc_p,np.zeros(6))) Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1)",
"print (\" ! %d \" % (i)) # x10 = np.linalg.solve(K_reg[i],rhs[i]) # x11",
"[] # ##if 0: ## Fc_clust_python = np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0])) ## for i in",
"= load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) # lam_alpha_p = np.linalg.solve(Ac_clust, gc) # nLam = Bc[0].shape[0] # lam_p",
"np.concatenate((g_p,e_p)) # gc_p = np.concatenate((gc_p,np.zeros(6))) Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust =",
"= [] Fc_p = [] rhs = [] xx = [] Kplus_f_test =",
"in range(1,nSub): # K_regD = block_diag(K_regD,K_reg[i]); # RD = block_diag(RD,R[i]); # frhs =",
"## print ( Z.shape) ## Ac_clust_python = np.vstack((Ac_clust_python,Z)) ## ## ## ddd0 =",
"(i == 0): Bc_g = np.hstack((Bc[0],Bc[1])) else: Bc_g = np.hstack((Bc_g,Bc[i+1])) for i in",
"# BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K = np.linalg.solve(K_reg[i],K[i]) # K_iK_K = np.dot(K[i],iK_K)",
"#x = np.linalg.solve(A,b) # #xxD = np.concatenate((xxD,crhs)) #Bc_g = np.hstack((Bc_g,Bc[2])) #Bc_g = np.hstack((Bc_g,Bc[2]))",
"# ##Bc_from_Rt = [] ##for i in range(1,14): ## Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) ) ##",
"## ddd0 = np.linalg.norm(Ac_clust - Ac_clust_python) ## ddd1 = np.linalg.norm(Ac_clust) ## print \"|Ac_clust_python",
"range(nSub - 1): if (i == 0): Gf_g = Gf_p[0]+ Gf_p[1] else: Gf_g",
"load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc = load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) # gc =",
"K_UT = [] # x_out = [] # x_out_p = [] # Lumped",
"= np.concatenate((xxD,xx[i])) # for i in range(nSub - 1): if (i == 0):",
"= np.vstack((A0,Bc_Z)) # #b = np.concatenate((frhs,crhs)) # #x = np.linalg.solve(A,b) # #xxD =",
"# # ##plt.show() # ##ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc = load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0 = load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1)",
"## Fc_python_List.append(Fc_i) ## ## for ii in range(nSub): ## ## ttt = Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:]",
"for i in range(len(f0)): line = f0[i] k = line.split() tmp[i,0] = float(k[0])",
"# xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) # KplusBcT_p = BcKplus_List[i] # BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i],",
"# #ZZ1 = np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1 = np.vstack((ZZ1,H)) #AA01 = np.hstack((AA0,AA1)) #A0 =",
"= np.linalg.norm(Fc_clust - Fc_clust_python) ## ddd1 = np.linalg.norm(Fc_clust) ## print \"|Fc_clust_python - Fc_clust_myAp|/|Fc_clust_python|\",ddd0",
"Fc_p = [] rhs = [] xx = [] Kplus_f_test = [] KplusBcT_p",
"if (tmp.shape[0]==1): tmp = [] else: n = np.int32(tmp[0,0]) m = np.int32(tmp[0,1]) I",
"# # # # #for d in range(nSub): # GfTGf_ += np.dot(Gf[d].T,Gf[d]) #",
"Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1) ##plt.spy(Ac_clust ,markersize = 2.0) ##plt.subplot(1,2,2) ##plt.spy(Ac,markersize = 0.125) # # #",
"Fc_clust_python) ## ddd1 = np.linalg.norm(Fc_clust) ## print \"|Fc_clust_python - Fc_clust_myAp|/|Fc_clust_python|\",ddd0 / ddd1 ##",
"= np.hstack((Bf_g,Bf[i+1])) for i in range(nSub - 1): if (i == 0): Gf_g",
"# #nB = Bc_g.shape[0] #Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs = np.zeros(nB); # #A",
"np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e = -np.dot(R[i].T,rhs[i]) if (i == 0): g_p = tmp_g e_p",
"range(nSub): ## Bc = Bct_list[i].toarray() ## indBc = np.abs(Bc).sum(axis=1)>0 ## Bc_red = Bc[indBc,:]",
"= np.linalg.norm(Gc_clust - Gc_clust_python) ## ddd1 = np.linalg.norm(Gc_clust) ## print \"|Gc_clust_python - Gc_clust_myAp|/|Gc_clust_python|\",ddd0",
"frhs = np.concatenate((frhs,rhs[i])) # xxD = np.concatenate((xxD,xx[i])) # for i in range(nSub -",
"= [] xx = [] Kplus_f_test = [] KplusBcT_p = [] Bc_nonzRow =",
"plt.subplot(1,3,3) # if Ac_clust.shape[0] < 100: # markersize_ = 3 # else: #",
"= np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print ( Z.shape) Ac_clust_python = np.vstack((Ac_clust_python,Z)) Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) # test",
"## ## ## ## if (i == 0): ## Gf_clust_python = np.dot(Bf,Rc) ##",
"J != I; I = np.concatenate((I,J[logInd])) J = np.concatenate((J,I[logInd])) V = np.concatenate((V,V[logInd])) if",
"= np.zeros((GfTGf.shape[0],GfTGf.shape[0])) # # # # # # #for d in range(nSub): #",
"i in range(nSub): ## Bc = Bct_list[i].toarray() ## indBc = np.abs(Bc).sum(axis=1)>0 ## Bc_red",
"= Bc_g.shape[0] #Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs = np.zeros(nB); # #A = np.vstack((A0,Bc_Z))",
"- Gf_clust_myAp|/|Gf_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Ac_clust - Ac_clust_python) ## ddd1",
"## ## Z = np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ## print ( Z.shape) ## Ac_clust_python = np.vstack((Ac_clust_python,Z))",
"np.linalg.norm(x_out[i] - x_out_p[i])) Ac_clust_python = np.hstack((Fc_clust,Gc_clust)) Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print ( Z.shape) Ac_clust_python",
"= [] R = [] Rf = [] Bc = [] Bf =",
"# ##print (\"nnz = %d, nnz(reordered) = %d \") % (nnz0, nnzR) #",
"[] BcKplus_tmp = [] # BcK_dense = [] K_UT = [] # x_out",
"## print \"|Gf_clust_python - Gf_clust_myAp|/|Gf_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Ac_clust -",
"Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc = load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) # gc = load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1)",
"# print (\" ! %d \" % (i)) # x10 = np.linalg.solve(K_reg[i],rhs[i]) #",
"np.linalg.solve(K_reg[i],K[i])) # print \"norm = %3.8e \\n\" % np.linalg.norm(KKpK - K[i]) # #",
"rhs[0] #xxD = xx[0] #RD = R[0] #for i in range(1,nSub): # K_regD",
"Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) # Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc = np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append(",
"## Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ## Ac_clust_python = np.hstack((Fc_clust_python,Gc_clust_python)) ## ## Z = np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1]))",
"= xx[0] #RD = R[0] #for i in range(1,nSub): # K_regD = block_diag(K_regD,K_reg[i]);",
"Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) ) ## # ## Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) # # # #",
"np.dot(Bc_red,BcKplus_python) ## Fc_clust_python += np.dot(Bc,BcKplus_) ## Fc_python_List.append(Fc_i) ## ## for ii in range(nSub):",
"for i in range(nSub - 1): if (i == 0): Bc_g = np.hstack((Bc[0],Bc[1]))",
"import block_diag # # nSub = 2 def load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset): f0 = open(pathToFile).readlines()",
"np.int32(tmp[0,0]) m = np.int32(tmp[0,1]) I = tmp[1::,0]-offset; J = tmp[1::,1]-offset; V = tmp[1::,2]",
"# #gc__ = np.concatenate((gc__,ec__)) #H = ker_GcTGc #AA0 = np.hstack((Fc__,Gc_clust)) #AB1 = #",
"= np.hstack((Bc_g,Bc[2])) #Bc_g = np.hstack((Bc_g,Bc[2])) #BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust =",
"#for i in range(4): # # K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) # K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) # K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) # Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1))",
"= load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) # # #dFc_eig = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1)",
"K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) # # #KKpK = np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i])) # KKpK = np.dot(K[i], np.linalg.solve(K_reg[i],K[i])) #",
"range(nSub): ## ddd0 = np.linalg.norm(Fc_python_List[ii] - Fc_List[ii]) ## ddd1 = np.linalg.norm(Fc_python_List[ii]) ## print",
"#frhs = rhs[0] #xxD = xx[0] #RD = R[0] #for i in range(1,nSub):",
"K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) #",
"x2) # print( \"||x_out - x_out_p || = %e \" % np.linalg.norm(x_out[i] -",
"np.dot(Gf[d].T,Gf[d]) # # # # #if False: # plt.subplot(1,3,1) # if GcTGc.shape[0] <",
"### ##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1); # ##print (\"nnz =",
"UR.nonzero()[0].shape[0] ### ### ##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1); # ##print",
"%e \" % np.linalg.norm(x_out[i] - x_out_p[i])) Ac_clust_python = np.hstack((Fc_clust,Gc_clust)) Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print",
"tmp_e; else: g_p += tmp_g; e_p = np.concatenate((e_p,tmp_e)) print(' ...%d '%(i)) # gc_p",
"np.concatenate((xxD,xx[i])) # for i in range(nSub - 1): if (i == 0): Bc_g",
"##if 0: ## Fc_clust_python = np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0])) ## for i in range(nSub): ##",
"= np.linalg.norm(Gf_clust - Gf_clust_python) ## ddd1 = np.linalg.norm(Gf_clust) ## print \"|Gf_clust_python - Gf_clust_myAp|/|Gf_clust_python|\",ddd0",
"# x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K = np.linalg.solve(K_reg[i],K[i]) # K_iK_K = np.dot(K[i],iK_K) # del_",
"np.linalg.solve(K_reg[i],rhs[i]) # x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # # print alpha_p[(6*i):(6*(i+1))] # x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))])",
"##Ac_clust = Ac_clust.toarray() ### ##P,L,U= scipy.linalg.lu(Ac_clust) ##nnz0 = L.nonzero()[0].shape[0] + U.nonzero()[0].shape[0] ## ##",
"Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K = np.linalg.solve(K_reg[i],K[i]) # K_iK_K = np.dot(K[i],iK_K) # del_ = np.linalg.norm(K_iK_K",
"np.linalg.norm(ttt) ## ## ## for ii in range(nSub): ## ddd0 = np.linalg.norm(Fc_python_List[ii] -",
"## Gf_clust_python = np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ## Gc_clust_python = np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ## indBcKplus = np.abs(BcKplus).sum(axis=1)>0 ##",
"# plt.subplot(1,3,1) # if GcTGc.shape[0] < 100: # markersize_ = 3 # else:",
"ii in range(nSub): ## ddd0 = np.linalg.norm(Fc_python_List[ii] - Fc_List[ii]) ## ddd1 = np.linalg.norm(Fc_python_List[ii])",
"# RD = block_diag(RD,R[i]); # frhs = np.concatenate((frhs,rhs[i])) # xxD = np.concatenate((xxD,xx[i])) #",
"## ## ddd0 = np.linalg.norm(Gc_clust - Gc_clust_python) ## ddd1 = np.linalg.norm(Gc_clust) ## print",
"0: ## Fc_clust_python = np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0])) ## for i in range(nSub): ## Bc",
"from scipy.linalg import block_diag # # nSub = 2 def load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset): f0",
"= np.hstack((Bc_g,Bc[2])) #BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc =",
"tmp path0 = \"../data\" if 1: K = [] K_reg = [] Fc",
"# # # # # # ##plt.spy(Fc_clust,markersize = .8);plt.show() # ##Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1)",
"np.dot(GcTGc.toarray(),ker_GcTGc) # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac = Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1) ##plt.spy(Ac_clust ,markersize =",
"[] R = [] Rf = [] Bc = [] Bf = []",
"+= Gf_p[i+1] weigth = np.loadtxt(path0+'/dump_weigth.txt') #Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # # #gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs))",
"# # # # # # # # ##plt.spy(Fc_clust,markersize = .8);plt.show() # ##Gc_",
"print \"|Fc_python - Fc_myAp|/|Fc_python|\",ddd0 / ddd1 ## ## ## Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ##",
"# # print str0,i,j if (makeSymmetric): logInd = J != I; I =",
"\" % np.linalg.norm(x_out[i] - x_out_p[i])) Ac_clust_python = np.hstack((Fc_clust,Gc_clust)) Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print (",
"= Bf_List[i].toarray() ## indBf = np.abs(Bf).sum(axis=1)>0 ## Bf_red = Bf[indBf,:] ## ## Rc",
"## if (i == 0): ## Gf_clust_python = np.dot(Bf,Rc) ## Gc_clust_python = np.dot(Bc,Rc)",
"sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac = Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1) ##plt.spy(Ac_clust ,markersize = 2.0) ##plt.subplot(1,2,2) ##plt.spy(Ac,markersize =",
"pylab as plt from scipy.linalg import block_diag # # nSub = 2 def",
"= line.split() tmp[i,0] = float(k[0]) tmp[i,1] = float(k[1]) tmp[i,2] = float(k[2]) if (tmp.shape[0]==1):",
"#H = ker_GcTGc #AA0 = np.hstack((Fc__,Gc_clust)) #AB1 = # # #ZZ1 = np.zeros((Gc_clust.shape[0],",
"np.zeros((GfTGf.shape[0],GfTGf.shape[0])) # # # # # # #for d in range(nSub): # GfTGf_",
"...%d '%(i)) # gc_p = np.concatenate((g_p,e_p)) # gc_p = np.concatenate((gc_p,np.zeros(6))) Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1)",
"#gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ = - np.dot(RD.T,frhs) # #gc__ = np.concatenate((gc__,ec__)) #H =",
"+ x2) # print( \"||x_out - x_out_p || = %e \" % np.linalg.norm(x_out[i]",
"def load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset): f0 = open(pathToFile).readlines() firstLine = f0.pop(0) #removes the first line",
"% (GcTGc.nonzero()[0].shape[0])) # plt.subplot(1,3,2) # if Fc_clust.shape[0] < 100: # markersize = 3",
"in range(1,14): ## Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) ) ## # ## Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) #",
"np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:]) # Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) #",
"ddd0 = np.linalg.norm(Ac_clust - Ac_clust_python) ## ddd1 = np.linalg.norm(Ac_clust) ## print \"|Ac_clust_python -",
"2.0) ##plt.subplot(1,2,2) ##plt.spy(Ac,markersize = 0.125) # # # ##Fc_python_List = [] # ##if",
"for ii in range(nSub): ## ddd0 = np.linalg.norm(Fc_python_List[ii] - Fc_List[ii]) ## ddd1 =",
"Lumped = [] # Lumped = [] for i in range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1))",
"= load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc = load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) # gc",
"= open(pathToFile).readlines() firstLine = f0.pop(0) #removes the first line tmp = np.zeros((len(f0),3), dtype",
"indBc = np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:]) # Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T)))",
"i in range(nSub - 1): if (i == 0): Gf_g = Gf_p[0]+ Gf_p[1]",
"/ np.linalg.norm(K[i]) # print(del_) # tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e = -np.dot(R[i].T,rhs[i]) if",
"load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) # # #GfTGf_ = np.zeros((GfTGf.shape[0],GfTGf.shape[0])) # # # #",
"= 3 # else: # markersize_ = 0.7 # plt.spy(Ac_clust, markersize=markersize_) # plt.xlabel(\"nnz",
"= np.hstack((Bc_g,Bc[i+1])) for i in range(nSub - 1): if (i == 0): Bf_g",
"# if GcTGc.shape[0] < 100: # markersize_ = 3 # else: # markersize_",
"# print \"norm = %3.8e \\n\" % np.linalg.norm(KKpK - K[i]) # # #",
"in range(nSub): # print (\" ! %d \" % (i)) # x10 =",
"= load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc = load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0 = load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) # ##Gc_H = np.dot(GcTGc.toarray(),ker_GcTGc) #",
"Gc = [] Fc_p = [] rhs = [] xx = [] Kplus_f_test",
"[] Gf_p = [] Gc = [] Fc_p = [] rhs = []",
"if 1: K = [] K_reg = [] Fc = [] R =",
"%d \") % (nnz0, nnzR) # # ##plt.show() # ##ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc",
"np.dot(Bc,Rc) ## else: ## Gf_clust_python = np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ## Gc_clust_python = np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ## indBcKplus",
"# # # ##plt.spy(Fc_clust,markersize = .8);plt.show() # ##Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) # # #",
"tmp[i,2] = float(k[2]) if (tmp.shape[0]==1): tmp = [] else: n = np.int32(tmp[0,0]) m",
"# Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) # xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1))",
"import numpy as np from scipy import sparse import scipy.sparse.linalg as spla import",
"# # ##BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) # # #K_test= [] #Kplus_K_test = [] #K_Kplus_K_test",
"else: if (m==1): tmp = V else: tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return tmp def",
"- GcList[ii] ## print np.linalg.norm(ttt) ## ## ## for ii in range(nSub): ##",
"= np.linalg.norm(Ac_clust - Ac_clust_python) ## ddd1 = np.linalg.norm(Ac_clust) ## print \"|Ac_clust_python - Ac_clust_myAp|/|Ac_clust_python|\",ddd0",
"if (i == 0): Gf_g = Gf_p[0]+ Gf_p[1] else: Gf_g += Gf_p[i+1] weigth",
"print \"|Fc_clust_python - Fc_clust_myAp|/|Fc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gc_clust - Gc_clust_python)",
"= np.concatenate((frhs,rhs[i])) # xxD = np.concatenate((xxD,xx[i])) # for i in range(nSub - 1):",
"# del_ = np.linalg.norm(K_iK_K - K[i] ) / np.linalg.norm(K[i]) # print(del_) # tmp_g",
"x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # # x_out_p.append(x10 - x11 + x2) # print( \"||x_out",
"= np.abs(BcKplus).sum(axis=1)>0 ## BcKplus = BcKplus[indBcKplus,:] ## BcKplus_python = np.linalg.solve(K_reg_List[i],Bc_red.T) ## BcKplus_ =",
"#GfTGf = load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf = load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1",
"np.vstack((Ac_clust_python,Z)) Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) # test = load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) # KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD =",
"if Ac_clust.shape[0] < 100: # markersize_ = 3 # else: # markersize_ =",
"## Fc_clust_python = np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0])) ## for i in range(nSub): ## Bc =",
"load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD = K_reg[0] #frhs = rhs[0] #xxD = xx[0] #RD = R[0]",
"# else: # markersize_ = 0.7 # plt.spy(Ac_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\"",
"np.hstack((Bf[0],Bf[1])) else: Bf_g = np.hstack((Bf_g,Bf[i+1])) for i in range(nSub - 1): if (i",
"# ##plt.spy(Fc_clust,markersize = .8);plt.show() # ##Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) # # # ##r =",
"# #crhs = np.zeros(nB); # #A = np.vstack((A0,Bc_Z)) # #b = np.concatenate((frhs,crhs)) #",
"np.dot(RD.T,frhs) # #gc__ = np.concatenate((gc__,ec__)) #H = ker_GcTGc #AA0 = np.hstack((Fc__,Gc_clust)) #AB1 =",
"K[i] ) / np.linalg.norm(K[i]) # print(del_) # tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e =",
"##Ac = Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1) ##plt.spy(Ac_clust ,markersize = 2.0) ##plt.subplot(1,2,2) ##plt.spy(Ac,markersize = 0.125) #",
"Fc_myAp|/|Fc_python|\",ddd0 / ddd1 ## ## ## Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ## Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1)",
"range(len(f0)): line = f0[i] k = line.split() tmp[i,0] = float(k[0]) tmp[i,1] = float(k[1])",
"# #GfTGf_ = np.zeros((GfTGf.shape[0],GfTGf.shape[0])) # # # # # # #for d in",
"[] # Gf = [] Gf_p = [] Gc = [] Fc_p =",
"d in range(nSub): # GfTGf_ += np.dot(Gf[d].T,Gf[d]) # # # # #if False:",
"= np.int32(tmp[0,0]) m = np.int32(tmp[0,1]) I = tmp[1::,0]-offset; J = tmp[1::,1]-offset; V =",
"= [] KplusBcT_p = [] Bc_nonzRow = [] KplusBcT = [] BcKplus_tmp =",
"= [] # # #for i in range(4): # # K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) # K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1))",
"def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile = path+'/'+str(i)+'/'+str0+str(j)+'.txt' # tmp = load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return tmp path0 =",
"## ### ### ##AcR = Ac_clust[np.ix_(r,r)] ##PR,LR,UR = scipy.linalg.lu(AcR) ##nnzR = LR.nonzero()[0].shape[0] +",
"# ##plt.show() # ##ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc = load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0 = load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) #",
"#Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs = np.zeros(nB); # #A = np.vstack((A0,Bc_Z)) # #b",
"## for ii in range(nSub): ## ## ttt = Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] - GcList[ii] ##",
"Fc_clust_myAp|/|Fc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gc_clust - Gc_clust_python) ## ddd1 =",
"#AA01 = np.hstack((AA0,AA1)) #A0 = np.hstack((K_regD,Bc_g.T)) # #nB = Bc_g.shape[0] #Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB))))",
"KplusBcT = [] BcKplus_tmp = [] # BcK_dense = [] K_UT = []",
"np.hstack((Fc__,Gc_clust)) #AB1 = # # #ZZ1 = np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1 = np.vstack((ZZ1,H)) #AA01",
"\") % (nnz0, nnzR) # # ##plt.show() # ##ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc =",
"np.int32(tmp[0,1]) I = tmp[1::,0]-offset; J = tmp[1::,1]-offset; V = tmp[1::,2] # # print",
"/ ddd1 ## ## ddd0 = np.linalg.norm(Gf_clust - Gf_clust_python) ## ddd1 = np.linalg.norm(Gf_clust)",
"#nB = Bc_g.shape[0] #Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs = np.zeros(nB); # #A =",
"np.linalg.norm(Ac_clust) ## print \"|Ac_clust_python - Ac_clust_myAp|/|Ac_clust_python|\",ddd0 / ddd1 ## ##K = [] #",
"#for d in range(nSub): # GfTGf_ += np.dot(Gf[d].T,Gf[d]) # # # # #if",
"return tmp path0 = \"../data\" if 1: K = [] K_reg = []",
"np.abs(BcKplus).sum(axis=1)>0 ## BcKplus = BcKplus[indBcKplus,:] ## BcKplus_python = np.linalg.solve(K_reg_List[i],Bc_red.T) ## BcKplus_ = np.linalg.solve(K_reg_List[i],Bc.T)",
"= np.vstack((ZZ1,H)) #AA01 = np.hstack((AA0,AA1)) #A0 = np.hstack((K_regD,Bc_g.T)) # #nB = Bc_g.shape[0] #Bc_Z",
"rhs[i])) tmp_e = -np.dot(R[i].T,rhs[i]) if (i == 0): g_p = tmp_g e_p =",
"= %d\" % (GcTGc.nonzero()[0].shape[0])) # plt.subplot(1,3,2) # if Fc_clust.shape[0] < 100: # markersize",
"iK_K = np.linalg.solve(K_reg[i],K[i]) # K_iK_K = np.dot(K[i],iK_K) # del_ = np.linalg.norm(K_iK_K - K[i]",
"= load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) # # #GfTGf_ = np.zeros((GfTGf.shape[0],GfTGf.shape[0])) # # # # # #",
"3 # else: # markersize_ = 0.7 # plt.spy(Ac_clust, markersize=markersize_) # plt.xlabel(\"nnz =",
"# # K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) # K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) # K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) # Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) # K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) # #",
"##AcR = Ac_clust[np.ix_(r,r)] ##PR,LR,UR = scipy.linalg.lu(AcR) ##nnzR = LR.nonzero()[0].shape[0] + UR.nonzero()[0].shape[0] ### ###",
"print(del_) # tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e = -np.dot(R[i].T,rhs[i]) if (i == 0):",
"# #dFc_eig = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1)",
"# plt.xlabel(\"nnz = %d\" % (GcTGc.nonzero()[0].shape[0])) # plt.subplot(1,3,2) # if Fc_clust.shape[0] < 100:",
"str0,i,j if (makeSymmetric): logInd = J != I; I = np.concatenate((I,J[logInd])) J =",
"Kplus_f_test = [] KplusBcT_p = [] Bc_nonzRow = [] KplusBcT = [] BcKplus_tmp",
"i in range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) #",
"x_out = [] # x_out_p = [] # Lumped = [] # Lumped",
"# # # # # # # # # ##plt.spy(Fc_clust,markersize = .8);plt.show() #",
"%d \" % (i)) # x10 = np.linalg.solve(K_reg[i],rhs[i]) # x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) #",
"##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac = Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1) ##plt.spy(Ac_clust ,markersize = 2.0) ##plt.subplot(1,2,2)",
"g_p = tmp_g e_p = tmp_e; else: g_p += tmp_g; e_p = np.concatenate((e_p,tmp_e))",
"= load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) # #",
"Ac_clust_python = np.hstack((Fc_clust_python,Gc_clust_python)) ## ## Z = np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ## print ( Z.shape) ##",
"## BcKplus_python = np.linalg.solve(K_reg_List[i],Bc_red.T) ## BcKplus_ = np.linalg.solve(K_reg_List[i],Bc.T) ## Fc_i = np.dot(Bc_red,BcKplus_python) ##",
"## ddd1 = np.linalg.norm(Gf_clust) ## print \"|Gf_clust_python - Gf_clust_myAp|/|Gf_clust_python|\",ddd0 / ddd1 ## ##",
"= load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return tmp path0 = \"../data\" if 1: K = [] K_reg",
"range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1))",
"## ttt = Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] - GcList[ii] ## print np.linalg.norm(ttt) ## ## ## for",
"= np.linalg.solve(A,b) # #xxD = np.concatenate((xxD,crhs)) #Bc_g = np.hstack((Bc_g,Bc[2])) #Bc_g = np.hstack((Bc_g,Bc[2])) #BcT_dense",
"= load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ## Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ## Ac_clust_python = np.hstack((Fc_clust_python,Gc_clust_python)) ## ## Z",
"as plt from scipy.linalg import block_diag # # nSub = 2 def load_matrix_basic(pathToFile,makeSparse,makeSymmetric,",
"# iK_K = np.linalg.solve(K_reg[i],K[i]) # K_iK_K = np.dot(K[i],iK_K) # del_ = np.linalg.norm(K_iK_K -",
"\"||x_out - x_out_p || = %e \" % np.linalg.norm(x_out[i] - x_out_p[i])) Ac_clust_python =",
"# plt.subplot(1,3,3) # if Ac_clust.shape[0] < 100: # markersize_ = 3 # else:",
"K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) # K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) # K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) # Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) # K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) # # #KKpK =",
"np.linalg.norm(Gc_clust) ## print \"|Gc_clust_python - Gc_clust_myAp|/|Gc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gf_clust",
"I; I = np.concatenate((I,J[logInd])) J = np.concatenate((J,I[logInd])) V = np.concatenate((V,V[logInd])) if (makeSparse): tmp",
"scipy.linalg.lu(AcR) ##nnzR = LR.nonzero()[0].shape[0] + UR.nonzero()[0].shape[0] ### ### ##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3)",
"- x_out_p || = %e \" % np.linalg.norm(x_out[i] - x_out_p[i])) Ac_clust_python = np.hstack((Fc_clust,Gc_clust))",
"## ddd0 = np.linalg.norm(Gf_clust - Gf_clust_python) ## ddd1 = np.linalg.norm(Gf_clust) ## print \"|Gf_clust_python",
"np.linalg.norm(K_iK_K - K[i] ) / np.linalg.norm(K[i]) # print(del_) # tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i]))",
"BcKplus_ = np.linalg.solve(K_reg_List[i],Bc.T) ## Fc_i = np.dot(Bc_red,BcKplus_python) ## Fc_clust_python += np.dot(Bc,BcKplus_) ## Fc_python_List.append(Fc_i)",
"### ##AcR = Ac_clust[np.ix_(r,r)] ##PR,LR,UR = scipy.linalg.lu(AcR) ##nnzR = LR.nonzero()[0].shape[0] + UR.nonzero()[0].shape[0] ###",
"LR.nonzero()[0].shape[0] + UR.nonzero()[0].shape[0] ### ### ##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1);",
"Gf_p = [] Gc = [] Fc_p = [] rhs = [] xx",
"## BcKplus = BcKplus[indBcKplus,:] ## BcKplus_python = np.linalg.solve(K_reg_List[i],Bc_red.T) ## BcKplus_ = np.linalg.solve(K_reg_List[i],Bc.T) ##",
"ddd1 = np.linalg.norm(Fc_clust) ## print \"|Fc_clust_python - Fc_clust_myAp|/|Fc_clust_python|\",ddd0 / ddd1 ## ## ddd0",
"tmp[i,1] = float(k[1]) tmp[i,2] = float(k[2]) if (tmp.shape[0]==1): tmp = [] else: n",
"sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else: if (m==1): tmp = V else: tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return tmp",
"load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return tmp path0 = \"../data\" if 1: K = [] K_reg =",
"#Kplus_K_test = [] #K_Kplus_K_test = [] #K_reg_test = [] #K_reg_SF = [] #x_test",
"## Rc = RList[i].toarray() ## ## ## ## if (i == 0): ##",
"Z.shape) ## Ac_clust_python = np.vstack((Ac_clust_python,Z)) ## ## ## ddd0 = np.linalg.norm(Fc_clust - Fc_clust_python)",
"BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) #",
"in range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1))",
"## ddd0 = np.linalg.norm(Fc_python_List[ii] - Fc_List[ii]) ## ddd1 = np.linalg.norm(Fc_python_List[ii]) ## print \"|Fc_python",
"gc = load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) # lam_alpha = load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) # lam_alpha_p = np.linalg.solve(Ac_clust, gc) #",
"np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1 = np.vstack((ZZ1,H)) #AA01 = np.hstack((AA0,AA1)) #A0 = np.hstack((K_regD,Bc_g.T)) # #nB",
"#b = np.concatenate((frhs,crhs)) # #x = np.linalg.solve(A,b) # #xxD = np.concatenate((xxD,crhs)) #Bc_g =",
"# # #KKpK = np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i])) # KKpK = np.dot(K[i], np.linalg.solve(K_reg[i],K[i])) # print",
"# lam_p = lam_alpha_p[0:nLam] ## alpha_p = lam_alpha[nLam:] # for i in range(nSub):",
"tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else: if (m==1): tmp = V else: tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray()",
"Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ## Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ## Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ## Ac_clust_python",
"float(k[1]) tmp[i,2] = float(k[2]) if (tmp.shape[0]==1): tmp = [] else: n = np.int32(tmp[0,0])",
"= np.linalg.norm(Gf_clust) ## print \"|Gf_clust_python - Gf_clust_myAp|/|Gf_clust_python|\",ddd0 / ddd1 ## ## ddd0 =",
"Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) # K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) # # #KKpK = np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i])) # KKpK = np.dot(K[i],",
"Gf = [] Gf_p = [] Gc = [] Fc_p = [] rhs",
"K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) # Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) # K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) # # #KKpK = np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i])) # KKpK",
"= [] Gc = [] Fc_p = [] rhs = [] xx =",
"# KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD = K_reg[0] #frhs = rhs[0] #xxD = xx[0] #RD",
"##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1); # ##print (\"nnz = %d, nnz(reordered)",
"##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1); # ##print (\"nnz = %d, nnz(reordered) =",
"range(nSub): ## ## ttt = Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] - GcList[ii] ## print np.linalg.norm(ttt) ## ##",
"0.7 # plt.spy(GcTGc, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (GcTGc.nonzero()[0].shape[0])) # plt.subplot(1,3,2) #",
"= np.dot(GcTGc.toarray(),ker_GcTGc) # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac = Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1) ##plt.spy(Ac_clust ,markersize",
"# # # # ##BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) # # #K_test= [] #Kplus_K_test =",
"= np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # # print alpha_p[(6*i):(6*(i+1))] # x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # # x_out_p.append(x10",
"# # # # # # # # # # # ##plt.spy(Fc_clust,markersize =",
"load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc = load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf = load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1)",
"from scipy import sparse import scipy.sparse.linalg as spla import pylab as plt from",
"== 0): Bc_g = np.hstack((Bc[0],Bc[1])) else: Bc_g = np.hstack((Bc_g,Bc[i+1])) for i in range(nSub",
"## print \"|Fc_python - Fc_myAp|/|Fc_python|\",ddd0 / ddd1 ## ## ## Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1)",
"## ## ## Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ## Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ## Gf_clust =",
"tmp def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile = path+'/'+str(i)+'/'+str0+str(j)+'.txt' # tmp = load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return tmp path0",
"= LR.nonzero()[0].shape[0] + UR.nonzero()[0].shape[0] ### ### ##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4)",
"block_diag(RD,R[i]); # frhs = np.concatenate((frhs,rhs[i])) # xxD = np.concatenate((xxD,xx[i])) # for i in",
"## print np.linalg.norm(ttt) ## ## ## for ii in range(nSub): ## ddd0 =",
"[] #x_test = [] # # #for i in range(4): # # K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1))",
"# K_iK_K = np.dot(K[i],iK_K) # del_ = np.linalg.norm(K_iK_K - K[i] ) / np.linalg.norm(K[i])",
"range(nSub - 1): if (i == 0): Bc_g = np.hstack((Bc[0],Bc[1])) else: Bc_g =",
"= path+'/'+str(i)+'/'+str0+str(j)+'.txt' # tmp = load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return tmp path0 = \"../data\" if 1:",
"firstLine = f0.pop(0) #removes the first line tmp = np.zeros((len(f0),3), dtype = float)",
"symmetric_mode=True) ##Ac = Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1) ##plt.spy(Ac_clust ,markersize = 2.0) ##plt.subplot(1,2,2) ##plt.spy(Ac,markersize = 0.125)",
"BcKplus_tmp = [] # BcK_dense = [] K_UT = [] # x_out =",
"else: # markersize = 0.7 # plt.spy(Fc_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" %",
"offset): f0 = open(pathToFile).readlines() firstLine = f0.pop(0) #removes the first line tmp =",
",markersize = 2.0) ##plt.subplot(1,2,2) ##plt.spy(Ac,markersize = 0.125) # # # ##Fc_python_List = []",
"BcKplus = BcKplus_List[i] ## ## Bf = Bf_List[i].toarray() ## indBf = np.abs(Bf).sum(axis=1)>0 ##",
"KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K = np.linalg.solve(K_reg[i],K[i]) # K_iK_K =",
"False: # plt.subplot(1,3,1) # if GcTGc.shape[0] < 100: # markersize_ = 3 #",
"K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) # Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1))",
"## Gf_clust_python = np.dot(Bf,Rc) ## Gc_clust_python = np.dot(Bc,Rc) ## else: ## Gf_clust_python =",
"## ddd1 = np.linalg.norm(Fc_clust) ## print \"|Fc_clust_python - Fc_clust_myAp|/|Fc_clust_python|\",ddd0 / ddd1 ## ##",
"- Gc_clust_myAp|/|Gc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gf_clust - Gf_clust_python) ## ddd1",
"block_diag # # nSub = 2 def load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset): f0 = open(pathToFile).readlines() firstLine",
"(makeSparse): tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else: if (m==1): tmp = V else: tmp =",
"Bc = [] Bf = [] BcT_dense = [] Gc = [] #",
"= [] Bf = [] BcT_dense = [] Gc = [] # Gf",
"# Gf = [] Gf_p = [] Gc = [] Fc_p = []",
"# # # # # # # ##plt.spy(Fc_clust,markersize = .8);plt.show() # ##Gc_ =",
"sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac_clust = Ac_clust.toarray() ### ##P,L,U= scipy.linalg.lu(Ac_clust) ##nnz0 = L.nonzero()[0].shape[0] + U.nonzero()[0].shape[0]",
"##nnz0 = L.nonzero()[0].shape[0] + U.nonzero()[0].shape[0] ## ## ### ### ##AcR = Ac_clust[np.ix_(r,r)] ##PR,LR,UR",
"if (i == 0): g_p = tmp_g e_p = tmp_e; else: g_p +=",
"plt.subplot(1,3,2) # if Fc_clust.shape[0] < 100: # markersize = 3 # else: #",
"## Gc_clust_python = np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ## indBcKplus = np.abs(BcKplus).sum(axis=1)>0 ## BcKplus = BcKplus[indBcKplus,:] ##",
"# #gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ = - np.dot(RD.T,frhs) # #gc__ = np.concatenate((gc__,ec__)) #H",
"#dFc_eig = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) #",
"Ac_clust_python = np.hstack((Fc_clust,Gc_clust)) Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print ( Z.shape) Ac_clust_python = np.vstack((Ac_clust_python,Z)) Gf_clust",
"BcKplus = BcKplus[indBcKplus,:] ## BcKplus_python = np.linalg.solve(K_reg_List[i],Bc_red.T) ## BcKplus_ = np.linalg.solve(K_reg_List[i],Bc.T) ## Fc_i",
"= load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) # # # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac_clust = Ac_clust.toarray() ###",
"# #xxD = np.concatenate((xxD,crhs)) #Bc_g = np.hstack((Bc_g,Bc[2])) #Bc_g = np.hstack((Bc_g,Bc[2])) #BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1)",
"Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) # Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc = np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:]) #",
"np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) # xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) # KplusBcT_p = BcKplus_List[i] # BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1))",
"lam_p = lam_alpha_p[0:nLam] ## alpha_p = lam_alpha[nLam:] # for i in range(nSub): #",
"# else: # markersize_ = 0.7 # plt.spy(GcTGc, markersize=markersize_) # plt.xlabel(\"nnz = %d\"",
"= [] K_UT = [] # x_out = [] # x_out_p = []",
"= np.concatenate((J,I[logInd])) V = np.concatenate((V,V[logInd])) if (makeSparse): tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else: if (m==1):",
"print \"|Ac_clust_python - Ac_clust_myAp|/|Ac_clust_python|\",ddd0 / ddd1 ## ##K = [] # # #",
"= np.concatenate((V,V[logInd])) if (makeSparse): tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else: if (m==1): tmp = V",
"# K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) # Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) # K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) # # #KKpK = np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i])) #",
"## indBf = np.abs(Bf).sum(axis=1)>0 ## Bf_red = Bf[indBf,:] ## ## Rc = RList[i].toarray()",
"= np.hstack((Fc_clust,Gc_clust)) Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print ( Z.shape) Ac_clust_python = np.vstack((Ac_clust_python,Z)) Gf_clust =",
"##plt.spy(Ac,markersize = 0.125) # # # ##Fc_python_List = [] # ##if 0: ##",
"= load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) # # #GfTGf_ = np.zeros((GfTGf.shape[0],GfTGf.shape[0])) # # #",
"Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) # Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc = np.abs(Bc[i]).sum(axis=1)>0",
"= np.concatenate((gc_p,np.zeros(6))) Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc =",
"markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Fc_clust.nonzero()[0].shape[0])) # plt.subplot(1,3,3) # if Ac_clust.shape[0] <",
"Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print ( Z.shape) Ac_clust_python = np.vstack((Ac_clust_python,Z)) Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) #",
"Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) # Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc",
"= np.linalg.solve(K_reg[i],rhs[i]) # x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # # print alpha_p[(6*i):(6*(i+1))] # x2 =",
"spla import pylab as plt from scipy.linalg import block_diag # # nSub =",
"if (makeSymmetric): logInd = J != I; I = np.concatenate((I,J[logInd])) J = np.concatenate((J,I[logInd]))",
"else: tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return tmp def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile = path+'/'+str(i)+'/'+str0+str(j)+'.txt' # tmp",
"# Lumped = [] for i in range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1))",
"np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # # x_out_p.append(x10 - x11 + x2) # print( \"||x_out - x_out_p",
"= Bc[indBc,:] ## BcKplus = BcKplus_List[i] ## ## Bf = Bf_List[i].toarray() ## indBf",
"## Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ## Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ## Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ##",
"## for i in range(nSub): ## Bc = Bct_list[i].toarray() ## indBc = np.abs(Bc).sum(axis=1)>0",
"100: # markersize_ = 3 # else: # markersize_ = 0.7 # plt.spy(Ac_clust,",
"float(k[0]) tmp[i,1] = float(k[1]) tmp[i,2] = float(k[2]) if (tmp.shape[0]==1): tmp = [] else:",
"lam_alpha[nLam:] # for i in range(nSub): # print (\" ! %d \" %",
"# # #ZZ1 = np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1 = np.vstack((ZZ1,H)) #AA01 = np.hstack((AA0,AA1)) #A0",
"BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) # Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc = np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:]) # Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)))",
"= R[0] #for i in range(1,nSub): # K_regD = block_diag(K_regD,K_reg[i]); # RD =",
"# print(del_) # tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e = -np.dot(R[i].T,rhs[i]) if (i ==",
"RD = block_diag(RD,R[i]); # frhs = np.concatenate((frhs,rhs[i])) # xxD = np.concatenate((xxD,xx[i])) # for",
"= block_diag(RD,R[i]); # frhs = np.concatenate((frhs,rhs[i])) # xxD = np.concatenate((xxD,xx[i])) # for i",
"## Bc = Bct_list[i].toarray() ## indBc = np.abs(Bc).sum(axis=1)>0 ## Bc_red = Bc[indBc,:] ##",
"np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0])) ## for i in range(nSub): ## Bc = Bct_list[i].toarray() ## indBc",
"= np.hstack((Fc_clust_python,Gc_clust_python)) ## ## Z = np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ## print ( Z.shape) ## Ac_clust_python",
"BcT_dense = [] Gc = [] # Gf = [] Gf_p = []",
"## print \"|Ac_clust_python - Ac_clust_myAp|/|Ac_clust_python|\",ddd0 / ddd1 ## ##K = [] # #",
"= -np.dot(R[i].T,rhs[i]) if (i == 0): g_p = tmp_g e_p = tmp_e; else:",
"= BcKplus_List[i] ## ## Bf = Bf_List[i].toarray() ## indBf = np.abs(Bf).sum(axis=1)>0 ## Bf_red",
"np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs = np.zeros(nB); # #A = np.vstack((A0,Bc_Z)) # #b = np.concatenate((frhs,crhs))",
"for i in range(nSub): ## Bc = Bct_list[i].toarray() ## indBc = np.abs(Bc).sum(axis=1)>0 ##",
"[] # x_out_p = [] # Lumped = [] # Lumped = []",
"Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc = load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) #",
"tmp[1::,0]-offset; J = tmp[1::,1]-offset; V = tmp[1::,2] # # print str0,i,j if (makeSymmetric):",
"Ac_clust_python = np.vstack((Ac_clust_python,Z)) ## ## ## ddd0 = np.linalg.norm(Fc_clust - Fc_clust_python) ## ddd1",
"#RD = R[0] #for i in range(1,nSub): # K_regD = block_diag(K_regD,K_reg[i]); # RD",
"plt.spy(GcTGc, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (GcTGc.nonzero()[0].shape[0])) # plt.subplot(1,3,2) # if Fc_clust.shape[0]",
"##PR,LR,UR = scipy.linalg.lu(AcR) ##nnzR = LR.nonzero()[0].shape[0] + UR.nonzero()[0].shape[0] ### ### ##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2)",
"#iGfTGf = load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) #",
"plt.spy(Ac_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Ac_clust.nonzero()[0].shape[0])) # plt.show() # ##Bc_from_Rt =",
"[] Bc = [] Bf = [] BcT_dense = [] Gc = []",
"np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ## Gc_clust_python = np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ## indBcKplus = np.abs(BcKplus).sum(axis=1)>0 ## BcKplus = BcKplus[indBcKplus,:]",
"#xxD = xx[0] #RD = R[0] #for i in range(1,nSub): # K_regD =",
"x_out_p || = %e \" % np.linalg.norm(x_out[i] - x_out_p[i])) Ac_clust_python = np.hstack((Fc_clust,Gc_clust)) Z",
"[] # Lumped = [] # Lumped = [] for i in range(nSub):",
"Ac_clust.toarray() ### ##P,L,U= scipy.linalg.lu(Ac_clust) ##nnz0 = L.nonzero()[0].shape[0] + U.nonzero()[0].shape[0] ## ## ### ###",
"(i == 0): Bf_g = np.hstack((Bf[0],Bf[1])) else: Bf_g = np.hstack((Bf_g,Bf[i+1])) for i in",
"markersize_ = 0.7 # plt.spy(GcTGc, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (GcTGc.nonzero()[0].shape[0])) #",
"= BcKplus[indBcKplus,:] ## BcKplus_python = np.linalg.solve(K_reg_List[i],Bc_red.T) ## BcKplus_ = np.linalg.solve(K_reg_List[i],Bc.T) ## Fc_i =",
"= np.dot(Bc_red,BcKplus_python) ## Fc_clust_python += np.dot(Bc,BcKplus_) ## Fc_python_List.append(Fc_i) ## ## for ii in",
"= 0.7 # plt.spy(Fc_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Fc_clust.nonzero()[0].shape[0])) # plt.subplot(1,3,3)",
"line = f0[i] k = line.split() tmp[i,0] = float(k[0]) tmp[i,1] = float(k[1]) tmp[i,2]",
"KplusBcT_p = [] Bc_nonzRow = [] KplusBcT = [] BcKplus_tmp = [] #",
"markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Ac_clust.nonzero()[0].shape[0])) # plt.show() # ##Bc_from_Rt = []",
"= tmp_g e_p = tmp_e; else: g_p += tmp_g; e_p = np.concatenate((e_p,tmp_e)) print('",
"x10 = np.linalg.solve(K_reg[i],rhs[i]) # x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # # print alpha_p[(6*i):(6*(i+1))] # x2",
"## ## ddd0 = np.linalg.norm(Gf_clust - Gf_clust_python) ## ddd1 = np.linalg.norm(Gf_clust) ## print",
"## ## ddd0 = np.linalg.norm(Ac_clust - Ac_clust_python) ## ddd1 = np.linalg.norm(Ac_clust) ## print",
"= \"../data\" if 1: K = [] K_reg = [] Fc = []",
"= np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1 = np.vstack((ZZ1,H)) #AA01 = np.hstack((AA0,AA1)) #A0 = np.hstack((K_regD,Bc_g.T)) #",
"np.linalg.solve(A,b) # #xxD = np.concatenate((xxD,crhs)) #Bc_g = np.hstack((Bc_g,Bc[2])) #Bc_g = np.hstack((Bc_g,Bc[2])) #BcT_dense =",
"load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc = load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf = load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf = load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1)",
"( Z.shape) ## Ac_clust_python = np.vstack((Ac_clust_python,Z)) ## ## ## ddd0 = np.linalg.norm(Fc_clust -",
"## Gc_clust_python = np.dot(Bc,Rc) ## else: ## Gf_clust_python = np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ## Gc_clust_python =",
"[] Bc_nonzRow = [] KplusBcT = [] BcKplus_tmp = [] # BcK_dense =",
"np.linalg.norm(Ac_clust - Ac_clust_python) ## ddd1 = np.linalg.norm(Ac_clust) ## print \"|Ac_clust_python - Ac_clust_myAp|/|Ac_clust_python|\",ddd0 /",
"= [] Gf_p = [] Gc = [] Fc_p = [] rhs =",
"print \"|Gf_clust_python - Gf_clust_myAp|/|Gf_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Ac_clust - Ac_clust_python)",
"ddd1 = np.linalg.norm(Ac_clust) ## print \"|Ac_clust_python - Ac_clust_myAp|/|Ac_clust_python|\",ddd0 / ddd1 ## ##K =",
"load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) # # #dFc_eig = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd",
"Bc_g.shape[0] #Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs = np.zeros(nB); # #A = np.vstack((A0,Bc_Z)) #",
"# #for i in range(4): # # K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) # K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) # K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) #",
"= [] rhs = [] xx = [] Kplus_f_test = [] KplusBcT_p =",
"(i)) # x10 = np.linalg.solve(K_reg[i],rhs[i]) # x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # # print alpha_p[(6*i):(6*(i+1))]",
"np.hstack((Fc_clust_python,Gc_clust_python)) ## ## Z = np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ## print ( Z.shape) ## Ac_clust_python =",
"( Z.shape) Ac_clust_python = np.vstack((Ac_clust_python,Z)) Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) # test = load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) #",
"= ker_GcTGc #AA0 = np.hstack((Fc__,Gc_clust)) #AB1 = # # #ZZ1 = np.zeros((Gc_clust.shape[0], H.shape[1]))",
"if Fc_clust.shape[0] < 100: # markersize = 3 # else: # markersize =",
"Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc = load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) # gc = load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) # lam_alpha =",
"np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # # print alpha_p[(6*i):(6*(i+1))] # x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # # x_out_p.append(x10 -",
"##plt.subplot(1,2,1) ##plt.spy(Ac_clust ,markersize = 2.0) ##plt.subplot(1,2,2) ##plt.spy(Ac,markersize = 0.125) # # # ##Fc_python_List",
"i in range(4): # # K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) # K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) # K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) # Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) #",
"np.linalg.norm(Fc_python_List[ii]) ## print \"|Fc_python - Fc_myAp|/|Fc_python|\",ddd0 / ddd1 ## ## ## Fc_clust =",
"print ( Z.shape) ## Ac_clust_python = np.vstack((Ac_clust_python,Z)) ## ## ## ddd0 = np.linalg.norm(Fc_clust",
"ddd1 = np.linalg.norm(Gc_clust) ## print \"|Gc_clust_python - Gc_clust_myAp|/|Gc_clust_python|\",ddd0 / ddd1 ## ## ddd0",
"= np.dot(K[i], np.linalg.solve(K_reg[i],K[i])) # print \"norm = %3.8e \\n\" % np.linalg.norm(KKpK - K[i])",
"#K_reg_test = [] #K_reg_SF = [] #x_test = [] # # #for i",
"indBcKplus = np.abs(BcKplus).sum(axis=1)>0 ## BcKplus = BcKplus[indBcKplus,:] ## BcKplus_python = np.linalg.solve(K_reg_List[i],Bc_red.T) ## BcKplus_",
"RList[i].toarray() ## ## ## ## if (i == 0): ## Gf_clust_python = np.dot(Bf,Rc)",
"[] xx = [] Kplus_f_test = [] KplusBcT_p = [] Bc_nonzRow = []",
"# tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e = -np.dot(R[i].T,rhs[i]) if (i == 0): g_p",
"xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) # KplusBcT_p = BcKplus_List[i] # BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i]))",
"0): Gf_g = Gf_p[0]+ Gf_p[1] else: Gf_g += Gf_p[i+1] weigth = np.loadtxt(path0+'/dump_weigth.txt') #Fc__",
"# if Ac_clust.shape[0] < 100: # markersize_ = 3 # else: # markersize_",
"Bct_list[i].toarray() ## indBc = np.abs(Bc).sum(axis=1)>0 ## Bc_red = Bc[indBc,:] ## BcKplus = BcKplus_List[i]",
"\"|Gf_clust_python - Gf_clust_myAp|/|Gf_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Ac_clust - Ac_clust_python) ##",
"nLam = Bc[0].shape[0] # lam_p = lam_alpha_p[0:nLam] ## alpha_p = lam_alpha[nLam:] # for",
"#K_Kplus_K_test = [] #K_reg_test = [] #K_reg_SF = [] #x_test = [] #",
"Gc = [] # Gf = [] Gf_p = [] Gc = []",
"# Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) # KplusBcT_p = BcKplus_List[i] # BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1))",
"##Bc_from_Rt = [] ##for i in range(1,14): ## Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) ) ## #",
"range(nSub): # print (\" ! %d \" % (i)) # x10 = np.linalg.solve(K_reg[i],rhs[i])",
"K_reg = [] Fc = [] R = [] Rf = [] Bc",
"## Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) # # # # ##BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) # #",
"= [] #K_Kplus_K_test = [] #K_reg_test = [] #K_reg_SF = [] #x_test =",
"- K[i]) # # # # # # # # # # #",
"= [] # ##if 0: ## Fc_clust_python = np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0])) ## for i",
"Ac_clust_myAp|/|Ac_clust_python|\",ddd0 / ddd1 ## ##K = [] # # # ##plt.subplot(1,2,1) ##plt.spy(Gf_clust_python,markersize=1) ##plt.subplot(1,2,2)",
"in range(nSub): ## ## ttt = Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] - GcList[ii] ## print np.linalg.norm(ttt) ##",
"= .8);plt.show() # ##Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) # # # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True)",
"## print \"|Gc_clust_python - Gc_clust_myAp|/|Gc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gf_clust -",
"L.nonzero()[0].shape[0] + U.nonzero()[0].shape[0] ## ## ### ### ##AcR = Ac_clust[np.ix_(r,r)] ##PR,LR,UR = scipy.linalg.lu(AcR)",
"scipy.linalg import block_diag # # nSub = 2 def load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset): f0 =",
"scipy.linalg.lu(Ac_clust) ##nnz0 = L.nonzero()[0].shape[0] + U.nonzero()[0].shape[0] ## ## ### ### ##AcR = Ac_clust[np.ix_(r,r)]",
"\"../data\" if 1: K = [] K_reg = [] Fc = [] R",
"[] Kplus_f_test = [] KplusBcT_p = [] Bc_nonzRow = [] KplusBcT = []",
"\"|Ac_clust_python - Ac_clust_myAp|/|Ac_clust_python|\",ddd0 / ddd1 ## ##K = [] # # # ##plt.subplot(1,2,1)",
"= tmp[1::,1]-offset; V = tmp[1::,2] # # print str0,i,j if (makeSymmetric): logInd =",
"Bf = [] BcT_dense = [] Gc = [] # Gf = []",
"np.hstack((Bf_g,Bf[i+1])) for i in range(nSub - 1): if (i == 0): Gf_g =",
"- np.dot(RD.T,frhs) # #gc__ = np.concatenate((gc__,ec__)) #H = ker_GcTGc #AA0 = np.hstack((Fc__,Gc_clust)) #AB1",
"markersize = 0.7 # plt.spy(Fc_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Fc_clust.nonzero()[0].shape[0])) #",
"= float(k[0]) tmp[i,1] = float(k[1]) tmp[i,2] = float(k[2]) if (tmp.shape[0]==1): tmp = []",
"= sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return tmp def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile = path+'/'+str(i)+'/'+str0+str(j)+'.txt' # tmp = load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset)",
"= Bf[indBf,:] ## ## Rc = RList[i].toarray() ## ## ## ## if (i",
"np.concatenate((gc_p,np.zeros(6))) Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc = load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1)",
"tmp = [] else: n = np.int32(tmp[0,0]) m = np.int32(tmp[0,1]) I = tmp[1::,0]-offset;",
"# # # ##Fc_python_List = [] # ##if 0: ## Fc_clust_python = np.zeros((Bct_list[i].shape[0],",
"# lam_alpha = load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) # lam_alpha_p = np.linalg.solve(Ac_clust, gc) # nLam = Bc[0].shape[0]",
"1): if (i == 0): Bc_g = np.hstack((Bc[0],Bc[1])) else: Bc_g = np.hstack((Bc_g,Bc[i+1])) for",
"#xxD = np.concatenate((xxD,crhs)) #Bc_g = np.hstack((Bc_g,Bc[2])) #Bc_g = np.hstack((Bc_g,Bc[2])) #BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust",
"% (Fc_clust.nonzero()[0].shape[0])) # plt.subplot(1,3,3) # if Ac_clust.shape[0] < 100: # markersize_ = 3",
"= load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) # ##Gc_H = np.dot(GcTGc.toarray(),ker_GcTGc) # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac =",
"= np.zeros(nB); # #A = np.vstack((A0,Bc_Z)) # #b = np.concatenate((frhs,crhs)) # #x =",
"# ##if 0: ## Fc_clust_python = np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0])) ## for i in range(nSub):",
"# nLam = Bc[0].shape[0] # lam_p = lam_alpha_p[0:nLam] ## alpha_p = lam_alpha[nLam:] #",
"##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac_clust = Ac_clust.toarray() ### ##P,L,U= scipy.linalg.lu(Ac_clust) ##nnz0 = L.nonzero()[0].shape[0]",
"path+'/'+str(i)+'/'+str0+str(j)+'.txt' # tmp = load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return tmp path0 = \"../data\" if 1: K",
"= np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e = -np.dot(R[i].T,rhs[i]) if (i == 0): g_p = tmp_g",
"if GcTGc.shape[0] < 100: # markersize_ = 3 # else: # markersize_ =",
"BcKplus_List[i] ## ## Bf = Bf_List[i].toarray() ## indBf = np.abs(Bf).sum(axis=1)>0 ## Bf_red =",
"##for i in range(1,14): ## Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) ) ## # ## Gc_ =",
"# x_out = [] # x_out_p = [] # Lumped = [] #",
"load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) ) ## # ## Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) # # # # ##BcT_dense",
"in range(nSub - 1): if (i == 0): Bf_g = np.hstack((Bf[0],Bf[1])) else: Bf_g",
"load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) # gc = load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) # lam_alpha = load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) # lam_alpha_p = np.linalg.solve(Ac_clust,",
"Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc = np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:]) # Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append( np.dot(Bc_nonzRow[i],",
"Fc_clust.shape[0] < 100: # markersize = 3 # else: # markersize = 0.7",
"## ddd0 = np.linalg.norm(Gc_clust - Gc_clust_python) ## ddd1 = np.linalg.norm(Gc_clust) ## print \"|Gc_clust_python",
"= np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ = - np.dot(RD.T,frhs) # #gc__ = np.concatenate((gc__,ec__)) #H = ker_GcTGc",
"(makeSymmetric): logInd = J != I; I = np.concatenate((I,J[logInd])) J = np.concatenate((J,I[logInd])) V",
"else: Bf_g = np.hstack((Bf_g,Bf[i+1])) for i in range(nSub - 1): if (i ==",
"Fc_List[ii]) ## ddd1 = np.linalg.norm(Fc_python_List[ii]) ## print \"|Fc_python - Fc_myAp|/|Fc_python|\",ddd0 / ddd1 ##",
"else: ## Gf_clust_python = np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ## Gc_clust_python = np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ## indBcKplus = np.abs(BcKplus).sum(axis=1)>0",
"Gc_clust_python = np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ## indBcKplus = np.abs(BcKplus).sum(axis=1)>0 ## BcKplus = BcKplus[indBcKplus,:] ## BcKplus_python",
"# markersize_ = 0.7 # plt.spy(Ac_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Ac_clust.nonzero()[0].shape[0]))",
"# #x = np.linalg.solve(A,b) # #xxD = np.concatenate((xxD,crhs)) #Bc_g = np.hstack((Bc_g,Bc[2])) #Bc_g =",
"= [] Bc = [] Bf = [] BcT_dense = [] Gc =",
"#ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) # # #dFc_eig =",
"load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ## Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ## Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ## Ac_clust_python = np.hstack((Fc_clust_python,Gc_clust_python))",
"= L.nonzero()[0].shape[0] + U.nonzero()[0].shape[0] ## ## ### ### ##AcR = Ac_clust[np.ix_(r,r)] ##PR,LR,UR =",
"Gf_g = Gf_p[0]+ Gf_p[1] else: Gf_g += Gf_p[i+1] weigth = np.loadtxt(path0+'/dump_weigth.txt') #Fc__ =",
"0): Bc_g = np.hstack((Bc[0],Bc[1])) else: Bc_g = np.hstack((Bc_g,Bc[i+1])) for i in range(nSub -",
"(\" ! %d \" % (i)) # x10 = np.linalg.solve(K_reg[i],rhs[i]) # x11 =",
"Fc_i = np.dot(Bc_red,BcKplus_python) ## Fc_clust_python += np.dot(Bc,BcKplus_) ## Fc_python_List.append(Fc_i) ## ## for ii",
"#GcTGc = load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf = load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf = load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0",
"# # # # # ##plt.spy(Fc_clust,markersize = .8);plt.show() # ##Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) #",
"np.vstack((Ac_clust_python,Z)) ## ## ## ddd0 = np.linalg.norm(Fc_clust - Fc_clust_python) ## ddd1 = np.linalg.norm(Fc_clust)",
"np.vstack((A0,Bc_Z)) # #b = np.concatenate((frhs,crhs)) # #x = np.linalg.solve(A,b) # #xxD = np.concatenate((xxD,crhs))",
"sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return tmp def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile = path+'/'+str(i)+'/'+str0+str(j)+'.txt' # tmp = load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return",
"## alpha_p = lam_alpha[nLam:] # for i in range(nSub): # print (\" !",
"in range(nSub): ## Bc = Bct_list[i].toarray() ## indBc = np.abs(Bc).sum(axis=1)>0 ## Bc_red =",
"= np.hstack((Bc[0],Bc[1])) else: Bc_g = np.hstack((Bc_g,Bc[i+1])) for i in range(nSub - 1): if",
"load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) # # # # ##BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) # # #K_test= [] #Kplus_K_test",
"np.linalg.norm(KKpK - K[i]) # # # # # # # # # #",
"Bc[indBc,:] ## BcKplus = BcKplus_List[i] ## ## Bf = Bf_List[i].toarray() ## indBf =",
"## Fc_i = np.dot(Bc_red,BcKplus_python) ## Fc_clust_python += np.dot(Bc,BcKplus_) ## Fc_python_List.append(Fc_i) ## ## for",
"Gf_clust_python = np.dot(Bf,Rc) ## Gc_clust_python = np.dot(Bc,Rc) ## else: ## Gf_clust_python = np.hstack((Gf_clust_python,np.dot(Bf,Rc)))",
"print ( Z.shape) Ac_clust_python = np.vstack((Ac_clust_python,Z)) Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) # test = load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1)",
"Gf_clust_python = np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ## Gc_clust_python = np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ## indBcKplus = np.abs(BcKplus).sum(axis=1)>0 ## BcKplus",
"K = [] K_reg = [] Fc = [] R = [] Rf",
"# plt.spy(Fc_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Fc_clust.nonzero()[0].shape[0])) # plt.subplot(1,3,3) # if",
"tmp = np.zeros((len(f0),3), dtype = float) for i in range(len(f0)): line = f0[i]",
"Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] - GcList[ii] ## print np.linalg.norm(ttt) ## ## ## for ii in range(nSub):",
"BcK_dense = [] K_UT = [] # x_out = [] # x_out_p =",
"i in range(1,nSub): # K_regD = block_diag(K_regD,K_reg[i]); # RD = block_diag(RD,R[i]); # frhs",
"else: g_p += tmp_g; e_p = np.concatenate((e_p,tmp_e)) print(' ...%d '%(i)) # gc_p =",
"= [] K_reg = [] Fc = [] R = [] Rf =",
"Fc_clust_python = np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0])) ## for i in range(nSub): ## Bc = Bct_list[i].toarray()",
"print(' ...%d '%(i)) # gc_p = np.concatenate((g_p,e_p)) # gc_p = np.concatenate((gc_p,np.zeros(6))) Gc_clust =",
"# #b = np.concatenate((frhs,crhs)) # #x = np.linalg.solve(A,b) # #xxD = np.concatenate((xxD,crhs)) #Bc_g",
"V = tmp[1::,2] # # print str0,i,j if (makeSymmetric): logInd = J !=",
"lam_alpha = load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) # lam_alpha_p = np.linalg.solve(Ac_clust, gc) # nLam = Bc[0].shape[0] #",
"# markersize = 3 # else: # markersize = 0.7 # plt.spy(Fc_clust, markersize=markersize_)",
"markersize = 3 # else: # markersize = 0.7 # plt.spy(Fc_clust, markersize=markersize_) #",
"0): g_p = tmp_g e_p = tmp_e; else: g_p += tmp_g; e_p =",
"rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) # xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) # KplusBcT_p = BcKplus_List[i] # BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T)",
"##R0 = load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) # ##Gc_H = np.dot(GcTGc.toarray(),ker_GcTGc) # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac",
"# Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) # Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc = np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:]) # Fc.append(",
"[] ##for i in range(1,14): ## Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) ) ## # ## Gc_",
"if (i == 0): ## Gf_clust_python = np.dot(Bf,Rc) ## Gc_clust_python = np.dot(Bc,Rc) ##",
"np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ## indBcKplus = np.abs(BcKplus).sum(axis=1)>0 ## BcKplus = BcKplus[indBcKplus,:] ## BcKplus_python = np.linalg.solve(K_reg_List[i],Bc_red.T)",
"## for ii in range(nSub): ## ddd0 = np.linalg.norm(Fc_python_List[ii] - Fc_List[ii]) ## ddd1",
"= load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) # KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD = K_reg[0] #frhs = rhs[0] #xxD =",
"100: # markersize = 3 # else: # markersize = 0.7 # plt.spy(Fc_clust,",
"# #K_test= [] #Kplus_K_test = [] #K_Kplus_K_test = [] #K_reg_test = [] #K_reg_SF",
"== 0): ## Gf_clust_python = np.dot(Bf,Rc) ## Gc_clust_python = np.dot(Bc,Rc) ## else: ##",
"Gc_clust_python) ## ddd1 = np.linalg.norm(Gc_clust) ## print \"|Gc_clust_python - Gc_clust_myAp|/|Gc_clust_python|\",ddd0 / ddd1 ##",
"KKpK = np.dot(K[i], np.linalg.solve(K_reg[i],K[i])) # print \"norm = %3.8e \\n\" % np.linalg.norm(KKpK -",
"= np.linalg.solve(K_reg[i],K[i]) # K_iK_K = np.dot(K[i],iK_K) # del_ = np.linalg.norm(K_iK_K - K[i] )",
"## indBcKplus = np.abs(BcKplus).sum(axis=1)>0 ## BcKplus = BcKplus[indBcKplus,:] ## BcKplus_python = np.linalg.solve(K_reg_List[i],Bc_red.T) ##",
"Bc = Bct_list[i].toarray() ## indBc = np.abs(Bc).sum(axis=1)>0 ## Bc_red = Bc[indBc,:] ## BcKplus",
"ddd0 = np.linalg.norm(Fc_clust - Fc_clust_python) ## ddd1 = np.linalg.norm(Fc_clust) ## print \"|Fc_clust_python -",
"Bct_list[i].shape[0])) ## for i in range(nSub): ## Bc = Bct_list[i].toarray() ## indBc =",
"# # #gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ = - np.dot(RD.T,frhs) # #gc__ = np.concatenate((gc__,ec__))",
"## Fc_clust_python += np.dot(Bc,BcKplus_) ## Fc_python_List.append(Fc_i) ## ## for ii in range(nSub): ##",
"# GfTGf_ += np.dot(Gf[d].T,Gf[d]) # # # # #if False: # plt.subplot(1,3,1) #",
"= load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ## Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ## Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ## Ac_clust_python =",
"ddd1 ## ## ddd0 = np.linalg.norm(Gc_clust - Gc_clust_python) ## ddd1 = np.linalg.norm(Gc_clust) ##",
"## ddd1 = np.linalg.norm(Ac_clust) ## print \"|Ac_clust_python - Ac_clust_myAp|/|Ac_clust_python|\",ddd0 / ddd1 ## ##K",
"np.linalg.norm(Gf_clust) ## print \"|Gf_clust_python - Gf_clust_myAp|/|Gf_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Ac_clust",
"Gf_p[0]+ Gf_p[1] else: Gf_g += Gf_p[i+1] weigth = np.loadtxt(path0+'/dump_weigth.txt') #Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) #",
"load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) # test = load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) # KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD = K_reg[0] #frhs =",
"np from scipy import sparse import scipy.sparse.linalg as spla import pylab as plt",
"%d\" % (Ac_clust.nonzero()[0].shape[0])) # plt.show() # ##Bc_from_Rt = [] ##for i in range(1,14):",
"= %3.8e \\n\" % np.linalg.norm(KKpK - K[i]) # # # # # #",
"import pylab as plt from scipy.linalg import block_diag # # nSub = 2",
"= load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc = load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf =",
"load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc = load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0 = load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) # ##Gc_H = np.dot(GcTGc.toarray(),ker_GcTGc) # ##r",
"in range(nSub): ## ddd0 = np.linalg.norm(Fc_python_List[ii] - Fc_List[ii]) ## ddd1 = np.linalg.norm(Fc_python_List[ii]) ##",
"#Bc_g = np.hstack((Bc_g,Bc[2])) #BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc",
"Fc_python_List.append(Fc_i) ## ## for ii in range(nSub): ## ## ttt = Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] -",
"Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) # Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc = np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:]) # Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) #",
"# markersize_ = 0.7 # plt.spy(GcTGc, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (GcTGc.nonzero()[0].shape[0]))",
"# # #for i in range(4): # # K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) # K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) # K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1))",
"= np.abs(Bc).sum(axis=1)>0 ## Bc_red = Bc[indBc,:] ## BcKplus = BcKplus_List[i] ## ## Bf",
"= np.linalg.norm(Fc_clust) ## print \"|Fc_clust_python - Fc_clust_myAp|/|Fc_clust_python|\",ddd0 / ddd1 ## ## ddd0 =",
"np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ = - np.dot(RD.T,frhs) # #gc__ = np.concatenate((gc__,ec__)) #H = ker_GcTGc #AA0",
"#gc__ = np.concatenate((gc__,ec__)) #H = ker_GcTGc #AA0 = np.hstack((Fc__,Gc_clust)) #AB1 = # #",
"= 2.0) ##plt.subplot(1,2,2) ##plt.spy(Ac,markersize = 0.125) # # # ##Fc_python_List = [] #",
"## BcKplus = BcKplus_List[i] ## ## Bf = Bf_List[i].toarray() ## indBf = np.abs(Bf).sum(axis=1)>0",
"load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ## Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ## Ac_clust_python = np.hstack((Fc_clust_python,Gc_clust_python)) ## ## Z =",
"#A = np.vstack((A0,Bc_Z)) # #b = np.concatenate((frhs,crhs)) # #x = np.linalg.solve(A,b) # #xxD",
"# markersize_ = 3 # else: # markersize_ = 0.7 # plt.spy(Ac_clust, markersize=markersize_)",
"## Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) ) ## # ## Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) # # #",
"GcList[ii] ## print np.linalg.norm(ttt) ## ## ## for ii in range(nSub): ## ddd0",
"np.abs(Bf).sum(axis=1)>0 ## Bf_red = Bf[indBf,:] ## ## Rc = RList[i].toarray() ## ## ##",
"(Ac_clust.nonzero()[0].shape[0])) # plt.show() # ##Bc_from_Rt = [] ##for i in range(1,14): ## Bc_from_Rt.append(",
"I = np.concatenate((I,J[logInd])) J = np.concatenate((J,I[logInd])) V = np.concatenate((V,V[logInd])) if (makeSparse): tmp =",
"= np.linalg.norm(Ac_clust) ## print \"|Ac_clust_python - Ac_clust_myAp|/|Ac_clust_python|\",ddd0 / ddd1 ## ##K = []",
"V else: tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return tmp def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile = path+'/'+str(i)+'/'+str0+str(j)+'.txt' #",
"Gc_clust_python = np.dot(Bc,Rc) ## else: ## Gf_clust_python = np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ## Gc_clust_python = np.hstack((Gc_clust_python,np.dot(Bc,Rc)))",
"np.dot(K[i], np.linalg.solve(K_reg[i],K[i])) # print \"norm = %3.8e \\n\" % np.linalg.norm(KKpK - K[i]) #",
"##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1); # ##print (\"nnz = %d, nnz(reordered) = %d \") %",
"BcKplus_List[i] # BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1))",
"= load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) # # #K_test= [] #Kplus_K_test = [] #K_Kplus_K_test = [] #K_reg_test",
"0): ## Gf_clust_python = np.dot(Bf,Rc) ## Gc_clust_python = np.dot(Bc,Rc) ## else: ## Gf_clust_python",
"nnz(reordered) = %d \") % (nnz0, nnzR) # # ##plt.show() # ##ker_Ac =",
"## ## ### ### ##AcR = Ac_clust[np.ix_(r,r)] ##PR,LR,UR = scipy.linalg.lu(AcR) ##nnzR = LR.nonzero()[0].shape[0]",
"# # # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac_clust = Ac_clust.toarray() ### ##P,L,U= scipy.linalg.lu(Ac_clust)",
"i in range(nSub): # print (\" ! %d \" % (i)) # x10",
"# # # #for d in range(nSub): # GfTGf_ += np.dot(Gf[d].T,Gf[d]) # #",
"## ## Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ## Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ## Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1)",
"= np.linalg.norm(Gc_clust) ## print \"|Gc_clust_python - Gc_clust_myAp|/|Gc_clust_python|\",ddd0 / ddd1 ## ## ddd0 =",
"= 0.7 # plt.spy(GcTGc, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (GcTGc.nonzero()[0].shape[0])) # plt.subplot(1,3,2)",
"i in range(nSub - 1): if (i == 0): Bf_g = np.hstack((Bf[0],Bf[1])) else:",
"= f0[i] k = line.split() tmp[i,0] = float(k[0]) tmp[i,1] = float(k[1]) tmp[i,2] =",
"# plt.xlabel(\"nnz = %d\" % (Fc_clust.nonzero()[0].shape[0])) # plt.subplot(1,3,3) # if Ac_clust.shape[0] < 100:",
"# # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac_clust = Ac_clust.toarray() ### ##P,L,U= scipy.linalg.lu(Ac_clust) ##nnz0",
"= np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0])) ## for i in range(nSub): ## Bc = Bct_list[i].toarray() ##",
"np.dot(Bc,BcKplus_) ## Fc_python_List.append(Fc_i) ## ## for ii in range(nSub): ## ## ttt =",
"Bc[0].shape[0] # lam_p = lam_alpha_p[0:nLam] ## alpha_p = lam_alpha[nLam:] # for i in",
"# markersize = 0.7 # plt.spy(Fc_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Fc_clust.nonzero()[0].shape[0]))",
"##plt.show() # ##ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc = load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0 = load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) # ##Gc_H",
"indBf = np.abs(Bf).sum(axis=1)>0 ## Bf_red = Bf[indBf,:] ## ## Rc = RList[i].toarray() ##",
"KplusBcT_p = BcKplus_List[i] # BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T)",
"range(nSub): # GfTGf_ += np.dot(Gf[d].T,Gf[d]) # # # # #if False: # plt.subplot(1,3,1)",
"3 # else: # markersize = 0.7 # plt.spy(Fc_clust, markersize=markersize_) # plt.xlabel(\"nnz =",
"nSub = 2 def load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset): f0 = open(pathToFile).readlines() firstLine = f0.pop(0) #removes",
"## ddd1 = np.linalg.norm(Fc_python_List[ii]) ## print \"|Fc_python - Fc_myAp|/|Fc_python|\",ddd0 / ddd1 ## ##",
"K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1))",
"(Fc_clust.nonzero()[0].shape[0])) # plt.subplot(1,3,3) # if Ac_clust.shape[0] < 100: # markersize_ = 3 #",
"= load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) # # # # ##BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) # # #K_test= []",
"\\n\" % np.linalg.norm(KKpK - K[i]) # # # # # # # #",
"Ac_clust[np.ix_(r,r)] ##PR,LR,UR = scipy.linalg.lu(AcR) ##nnzR = LR.nonzero()[0].shape[0] + UR.nonzero()[0].shape[0] ### ### ##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1);",
"[] Fc_p = [] rhs = [] xx = [] Kplus_f_test = []",
"#A0 = np.hstack((K_regD,Bc_g.T)) # #nB = Bc_g.shape[0] #Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs =",
"= 3 # else: # markersize = 0.7 # plt.spy(Fc_clust, markersize=markersize_) # plt.xlabel(\"nnz",
"## ## ttt = Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] - GcList[ii] ## print np.linalg.norm(ttt) ## ## ##",
"= np.dot(K[i],iK_K) # del_ = np.linalg.norm(K_iK_K - K[i] ) / np.linalg.norm(K[i]) # print(del_)",
"= np.concatenate((g_p,e_p)) # gc_p = np.concatenate((gc_p,np.zeros(6))) Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust",
"#AA1 = np.vstack((ZZ1,H)) #AA01 = np.hstack((AA0,AA1)) #A0 = np.hstack((K_regD,Bc_g.T)) # #nB = Bc_g.shape[0]",
"- Gc_clust_python) ## ddd1 = np.linalg.norm(Gc_clust) ## print \"|Gc_clust_python - Gc_clust_myAp|/|Gc_clust_python|\",ddd0 / ddd1",
"ddd0 = np.linalg.norm(Gf_clust - Gf_clust_python) ## ddd1 = np.linalg.norm(Gf_clust) ## print \"|Gf_clust_python -",
"/ ddd1 ## ## ddd0 = np.linalg.norm(Ac_clust - Ac_clust_python) ## ddd1 = np.linalg.norm(Ac_clust)",
"if (i == 0): Bc_g = np.hstack((Bc[0],Bc[1])) else: Bc_g = np.hstack((Bc_g,Bc[i+1])) for i",
"# # # #if False: # plt.subplot(1,3,1) # if GcTGc.shape[0] < 100: #",
"# K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) # K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) # Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) # K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) # # #KKpK = np.dot(K_test[i],",
"= np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # # #gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ = - np.dot(RD.T,frhs) # #gc__",
"np.linalg.norm(Fc_clust - Fc_clust_python) ## ddd1 = np.linalg.norm(Fc_clust) ## print \"|Fc_clust_python - Fc_clust_myAp|/|Fc_clust_python|\",ddd0 /",
"!= I; I = np.concatenate((I,J[logInd])) J = np.concatenate((J,I[logInd])) V = np.concatenate((V,V[logInd])) if (makeSparse):",
"J = tmp[1::,1]-offset; V = tmp[1::,2] # # print str0,i,j if (makeSymmetric): logInd",
"[] rhs = [] xx = [] Kplus_f_test = [] KplusBcT_p = []",
"g_p += tmp_g; e_p = np.concatenate((e_p,tmp_e)) print(' ...%d '%(i)) # gc_p = np.concatenate((g_p,e_p))",
"x11 + x2) # print( \"||x_out - x_out_p || = %e \" %",
"print \"|Gc_clust_python - Gc_clust_myAp|/|Gc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gf_clust - Gf_clust_python)",
"(i == 0): g_p = tmp_g e_p = tmp_e; else: g_p += tmp_g;",
"load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) # lam_alpha = load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) # lam_alpha_p = np.linalg.solve(Ac_clust, gc) # nLam =",
"# plt.spy(GcTGc, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (GcTGc.nonzero()[0].shape[0])) # plt.subplot(1,3,2) # if",
"#BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc = load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf",
"np.concatenate((frhs,crhs)) # #x = np.linalg.solve(A,b) # #xxD = np.concatenate((xxD,crhs)) #Bc_g = np.hstack((Bc_g,Bc[2])) #Bc_g",
"Bc_nonzRow.append( Bc[i][indBc,:]) # Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) # xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1))",
"= np.hstack((Fc__,Gc_clust)) #AB1 = # # #ZZ1 = np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1 = np.vstack((ZZ1,H))",
"= np.hstack((AA0,AA1)) #A0 = np.hstack((K_regD,Bc_g.T)) # #nB = Bc_g.shape[0] #Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB)))) #",
"block_diag(K_regD,K_reg[i]); # RD = block_diag(RD,R[i]); # frhs = np.concatenate((frhs,rhs[i])) # xxD = np.concatenate((xxD,xx[i]))",
"##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1); # ##print (\"nnz = %d, nnz(reordered) = %d \") % (nnz0,",
"- Fc_List[ii]) ## ddd1 = np.linalg.norm(Fc_python_List[ii]) ## print \"|Fc_python - Fc_myAp|/|Fc_python|\",ddd0 / ddd1",
"= [] Fc = [] R = [] Rf = [] Bc =",
"- 1): if (i == 0): Bc_g = np.hstack((Bc[0],Bc[1])) else: Bc_g = np.hstack((Bc_g,Bc[i+1]))",
"# ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac_clust = Ac_clust.toarray() ### ##P,L,U= scipy.linalg.lu(Ac_clust) ##nnz0 =",
"= np.zeros((len(f0),3), dtype = float) for i in range(len(f0)): line = f0[i] k",
"# x_out_p.append(x10 - x11 + x2) # print( \"||x_out - x_out_p || =",
"0.7 # plt.spy(Ac_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Ac_clust.nonzero()[0].shape[0])) # plt.show() #",
"# K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) # # #KKpK = np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i])) # KKpK = np.dot(K[i], np.linalg.solve(K_reg[i],K[i]))",
"= [] # BcK_dense = [] K_UT = [] # x_out = []",
"load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) # lam_alpha_p = np.linalg.solve(Ac_clust, gc) # nLam = Bc[0].shape[0] # lam_p =",
"= np.linalg.solve(Ac_clust, gc) # nLam = Bc[0].shape[0] # lam_p = lam_alpha_p[0:nLam] ## alpha_p",
"= np.concatenate((e_p,tmp_e)) print(' ...%d '%(i)) # gc_p = np.concatenate((g_p,e_p)) # gc_p = np.concatenate((gc_p,np.zeros(6)))",
"= np.loadtxt(path0+'/dump_weigth.txt') #Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # # #gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ = -",
"# # # # # # #for d in range(nSub): # GfTGf_ +=",
"= [] for i in range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1))",
"= np.concatenate((frhs,crhs)) # #x = np.linalg.solve(A,b) # #xxD = np.concatenate((xxD,crhs)) #Bc_g = np.hstack((Bc_g,Bc[2]))",
"# # #for d in range(nSub): # GfTGf_ += np.dot(Gf[d].T,Gf[d]) # # #",
"= load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc = load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) # gc = load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) # lam_alpha = load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1)",
"load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf = load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1)",
"np.linalg.norm(Fc_python_List[ii] - Fc_List[ii]) ## ddd1 = np.linalg.norm(Fc_python_List[ii]) ## print \"|Fc_python - Fc_myAp|/|Fc_python|\",ddd0 /",
"R = [] Rf = [] Bc = [] Bf = [] BcT_dense",
"ddd1 = np.linalg.norm(Gf_clust) ## print \"|Gf_clust_python - Gf_clust_myAp|/|Gf_clust_python|\",ddd0 / ddd1 ## ## ddd0",
"# tmp = load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return tmp path0 = \"../data\" if 1: K =",
"= float) for i in range(len(f0)): line = f0[i] k = line.split() tmp[i,0]",
"U.nonzero()[0].shape[0] ## ## ### ### ##AcR = Ac_clust[np.ix_(r,r)] ##PR,LR,UR = scipy.linalg.lu(AcR) ##nnzR =",
"% (Ac_clust.nonzero()[0].shape[0])) # plt.show() # ##Bc_from_Rt = [] ##for i in range(1,14): ##",
"# frhs = np.concatenate((frhs,rhs[i])) # xxD = np.concatenate((xxD,xx[i])) # for i in range(nSub",
"alpha_p = lam_alpha[nLam:] # for i in range(nSub): # print (\" ! %d",
"= Gf_p[0]+ Gf_p[1] else: Gf_g += Gf_p[i+1] weigth = np.loadtxt(path0+'/dump_weigth.txt') #Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T))",
"load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) # ##Gc_H = np.dot(GcTGc.toarray(),ker_GcTGc) # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac = Ac_clust.toarray()[np.ix_(r,r)]",
"# plt.xlabel(\"nnz = %d\" % (Ac_clust.nonzero()[0].shape[0])) # plt.show() # ##Bc_from_Rt = [] ##for",
") / np.linalg.norm(K[i]) # print(del_) # tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e = -np.dot(R[i].T,rhs[i])",
"range(4): # # K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) # K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) # K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) # Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) # K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) #",
"import sparse import scipy.sparse.linalg as spla import pylab as plt from scipy.linalg import",
"ker_GcTGc = load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) # gc = load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) # lam_alpha = load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) # lam_alpha_p",
"k = line.split() tmp[i,0] = float(k[0]) tmp[i,1] = float(k[1]) tmp[i,2] = float(k[2]) if",
"% (nnz0, nnzR) # # ##plt.show() # ##ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc = load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1)",
"# # # # ##plt.spy(Fc_clust,markersize = .8);plt.show() # ##Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) # #",
"## Z = np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ## print ( Z.shape) ## Ac_clust_python = np.vstack((Ac_clust_python,Z)) ##",
"range(nSub - 1): if (i == 0): Bf_g = np.hstack((Bf[0],Bf[1])) else: Bf_g =",
"plt.xlabel(\"nnz = %d\" % (GcTGc.nonzero()[0].shape[0])) # plt.subplot(1,3,2) # if Fc_clust.shape[0] < 100: #",
"# #if False: # plt.subplot(1,3,1) # if GcTGc.shape[0] < 100: # markersize_ =",
"= load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf = load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1 =",
"for ii in range(nSub): ## ## ttt = Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] - GcList[ii] ## print",
"= np.hstack((Bf[0],Bf[1])) else: Bf_g = np.hstack((Bf_g,Bf[i+1])) for i in range(nSub - 1): if",
"load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) # # #dFc_eig = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig",
"Z = np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ## print ( Z.shape) ## Ac_clust_python = np.vstack((Ac_clust_python,Z)) ## ##",
"= f0.pop(0) #removes the first line tmp = np.zeros((len(f0),3), dtype = float) for",
"line.split() tmp[i,0] = float(k[0]) tmp[i,1] = float(k[1]) tmp[i,2] = float(k[2]) if (tmp.shape[0]==1): tmp",
"else: n = np.int32(tmp[0,0]) m = np.int32(tmp[0,1]) I = tmp[1::,0]-offset; J = tmp[1::,1]-offset;",
"return tmp def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile = path+'/'+str(i)+'/'+str0+str(j)+'.txt' # tmp = load_matrix_basic(pathToFile,makeSparse,makeSymmetric,offset) return tmp",
"# if Fc_clust.shape[0] < 100: # markersize = 3 # else: # markersize",
"logInd = J != I; I = np.concatenate((I,J[logInd])) J = np.concatenate((J,I[logInd])) V =",
"Bc_red = Bc[indBc,:] ## BcKplus = BcKplus_List[i] ## ## Bf = Bf_List[i].toarray() ##",
"np.concatenate((V,V[logInd])) if (makeSparse): tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else: if (m==1): tmp = V else:",
"0.125) # # # ##Fc_python_List = [] # ##if 0: ## Fc_clust_python =",
".8);plt.show() # ##Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) # # # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac_clust",
"load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc = load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) # gc = load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) # lam_alpha = load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) #",
"## Bc_red = Bc[indBc,:] ## BcKplus = BcKplus_List[i] ## ## Bf = Bf_List[i].toarray()",
"# # ##Fc_python_List = [] # ##if 0: ## Fc_clust_python = np.zeros((Bct_list[i].shape[0], Bct_list[i].shape[0]))",
"= np.linalg.norm(Fc_python_List[ii] - Fc_List[ii]) ## ddd1 = np.linalg.norm(Fc_python_List[ii]) ## print \"|Fc_python - Fc_myAp|/|Fc_python|\",ddd0",
"% (i)) # x10 = np.linalg.solve(K_reg[i],rhs[i]) # x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # # print",
"# for i in range(nSub - 1): if (i == 0): Bc_g =",
"# plt.spy(Ac_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Ac_clust.nonzero()[0].shape[0])) # plt.show() # ##Bc_from_Rt",
"[] Gc = [] Fc_p = [] rhs = [] xx = []",
"ii in range(nSub): ## ## ttt = Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] - GcList[ii] ## print np.linalg.norm(ttt)",
"else: # markersize_ = 0.7 # plt.spy(GcTGc, markersize=markersize_) # plt.xlabel(\"nnz = %d\" %",
"np.concatenate((e_p,tmp_e)) print(' ...%d '%(i)) # gc_p = np.concatenate((g_p,e_p)) # gc_p = np.concatenate((gc_p,np.zeros(6))) Gc_clust",
"scipy.sparse.linalg as spla import pylab as plt from scipy.linalg import block_diag # #",
"= RList[i].toarray() ## ## ## ## if (i == 0): ## Gf_clust_python =",
"np.hstack((Bc[0],Bc[1])) else: Bc_g = np.hstack((Bc_g,Bc[i+1])) for i in range(nSub - 1): if (i",
"xx = [] Kplus_f_test = [] KplusBcT_p = [] Bc_nonzRow = [] KplusBcT",
"scipy import sparse import scipy.sparse.linalg as spla import pylab as plt from scipy.linalg",
"plt.subplot(1,3,1) # if GcTGc.shape[0] < 100: # markersize_ = 3 # else: #",
"- x_out_p[i])) Ac_clust_python = np.hstack((Fc_clust,Gc_clust)) Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print ( Z.shape) Ac_clust_python =",
"# gc = load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) # lam_alpha = load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) # lam_alpha_p = np.linalg.solve(Ac_clust, gc)",
"Gf_g += Gf_p[i+1] weigth = np.loadtxt(path0+'/dump_weigth.txt') #Fc__ = np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # # #gc__ =",
"%d\" % (Fc_clust.nonzero()[0].shape[0])) # plt.subplot(1,3,3) # if Ac_clust.shape[0] < 100: # markersize_ =",
"BcKplus_python = np.linalg.solve(K_reg_List[i],Bc_red.T) ## BcKplus_ = np.linalg.solve(K_reg_List[i],Bc.T) ## Fc_i = np.dot(Bc_red,BcKplus_python) ## Fc_clust_python",
"= 3 # else: # markersize_ = 0.7 # plt.spy(GcTGc, markersize=markersize_) # plt.xlabel(\"nnz",
"= np.concatenate((xxD,crhs)) #Bc_g = np.hstack((Bc_g,Bc[2])) #Bc_g = np.hstack((Bc_g,Bc[2])) #BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust =",
"load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) # # #GfTGf_ = np.zeros((GfTGf.shape[0],GfTGf.shape[0])) #",
"np.concatenate((frhs,rhs[i])) # xxD = np.concatenate((xxD,xx[i])) # for i in range(nSub - 1): if",
"# ##ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc = load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0 = load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) # ##Gc_H =",
"as spla import pylab as plt from scipy.linalg import block_diag # # nSub",
"np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) # xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) # KplusBcT_p = BcKplus_List[i] # BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) #",
"[] KplusBcT = [] BcKplus_tmp = [] # BcK_dense = [] K_UT =",
"np.hstack((K_regD,Bc_g.T)) # #nB = Bc_g.shape[0] #Bc_Z = np.hstack((Bc_g,np.zeros((nB,nB)))) # #crhs = np.zeros(nB); #",
"tmp[1::,1]-offset; V = tmp[1::,2] # # print str0,i,j if (makeSymmetric): logInd = J",
"#crhs = np.zeros(nB); # #A = np.vstack((A0,Bc_Z)) # #b = np.concatenate((frhs,crhs)) # #x",
"## Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ## Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ## Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ##",
"# x10 = np.linalg.solve(K_reg[i],rhs[i]) # x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # # print alpha_p[(6*i):(6*(i+1))] #",
"Bc_g = np.hstack((Bc_g,Bc[i+1])) for i in range(nSub - 1): if (i == 0):",
"nnzR) # # ##plt.show() # ##ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc = load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0 =",
"= lam_alpha[nLam:] # for i in range(nSub): # print (\" ! %d \"",
"= load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) # # #GfTGf_ = np.zeros((GfTGf.shape[0],GfTGf.shape[0]))",
"= load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) # lam_alpha = load_matrix(path0,\"dump_lam_alpha_\",\"\",str(0),False,False,1) # lam_alpha_p = np.linalg.solve(Ac_clust, gc) # nLam",
"= %e \" % np.linalg.norm(x_out[i] - x_out_p[i])) Ac_clust_python = np.hstack((Fc_clust,Gc_clust)) Z = np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1]))",
"Bf[indBf,:] ## ## Rc = RList[i].toarray() ## ## ## ## if (i ==",
"## Ac_clust_python = np.vstack((Ac_clust_python,Z)) ## ## ## ddd0 = np.linalg.norm(Fc_clust - Fc_clust_python) ##",
"load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1) Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),False,True,1) ker_GcTGc = load_matrix(path0,\"dump_kerGc_\",\"\",str(0),False,False,1) # gc = load_matrix(path0,\"dump_gc_\",\"\",str(0),False,False,1) # lam_alpha",
"np.concatenate((gc__,ec__)) #H = ker_GcTGc #AA0 = np.hstack((Fc__,Gc_clust)) #AB1 = # # #ZZ1 =",
"= np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ## Gc_clust_python = np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ## indBcKplus = np.abs(BcKplus).sum(axis=1)>0 ## BcKplus =",
"Bf_red = Bf[indBf,:] ## ## Rc = RList[i].toarray() ## ## ## ## if",
"np.hstack((Bc_g,Bc[i+1])) for i in range(nSub - 1): if (i == 0): Bf_g =",
"= np.hstack((Gc_clust_python,np.dot(Bc,Rc))) ## indBcKplus = np.abs(BcKplus).sum(axis=1)>0 ## BcKplus = BcKplus[indBcKplus,:] ## BcKplus_python =",
"# K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) # K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) # K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) # Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) # K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) # # #KKpK",
"# ##Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) # # # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac_clust =",
"KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD = K_reg[0] #frhs = rhs[0] #xxD = xx[0] #RD =",
"= - np.dot(RD.T,frhs) # #gc__ = np.concatenate((gc__,ec__)) #H = ker_GcTGc #AA0 = np.hstack((Fc__,Gc_clust))",
"x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K = np.linalg.solve(K_reg[i],K[i]) # K_iK_K = np.dot(K[i],iK_K) # del_ =",
"+= np.dot(Bc,BcKplus_) ## Fc_python_List.append(Fc_i) ## ## for ii in range(nSub): ## ## ttt",
"= [] # Gf = [] Gf_p = [] Gc = [] Fc_p",
"H.shape[1])) #AA1 = np.vstack((ZZ1,H)) #AA01 = np.hstack((AA0,AA1)) #A0 = np.hstack((K_regD,Bc_g.T)) # #nB =",
"= load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf = load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf = load_matrix(path0,\"dump_iGfTGf_\",\"\",str(0),False,False,1) #ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0 =",
"- Fc_clust_myAp|/|Fc_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Gc_clust - Gc_clust_python) ## ddd1",
"# # # # #if False: # plt.subplot(1,3,1) # if GcTGc.shape[0] < 100:",
"# KplusBcT_p = BcKplus_List[i] # BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) #",
"# # print alpha_p[(6*i):(6*(i+1))] # x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) # # x_out_p.append(x10 - x11",
"#Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc = load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf = load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf",
"## ## for ii in range(nSub): ## ## ttt = Gc_List[ii][np.abs(Gc_List[ii]).sum(axis=1)>0,:] - GcList[ii]",
"#K_regD = K_reg[0] #frhs = rhs[0] #xxD = xx[0] #RD = R[0] #for",
"Fc_clust_python += np.dot(Bc,BcKplus_) ## Fc_python_List.append(Fc_i) ## ## for ii in range(nSub): ## ##",
"= J != I; I = np.concatenate((I,J[logInd])) J = np.concatenate((J,I[logInd])) V = np.concatenate((V,V[logInd]))",
"#AB1 = # # #ZZ1 = np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1 = np.vstack((ZZ1,H)) #AA01 =",
"[] #K_reg_SF = [] #x_test = [] # # #for i in range(4):",
"= [] else: n = np.int32(tmp[0,0]) m = np.int32(tmp[0,1]) I = tmp[1::,0]-offset; J",
"np.linalg.solve(K_reg[i],K[i]) # K_iK_K = np.dot(K[i],iK_K) # del_ = np.linalg.norm(K_iK_K - K[i] ) /",
"# # # # # # # # # # ##plt.spy(Fc_clust,markersize = .8);plt.show()",
"np.zeros((Gc_clust.shape[1],Ac_clust_python.shape[1])) print ( Z.shape) Ac_clust_python = np.vstack((Ac_clust_python,Z)) Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) # test =",
"np.linalg.solve(K_reg_List[i],Bc_red.T) ## BcKplus_ = np.linalg.solve(K_reg_List[i],Bc.T) ## Fc_i = np.dot(Bc_red,BcKplus_python) ## Fc_clust_python += np.dot(Bc,BcKplus_)",
"gc_p = np.concatenate((g_p,e_p)) # gc_p = np.concatenate((gc_p,np.zeros(6))) Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),False,True,1)",
"- Ac_clust_myAp|/|Ac_clust_python|\",ddd0 / ddd1 ## ##K = [] # # # ##plt.subplot(1,2,1) ##plt.spy(Gf_clust_python,markersize=1)",
"= np.linalg.solve(K_reg_List[i],Bc_red.T) ## BcKplus_ = np.linalg.solve(K_reg_List[i],Bc.T) ## Fc_i = np.dot(Bc_red,BcKplus_python) ## Fc_clust_python +=",
"Lumped = [] for i in range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1))",
"range(1,nSub): # K_regD = block_diag(K_regD,K_reg[i]); # RD = block_diag(RD,R[i]); # frhs = np.concatenate((frhs,rhs[i]))",
"# Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) # xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) # KplusBcT_p = BcKplus_List[i]",
"# # #dFc_eig = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd =",
"##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1); # ##print (\"nnz = %d, nnz(reordered) = %d \")",
"plt from scipy.linalg import block_diag # # nSub = 2 def load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset):",
"[] # # #for i in range(4): # # K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) # K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) #",
"ddd0 = np.linalg.norm(Fc_python_List[ii] - Fc_List[ii]) ## ddd1 = np.linalg.norm(Fc_python_List[ii]) ## print \"|Fc_python -",
"|| = %e \" % np.linalg.norm(x_out[i] - x_out_p[i])) Ac_clust_python = np.hstack((Fc_clust,Gc_clust)) Z =",
"[] K_UT = [] # x_out = [] # x_out_p = [] #",
"[] Fc = [] R = [] Rf = [] Bc = []",
"e_p = np.concatenate((e_p,tmp_e)) print(' ...%d '%(i)) # gc_p = np.concatenate((g_p,e_p)) # gc_p =",
"Ac_clust_python = np.vstack((Ac_clust_python,Z)) Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",str(0),False,False,1) # test = load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) # KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1)",
"R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i])) # Lumped.append(load_matrix(path0,\"dump_Lumped_\",\"\",str(i),False,False,1)) BcT_dense.append(load_matrix(path0,\"dump_BcT_dense_\",\"\",str(i),False,False,1)) Gc.append(load_matrix(path0,\"dump_Gc_\",\"\",str(i),False,False,1)) # Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc =",
"##print (\"nnz = %d, nnz(reordered) = %d \") % (nnz0, nnzR) # #",
"# Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) # K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1)) # # #KKpK = np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i])) # KKpK =",
"-np.dot(R[i].T,rhs[i]) if (i == 0): g_p = tmp_g e_p = tmp_e; else: g_p",
"# lam_alpha_p = np.linalg.solve(Ac_clust, gc) # nLam = Bc[0].shape[0] # lam_p = lam_alpha_p[0:nLam]",
"0): Bf_g = np.hstack((Bf[0],Bf[1])) else: Bf_g = np.hstack((Bf_g,Bf[i+1])) for i in range(nSub -",
"Bf_g = np.hstack((Bf_g,Bf[i+1])) for i in range(nSub - 1): if (i == 0):",
"## Ac_clust_python = np.hstack((Fc_clust_python,Gc_clust_python)) ## ## Z = np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ## print ( Z.shape)",
") ## # ## Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) # # # # ##BcT_dense =",
"#for i in range(1,nSub): # K_regD = block_diag(K_regD,K_reg[i]); # RD = block_diag(RD,R[i]); #",
"[] #K_Kplus_K_test = [] #K_reg_test = [] #K_reg_SF = [] #x_test = []",
"GfTGf_ += np.dot(Gf[d].T,Gf[d]) # # # # #if False: # plt.subplot(1,3,1) # if",
"[] #K_reg_test = [] #K_reg_SF = [] #x_test = [] # # #for",
"np.dot(Bf,Rc) ## Gc_clust_python = np.dot(Bc,Rc) ## else: ## Gf_clust_python = np.hstack((Gf_clust_python,np.dot(Bf,Rc))) ## Gc_clust_python",
"(tmp.shape[0]==1): tmp = [] else: n = np.int32(tmp[0,0]) m = np.int32(tmp[0,1]) I =",
"gc) # nLam = Bc[0].shape[0] # lam_p = lam_alpha_p[0:nLam] ## alpha_p = lam_alpha[nLam:]",
"[] BcT_dense = [] Gc = [] # Gf = [] Gf_p =",
"/ ddd1 ## ## ## Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ## Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ##",
"= lam_alpha_p[0:nLam] ## alpha_p = lam_alpha[nLam:] # for i in range(nSub): # print",
"# x_out_p = [] # Lumped = [] # Lumped = [] for",
"# # x_out_p.append(x10 - x11 + x2) # print( \"||x_out - x_out_p ||",
"##nnzR = LR.nonzero()[0].shape[0] + UR.nonzero()[0].shape[0] ### ### ##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1);",
"+ UR.nonzero()[0].shape[0] ### ### ##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1); ##plt.subplot(2,2,3) ##plt.spy(LR,markersize=0.1); ##plt.subplot(2,2,4) ##plt.spy(UR,markersize=0.1); #",
"Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ## Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ## Ac_clust_python = np.hstack((Fc_clust_python,Gc_clust_python)) ## ##",
"= [] BcKplus_tmp = [] # BcK_dense = [] K_UT = [] #",
"= Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1) ##plt.spy(Ac_clust ,markersize = 2.0) ##plt.subplot(1,2,2) ##plt.spy(Ac,markersize = 0.125) # #",
"Bf = Bf_List[i].toarray() ## indBf = np.abs(Bf).sum(axis=1)>0 ## Bf_red = Bf[indBf,:] ## ##",
"# BcK_dense = [] K_UT = [] # x_out = [] # x_out_p",
"load_matrix_basic(pathToFile,makeSparse,makeSymmetric, offset): f0 = open(pathToFile).readlines() firstLine = f0.pop(0) #removes the first line tmp",
"K_iK_K = np.dot(K[i],iK_K) # del_ = np.linalg.norm(K_iK_K - K[i] ) / np.linalg.norm(K[i]) #",
"f0 = open(pathToFile).readlines() firstLine = f0.pop(0) #removes the first line tmp = np.zeros((len(f0),3),",
"np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ## print ( Z.shape) ## Ac_clust_python = np.vstack((Ac_clust_python,Z)) ## ## ## ddd0",
"= np.linalg.norm(Fc_python_List[ii]) ## print \"|Fc_python - Fc_myAp|/|Fc_python|\",ddd0 / ddd1 ## ## ## Fc_clust",
"[] # x_out = [] # x_out_p = [] # Lumped = []",
"## ## Rc = RList[i].toarray() ## ## ## ## if (i == 0):",
"np.hstack((Bc_g,Bc[2])) #BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1) #Fc_clust = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc = load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1)",
"# markersize_ = 3 # else: # markersize_ = 0.7 # plt.spy(GcTGc, markersize=markersize_)",
"= [] # x_out = [] # x_out_p = [] # Lumped =",
"#if False: # plt.subplot(1,3,1) # if GcTGc.shape[0] < 100: # markersize_ = 3",
"K[i]) # # # # # # # # # # # #",
"first line tmp = np.zeros((len(f0),3), dtype = float) for i in range(len(f0)): line",
"# for i in range(nSub): # print (\" ! %d \" % (i))",
"x_out_p.append(x10 - x11 + x2) # print( \"||x_out - x_out_p || = %e",
"[] for i in range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1))",
"tmp_g; e_p = np.concatenate((e_p,tmp_e)) print(' ...%d '%(i)) # gc_p = np.concatenate((g_p,e_p)) # gc_p",
"# ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac = Ac_clust.toarray()[np.ix_(r,r)] ##plt.subplot(1,2,1) ##plt.spy(Ac_clust ,markersize = 2.0)",
"# KKpK = np.dot(K[i], np.linalg.solve(K_reg[i],K[i])) # print \"norm = %3.8e \\n\" % np.linalg.norm(KKpK",
"(m==1): tmp = V else: tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return tmp def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset): pathToFile",
"= load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0 = load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) # ##Gc_H = np.dot(GcTGc.toarray(),ker_GcTGc) # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(),",
"(\"nnz = %d, nnz(reordered) = %d \") % (nnz0, nnzR) # # ##plt.show()",
"BcKplus[indBcKplus,:] ## BcKplus_python = np.linalg.solve(K_reg_List[i],Bc_red.T) ## BcKplus_ = np.linalg.solve(K_reg_List[i],Bc.T) ## Fc_i = np.dot(Bc_red,BcKplus_python)",
"for i in range(nSub - 1): if (i == 0): Gf_g = Gf_p[0]+",
"load_matrix(path0,\"dump_testXYZ_\",\"\",str(0),False,False,1) # KpOnes= load_matrix(path0,\"dump_KplusONES_\",\"\",str(0),False,False,1) #K_regD = K_reg[0] #frhs = rhs[0] #xxD = xx[0]",
"= %d\" % (Ac_clust.nonzero()[0].shape[0])) # plt.show() # ##Bc_from_Rt = [] ##for i in",
"(i == 0): Gf_g = Gf_p[0]+ Gf_p[1] else: Gf_g += Gf_p[i+1] weigth =",
"## ## ## for ii in range(nSub): ## ddd0 = np.linalg.norm(Fc_python_List[ii] - Fc_List[ii])",
"np.linalg.norm(K[i]) # print(del_) # tmp_g = np.dot(Bc[i],np.linalg.solve(K_reg[i], rhs[i])) tmp_e = -np.dot(R[i].T,rhs[i]) if (i",
"# Gf.append(load_matrix(path0,\"dump_Gf_\",\"\",str(i),False,False,1)) indBc = np.abs(Bc[i]).sum(axis=1)>0 Bc_nonzRow.append( Bc[i][indBc,:]) # Fc.append( np.dot(Bc_nonzRow[i], np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append(",
"load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1) ##dFc_svd = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(555),False,False,1) #dAc_eig = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(444),False,False,1) ##dAc_svd = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) # # #GfTGf_",
"load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0 = load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) # ##Gc_H = np.dot(GcTGc.toarray(),ker_GcTGc) # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True)",
"tmp_g e_p = tmp_e; else: g_p += tmp_g; e_p = np.concatenate((e_p,tmp_e)) print(' ...%d",
"##Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) # # # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac_clust = Ac_clust.toarray()",
"#ec__ = - np.dot(RD.T,frhs) # #gc__ = np.concatenate((gc__,ec__)) #H = ker_GcTGc #AA0 =",
"Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ## Ac_clust_python = np.hstack((Fc_clust_python,Gc_clust_python)) ## ## Z = np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ##",
"3 # else: # markersize_ = 0.7 # plt.spy(GcTGc, markersize=markersize_) # plt.xlabel(\"nnz =",
"# # #if False: # plt.subplot(1,3,1) # if GcTGc.shape[0] < 100: # markersize_",
"= [] #x_test = [] # # #for i in range(4): # #",
"# BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i]))",
"# # # # # # # # # # # # #",
"= [] ##for i in range(1,14): ## Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) ) ## # ##",
"+= np.dot(Gf[d].T,Gf[d]) # # # # #if False: # plt.subplot(1,3,1) # if GcTGc.shape[0]",
"if (m==1): tmp = V else: tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).toarray() return tmp def load_matrix(path,str0,i,j,makeSparse,makeSymmetric,offset):",
"# x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # # print alpha_p[(6*i):(6*(i+1))] # x2 = np.dot(R[i],alpha_p[(6*i):(6*(i+1))]) #",
"load_matrix(path0,\"dump_Ac_clust_\",\"\",0,False,True,1) ## Ac_clust_python = np.hstack((Fc_clust_python,Gc_clust_python)) ## ## Z = np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ## print (",
"# K_regD = block_diag(K_regD,K_reg[i]); # RD = block_diag(RD,R[i]); # frhs = np.concatenate((frhs,rhs[i])) #",
"Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) # xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) # KplusBcT_p = BcKplus_List[i] #",
"np.dot(K_test[i], np.linalg.solve(K_reg_test[i],K_test[i])) # KKpK = np.dot(K[i], np.linalg.solve(K_reg[i],K[i])) # print \"norm = %3.8e \\n\"",
"= load_matrix(path0,\"dump_Fc_clust_\",\"\",str(0),True,True,1) #Ac_clust = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(0),True,True,1) #GcTGc = load_matrix(path0,\"dump_GcTGc_clust_\",\"\",str(0),False,True,1) #GfTGf = load_matrix(path0,\"dump_GfTGf_\",\"\",str(0),False,False,1) #iGfTGf =",
"##dAc_svd = load_matrix(path0,\"dump_Ac_clust_\",\"\",str(555),False,False,1) # # #GfTGf_ = np.zeros((GfTGf.shape[0],GfTGf.shape[0])) # # # # #",
"V = np.concatenate((V,V[logInd])) if (makeSparse): tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else: if (m==1): tmp =",
"## # ## Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),False,False,1) # # # # ##BcT_dense = load_matrix(path0,\"dump_BcT_dense_\",\"\",str(0),True,True,1)",
"I = tmp[1::,0]-offset; J = tmp[1::,1]-offset; V = tmp[1::,2] # # print str0,i,j",
"print \"norm = %3.8e \\n\" % np.linalg.norm(KKpK - K[i]) # # # #",
"for i in range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1)) K_UT.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,False,1)) K_reg.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) Fc.append(load_matrix(path0,\"dump_Fc_\",\"\",str(i),False,False,1)) R.append(load_matrix(path0,\"dump_R_\",\"\",str(i),False,False,1)) Rf.append(load_matrix(path0,\"dump_Rf_\",\"\",str(i),False,False,1)) Bc.append(load_matrix(path0,\"dump_Bc_\",\"\",str(i),False,False,1)) Bf.append(load_matrix(path0,\"dump_Bf_\",\"\",str(i),False,False,1)) Gf_p.append(np.dot(Bf[i],Rf[i]))",
"np.linalg.norm(Gf_clust - Gf_clust_python) ## ddd1 = np.linalg.norm(Gf_clust) ## print \"|Gf_clust_python - Gf_clust_myAp|/|Gf_clust_python|\",ddd0 /",
"plt.show() # ##Bc_from_Rt = [] ##for i in range(1,14): ## Bc_from_Rt.append( load_matrix(path0,\"dump_Bc_from_Rt_\",\"\",str(i),False,False,1) )",
"= [] Bc_nonzRow = [] KplusBcT = [] BcKplus_tmp = [] # BcK_dense",
"J = np.concatenate((J,I[logInd])) V = np.concatenate((V,V[logInd])) if (makeSparse): tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else: if",
"np.dot(Bc_g,np.linalg.solve(K_regD,Bc_g.T)) # # #gc__ = np.dot(Bc_g,np.linalg.solve(K_regD,frhs)) #ec__ = - np.dot(RD.T,frhs) # #gc__ =",
"%3.8e \\n\" % np.linalg.norm(KKpK - K[i]) # # # # # # #",
"indBc = np.abs(Bc).sum(axis=1)>0 ## Bc_red = Bc[indBc,:] ## BcKplus = BcKplus_List[i] ## ##",
"## ## ddd0 = np.linalg.norm(Fc_clust - Fc_clust_python) ## ddd1 = np.linalg.norm(Fc_clust) ## print",
"open(pathToFile).readlines() firstLine = f0.pop(0) #removes the first line tmp = np.zeros((len(f0),3), dtype =",
"\" % (i)) # x10 = np.linalg.solve(K_reg[i],rhs[i]) # x11 = np.linalg.solve(K_reg[i],np.dot(Bc[i].T,lam_p)) # #",
"# gc_p = np.concatenate((g_p,e_p)) # gc_p = np.concatenate((gc_p,np.zeros(6))) Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",str(0),False,False,1) Ac_clust =",
"# print( \"||x_out - x_out_p || = %e \" % np.linalg.norm(x_out[i] - x_out_p[i]))",
"in range(len(f0)): line = f0[i] k = line.split() tmp[i,0] = float(k[0]) tmp[i,1] =",
"# # # # # #for d in range(nSub): # GfTGf_ += np.dot(Gf[d].T,Gf[d])",
"= scipy.linalg.lu(AcR) ##nnzR = LR.nonzero()[0].shape[0] + UR.nonzero()[0].shape[0] ### ### ##plt.subplot(2,2,1) ##plt.spy(L,markersize=0.1); ##plt.subplot(2,2,2) ##plt.spy(U,markersize=0.1);",
"BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K =",
"np.vstack((ZZ1,H)) #AA01 = np.hstack((AA0,AA1)) #A0 = np.hstack((K_regD,Bc_g.T)) # #nB = Bc_g.shape[0] #Bc_Z =",
"n = np.int32(tmp[0,0]) m = np.int32(tmp[0,1]) I = tmp[1::,0]-offset; J = tmp[1::,1]-offset; V",
"= [] #K_reg_test = [] #K_reg_SF = [] #x_test = [] # #",
"= load_matrix(path0,\"dump_Fc_clust_\",\"\",0,False,True,1) ## Gc_clust = load_matrix(path0,\"dump_Gc_clust_\",\"\",0,False,False,1) ## Gf_clust = load_matrix(path0,\"dump_Gf_clust_\",\"\",0,False,False,1) ## Ac_clust =",
"1): if (i == 0): Gf_g = Gf_p[0]+ Gf_p[1] else: Gf_g += Gf_p[i+1]",
"## ddd0 = np.linalg.norm(Fc_clust - Fc_clust_python) ## ddd1 = np.linalg.norm(Fc_clust) ## print \"|Fc_clust_python",
"= np.zeros((Gc_clust_python.shape[1],Ac_clust.shape[1])) ## print ( Z.shape) ## Ac_clust_python = np.vstack((Ac_clust_python,Z)) ## ## ##",
"#AA0 = np.hstack((Fc__,Gc_clust)) #AB1 = # # #ZZ1 = np.zeros((Gc_clust.shape[0], H.shape[1])) #AA1 =",
"= load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,False,1) ##KpBcT0 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(0),False,False,1) ##KpBcT1 = load_matrix(path0,\"dump_KplusBcT_\",\"\",str(1),False,False,1) # # #dFc_eig = load_matrix(path0,\"dump_Fc_clust_\",\"\",str(444),False,False,1)",
"np.linalg.solve(K_reg_test[i],K_test[i])) # KKpK = np.dot(K[i], np.linalg.solve(K_reg[i],K[i])) # print \"norm = %3.8e \\n\" %",
"e_p = tmp_e; else: g_p += tmp_g; e_p = np.concatenate((e_p,tmp_e)) print(' ...%d '%(i))",
"# else: # markersize = 0.7 # plt.spy(Fc_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\"",
"np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))) # Lumped.append( np.dot(Bc_nonzRow[i], np.dot(K[i],Bc_nonzRow[i].T))) rhs.append(load_matrix(path0,\"dump_rhs_\",\"\",str(i),False,False,1)) # xx.append(load_matrix(path0,\"dump_xxTest_\",\"\",str(i),False,False,1)) # Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) # KplusBcT_p =",
"= [] #K_reg_SF = [] #x_test = [] # # #for i in",
"for i in range(nSub): # print (\" ! %d \" % (i)) #",
"in range(4): # # K_test.append(load_matrix(path0,\"dump_K_dense_\",\"\",str(i),False,True,1)) # K_reg_test.append(load_matrix(path0,\"dump_K_reg_\",\"\",str(i),False,True,1)) # K_reg_SF.append(load_matrix(path0,\"dump_K_reg_SF_\",\"\",str(i),False,True,1)) # Kplus_K_test.append(load_matrix(path0,\"dump_Kplus_K_\",\"\",str(i),False,False,1)) # K_Kplus_K_test.append(load_matrix(path0,\"dump_K_Kplus_K_\",\"\",str(i),False,False,1))",
"Kplus_f_test.append(load_matrix(path0,\"dump_Kplus_f_test_\",\"\",str(i),False,False,1)) # KplusBcT_p = BcKplus_List[i] # BcK_dense.append(load_matrix(path0,\"dump_BcK_dense_\",\"\",str(i),False,False,1)) # BcK_dense.append(np.dot(K[i],Bc_nonzRow[i].T).T) Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T))",
"Gc.append(np.dot(Bc[i], R[i])) KplusBcT.append(load_matrix(path0,\"dump_KplusBcT_\",\"\",str(i),False,False,1)) KplusBcT_p.append(np.linalg.solve(K_reg[i],Bc_nonzRow[i].T)) # BcKplus_tmp.append(np.linalg.solve(K_reg[i],Bc[i].T).T) # x_out.append(load_matrix(path0,\"dump_x_out_\",\"\",str(i),False,False,1)) Fc_p.append(np.dot(Bc_nonzRow[i],KplusBcT_p[i])) # iK_K = np.linalg.solve(K_reg[i],K[i])",
"lam_alpha_p = np.linalg.solve(Ac_clust, gc) # nLam = Bc[0].shape[0] # lam_p = lam_alpha_p[0:nLam] ##",
"= [] # x_out_p = [] # Lumped = [] # Lumped =",
"# #for d in range(nSub): # GfTGf_ += np.dot(Gf[d].T,Gf[d]) # # # #",
"= sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac_clust = Ac_clust.toarray() ### ##P,L,U= scipy.linalg.lu(Ac_clust) ##nnz0 = L.nonzero()[0].shape[0] +",
"##ker_Ac = load_matrix(path0,\"dump_ker_Ac_\",\"\",str(0),False,True,1) ##ker_GcTGc = load_matrix(path0,\"dump_ker_GcTGc_\",\"\",str(0),False,True,1) ##R0 = load_matrix(path0,\"dump_R_\",\"\",str(0),False,True,1) # ##Gc_H = np.dot(GcTGc.toarray(),ker_GcTGc)",
"markersize_ = 0.7 # plt.spy(Ac_clust, markersize=markersize_) # plt.xlabel(\"nnz = %d\" % (Ac_clust.nonzero()[0].shape[0])) #",
"print np.linalg.norm(ttt) ## ## ## for ii in range(nSub): ## ddd0 = np.linalg.norm(Fc_python_List[ii]",
"[] #Kplus_K_test = [] #K_Kplus_K_test = [] #K_reg_test = [] #K_reg_SF = []",
"np.concatenate((J,I[logInd])) V = np.concatenate((V,V[logInd])) if (makeSparse): tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else: if (m==1): tmp",
"load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) # # # ##r = sparse.csgraph.reverse_cuthill_mckee(Ac_clust.tocsr(), symmetric_mode=True) ##Ac_clust = Ac_clust.toarray() ### ##P,L,U=",
"Gf_clust_myAp|/|Gf_clust_python|\",ddd0 / ddd1 ## ## ddd0 = np.linalg.norm(Ac_clust - Ac_clust_python) ## ddd1 =",
"if (makeSparse): tmp = sparse.csc_matrix((V,(I,J)),shape=(n,m)).tocoo() else: if (m==1): tmp = V else: tmp",
"Bc_nonzRow = [] KplusBcT = [] BcKplus_tmp = [] # BcK_dense = []",
"# Lumped = [] # Lumped = [] for i in range(nSub): K.append(load_matrix(path0,\"dump_K_\",\"\",str(i),False,True,1))",
"# # ##plt.spy(Fc_clust,markersize = .8);plt.show() # ##Gc_ = load_matrix(path0,\"dump_Gc_i_\",\"\",str(0),True,True,1) # # # ##r",
"= 0.125) # # # ##Fc_python_List = [] # ##if 0: ## Fc_clust_python",
"< 100: # markersize_ = 3 # else: # markersize_ = 0.7 #"
"ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 = divider.append_axes('right', size = '5%', pad = 0.05)",
"z_mesh= f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax = plt.subplots() fig.set_size_inches(8,8) img1",
"cax=cax1, label = r'log(Temperature [K])') plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"T.png\")",
"k27_mesh, LW_mesh = np.meshgrid(LW_vals, k27_vals) k27_center_mesh, LW_center_mesh = np.meshgrid(LW_centers, k27_centers) #f_H2[data['k27'] == 1.58489319]",
"= np.zeros(len(x) + 1) dx = np.zeros(len(x) + 1) dx[1:-1] = x[1:] -",
"as plt import os, sys import numpy as np from mpl_toolkits.axes_grid1 import make_axes_locatable",
"= -2.8) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 = divider.append_axes('right', size",
"[1,100,1,100], s = 250, marker = \"*\", color = \"white\") plt.minorticks_on() plt.tight_layout(h_pad =",
"= 3, linestyles = '-.') ax.scatter( [1,1,100,100], [1,100,1,100], s = 250, marker =",
"10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'magma', vmin = -9, vmax = -2.8) ax.semilogx() ax.semilogy()",
"out orientation f_H2 = data[:,2] z_mesh = f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1]",
"plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"fH2.png\") plt.close() f_H2 = data[:,3] z_mesh=",
"plt.close() f_H2 = data[:,3] z_mesh= f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax",
"xnew def plot_2d_histogram(datafile = 'all_runs_d_12.20.dat'): ylabel = r'log(H$^{-}$ Photodetachment Scale Factor)' xlabel =",
"fig.savefig(\"fH2.png\") plt.close() f_H2 = data[:,3] z_mesh= f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig,",
"-9, vmax = -2.8) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 =",
"xnew[:-1] = x - 0.5*dx[:-1] xnew[-1] = x[-1] + 0.5*dx[-1] return xnew def",
"w_pad = 0.05) fig.savefig(\"fH2.png\") plt.close() f_H2 = data[:,3] z_mesh= f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh",
"import numpy as np from mpl_toolkits.axes_grid1 import make_axes_locatable def bins_from_centers(x): xnew = np.zeros(len(x)",
"np.log10(np.max(k27)), int(np.sqrt(np.size(k27) ))) k27_vals = bins_from_centers(k27_centers) LW_centers = np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)), int(np.sqrt(np.size(LW)))) LW_vals =",
"1.58489319] = 100.0 # flag to figure out orientation f_H2 = data[:,2] z_mesh",
"Factor)\" data = np.genfromtxt(datafile) # names = True) k27 = data[:,0] LW =",
"= 0.05) fig.colorbar(img1, cax=cax1, label = r'log(Temperature [K])') plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad",
"cmap = 'magma', vmin = -9, vmax = -2.8) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel)",
"= z[:-1,:-1] fig, ax = plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap",
"= f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax = plt.subplots() fig.set_size_inches(8,8) img1",
"orientation f_H2 = data[:,2] z_mesh = f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig,",
"np.log10(z_mesh.T), levels = [-8,-7,-6,-5,-4,-3], colors = 'black', linewidths = 3, linestyles = '-.')",
"dx = np.zeros(len(x) + 1) dx[1:-1] = x[1:] - x[:-1] dx[0] = dx[1]",
"- x[:-1] dx[0] = dx[1] dx[-1] = dx[-2] xnew[:-1] = x - 0.5*dx[:-1]",
"x - 0.5*dx[:-1] xnew[-1] = x[-1] + 0.5*dx[-1] return xnew def plot_2d_histogram(datafile =",
"def plot_2d_histogram(datafile = 'all_runs_d_12.20.dat'): ylabel = r'log(H$^{-}$ Photodetachment Scale Factor)' xlabel = \"log(LW",
"= 250, marker = \"*\", color = \"white\") plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad",
"fig, ax = plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'magma',",
"= '-.') ax.scatter( [1,1,100,100], [1,100,1,100], s = 250, marker = \"*\", color =",
"np.max(np.log10(z_mesh))) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 = divider.append_axes('right', size =",
"k27_centers = np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)), int(np.sqrt(np.size(k27) ))) k27_vals = bins_from_centers(k27_centers) LW_centers = np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)),",
"LW_centers = np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)), int(np.sqrt(np.size(LW)))) LW_vals = bins_from_centers(LW_centers) k27_mesh, LW_mesh = np.meshgrid(LW_vals, k27_vals)",
"data = np.genfromtxt(datafile) # names = True) k27 = data[:,0] LW = data[:,1]",
"divider.append_axes('right', size = '5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(Temperature [K])')",
"== 1.58489319] = 100.0 # flag to figure out orientation f_H2 = data[:,2]",
"= data[:,1] k27_centers = np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)), int(np.sqrt(np.size(k27) ))) k27_vals = bins_from_centers(k27_centers) LW_centers =",
"bins_from_centers(x): xnew = np.zeros(len(x) + 1) dx = np.zeros(len(x) + 1) dx[1:-1] =",
"ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'RdYlBu_r', vmin = np.min(np.log10(z_mesh)), vmax = np.max(np.log10(z_mesh))) ax.semilogx()",
"= np.genfromtxt(datafile) # names = True) k27 = data[:,0] LW = data[:,1] k27_centers",
"figure out orientation f_H2 = data[:,2] z_mesh = f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh =",
"= plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'magma', vmin =",
"make_axes_locatable(ax) cax1 = divider.append_axes('right', size = '5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label",
"= ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'magma', vmin = -9, vmax = -2.8)",
"- 0.5*dx[:-1] xnew[-1] = x[-1] + 0.5*dx[-1] return xnew def plot_2d_histogram(datafile = 'all_runs_d_12.20.dat'):",
"= x - 0.5*dx[:-1] xnew[-1] = x[-1] + 0.5*dx[-1] return xnew def plot_2d_histogram(datafile",
"-2.8) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 = divider.append_axes('right', size =",
"k27 = data[:,0] LW = data[:,1] k27_centers = np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)), int(np.sqrt(np.size(k27) ))) k27_vals",
"k27_centers) #f_H2[data['k27'] == 1.58489319] = 100.0 # flag to figure out orientation f_H2",
"to figure out orientation f_H2 = data[:,2] z_mesh = f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh",
"k27_center_mesh, LW_center_mesh = np.meshgrid(LW_centers, k27_centers) #f_H2[data['k27'] == 1.58489319] = 100.0 # flag to",
"fig.colorbar(img1, cax=cax1, label = r'log(f$_{\\rm H_2}$)') ax.contour( 10.**(LW_center_mesh), 10.0**(k27_center_mesh), np.log10(z_mesh.T), levels = [-8,-7,-6,-5,-4,-3],",
"ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 = divider.append_axes('right', size = '5%', pad",
"return xnew def plot_2d_histogram(datafile = 'all_runs_d_12.20.dat'): ylabel = r'log(H$^{-}$ Photodetachment Scale Factor)' xlabel",
"data[:,1] k27_centers = np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)), int(np.sqrt(np.size(k27) ))) k27_vals = bins_from_centers(k27_centers) LW_centers = np.linspace(np.log10(np.min(LW)),",
"bins_from_centers(k27_centers) LW_centers = np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)), int(np.sqrt(np.size(LW)))) LW_vals = bins_from_centers(LW_centers) k27_mesh, LW_mesh = np.meshgrid(LW_vals,",
"\"white\") plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"fH2.png\") plt.close() f_H2 = data[:,3]",
"np.log10(z_mesh.T), cmap = 'RdYlBu_r', vmin = np.min(np.log10(z_mesh)), vmax = np.max(np.log10(z_mesh))) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel)",
"fig, ax = plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'RdYlBu_r',",
"vmax = np.max(np.log10(z_mesh))) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 = divider.append_axes('right',",
"250, marker = \"*\", color = \"white\") plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad =",
"plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"T.png\") plt.close() return if __name__ ==",
"= np.zeros(len(x) + 1) dx[1:-1] = x[1:] - x[:-1] dx[0] = dx[1] dx[-1]",
"+ 1) dx[1:-1] = x[1:] - x[:-1] dx[0] = dx[1] dx[-1] = dx[-2]",
"= 0, w_pad = 0.05) fig.savefig(\"fH2.png\") plt.close() f_H2 = data[:,3] z_mesh= f_H2.reshape( int(np.sqrt(np.size(k27))),",
"= r'log(H$^{-}$ Photodetachment Scale Factor)' xlabel = \"log(LW Scale Factor)\" data = np.genfromtxt(datafile)",
"vmin = np.min(np.log10(z_mesh)), vmax = np.max(np.log10(z_mesh))) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax)",
"k27_vals) k27_center_mesh, LW_center_mesh = np.meshgrid(LW_centers, k27_centers) #f_H2[data['k27'] == 1.58489319] = 100.0 # flag",
"np.min(np.log10(z_mesh)), vmax = np.max(np.log10(z_mesh))) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 =",
"vmin = -9, vmax = -2.8) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax)",
"int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax = plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh),",
"matplotlib.pyplot as plt import os, sys import numpy as np from mpl_toolkits.axes_grid1 import",
"dx[0] = dx[1] dx[-1] = dx[-2] xnew[:-1] = x - 0.5*dx[:-1] xnew[-1] =",
"+ 1) dx = np.zeros(len(x) + 1) dx[1:-1] = x[1:] - x[:-1] dx[0]",
"np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)), int(np.sqrt(np.size(LW)))) LW_vals = bins_from_centers(LW_centers) k27_mesh, LW_mesh = np.meshgrid(LW_vals, k27_vals) k27_center_mesh, LW_center_mesh",
"plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"fH2.png\") plt.close() f_H2 = data[:,3] z_mesh= f_H2.reshape(",
"= make_axes_locatable(ax) cax1 = divider.append_axes('right', size = '5%', pad = 0.05) fig.colorbar(img1, cax=cax1,",
"np.zeros(len(x) + 1) dx = np.zeros(len(x) + 1) dx[1:-1] = x[1:] - x[:-1]",
"colors = 'black', linewidths = 3, linestyles = '-.') ax.scatter( [1,1,100,100], [1,100,1,100], s",
"label = r'log(Temperature [K])') plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"T.png\") plt.close()",
"size = '5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(Temperature [K])') plt.minorticks_on()",
"make_axes_locatable def bins_from_centers(x): xnew = np.zeros(len(x) + 1) dx = np.zeros(len(x) + 1)",
"0.5*dx[-1] return xnew def plot_2d_histogram(datafile = 'all_runs_d_12.20.dat'): ylabel = r'log(H$^{-}$ Photodetachment Scale Factor)'",
"os, sys import numpy as np from mpl_toolkits.axes_grid1 import make_axes_locatable def bins_from_centers(x): xnew",
"ax.contour( 10.**(LW_center_mesh), 10.0**(k27_center_mesh), np.log10(z_mesh.T), levels = [-8,-7,-6,-5,-4,-3], colors = 'black', linewidths = 3,",
"= bins_from_centers(k27_centers) LW_centers = np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)), int(np.sqrt(np.size(LW)))) LW_vals = bins_from_centers(LW_centers) k27_mesh, LW_mesh =",
"z[:-1,:-1] fig, ax = plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap =",
"data[:,2] z_mesh = f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax = plt.subplots()",
"Factor)' xlabel = \"log(LW Scale Factor)\" data = np.genfromtxt(datafile) # names = True)",
"fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'RdYlBu_r', vmin = np.min(np.log10(z_mesh)), vmax",
"pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(Temperature [K])') plt.minorticks_on() plt.tight_layout(h_pad = 0,",
"#z_mesh = z[:-1,:-1] fig, ax = plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T),",
"plt import os, sys import numpy as np from mpl_toolkits.axes_grid1 import make_axes_locatable def",
"0, w_pad = 0.05) fig.savefig(\"fH2.png\") plt.close() f_H2 = data[:,3] z_mesh= f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW))))",
"'magma', vmin = -9, vmax = -2.8) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider =",
"divider.append_axes('right', size = '5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(f$_{\\rm H_2}$)')",
"xnew = np.zeros(len(x) + 1) dx = np.zeros(len(x) + 1) dx[1:-1] = x[1:]",
"ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'magma', vmin = -9, vmax = -2.8) ax.semilogx()",
"= np.meshgrid(LW_vals, k27_vals) k27_center_mesh, LW_center_mesh = np.meshgrid(LW_centers, k27_centers) #f_H2[data['k27'] == 1.58489319] = 100.0",
"'-.') ax.scatter( [1,1,100,100], [1,100,1,100], s = 250, marker = \"*\", color = \"white\")",
"Scale Factor)\" data = np.genfromtxt(datafile) # names = True) k27 = data[:,0] LW",
"k27_vals = bins_from_centers(k27_centers) LW_centers = np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)), int(np.sqrt(np.size(LW)))) LW_vals = bins_from_centers(LW_centers) k27_mesh, LW_mesh",
"\"log(LW Scale Factor)\" data = np.genfromtxt(datafile) # names = True) k27 = data[:,0]",
"* import matplotlib.pyplot as plt import os, sys import numpy as np from",
"f_H2 = data[:,3] z_mesh= f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax =",
"divider = make_axes_locatable(ax) cax1 = divider.append_axes('right', size = '5%', pad = 0.05) fig.colorbar(img1,",
"0, w_pad = 0.05) fig.savefig(\"T.png\") plt.close() return if __name__ == \"__main__\": plot_2d_histogram( datafile",
"= 0.05) fig.colorbar(img1, cax=cax1, label = r'log(f$_{\\rm H_2}$)') ax.contour( 10.**(LW_center_mesh), 10.0**(k27_center_mesh), np.log10(z_mesh.T), levels",
"names = True) k27 = data[:,0] LW = data[:,1] k27_centers = np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)),",
"3, linestyles = '-.') ax.scatter( [1,1,100,100], [1,100,1,100], s = 250, marker = \"*\",",
"= dx[-2] xnew[:-1] = x - 0.5*dx[:-1] xnew[-1] = x[-1] + 0.5*dx[-1] return",
"from galaxy_analysis.plot.plot_styles import * import matplotlib.pyplot as plt import os, sys import numpy",
"= \"white\") plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"fH2.png\") plt.close() f_H2 =",
"1) dx = np.zeros(len(x) + 1) dx[1:-1] = x[1:] - x[:-1] dx[0] =",
"dx[-2] xnew[:-1] = x - 0.5*dx[:-1] xnew[-1] = x[-1] + 0.5*dx[-1] return xnew",
"# flag to figure out orientation f_H2 = data[:,2] z_mesh = f_H2.reshape( int(np.sqrt(np.size(k27))),",
"dx[-1] = dx[-2] xnew[:-1] = x - 0.5*dx[:-1] xnew[-1] = x[-1] + 0.5*dx[-1]",
"r'log(f$_{\\rm H_2}$)') ax.contour( 10.**(LW_center_mesh), 10.0**(k27_center_mesh), np.log10(z_mesh.T), levels = [-8,-7,-6,-5,-4,-3], colors = 'black', linewidths",
"H_2}$)') ax.contour( 10.**(LW_center_mesh), 10.0**(k27_center_mesh), np.log10(z_mesh.T), levels = [-8,-7,-6,-5,-4,-3], colors = 'black', linewidths =",
"img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'RdYlBu_r', vmin = np.min(np.log10(z_mesh)), vmax =",
"mpl_toolkits.axes_grid1 import make_axes_locatable def bins_from_centers(x): xnew = np.zeros(len(x) + 1) dx = np.zeros(len(x)",
"import * import matplotlib.pyplot as plt import os, sys import numpy as np",
"= 100.0 # flag to figure out orientation f_H2 = data[:,2] z_mesh =",
"[K])') plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"T.png\") plt.close() return if __name__",
"import make_axes_locatable def bins_from_centers(x): xnew = np.zeros(len(x) + 1) dx = np.zeros(len(x) +",
"= 'RdYlBu_r', vmin = np.min(np.log10(z_mesh)), vmax = np.max(np.log10(z_mesh))) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider",
"def bins_from_centers(x): xnew = np.zeros(len(x) + 1) dx = np.zeros(len(x) + 1) dx[1:-1]",
"cmap = 'RdYlBu_r', vmin = np.min(np.log10(z_mesh)), vmax = np.max(np.log10(z_mesh))) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel)",
"data[:,3] z_mesh= f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax = plt.subplots() fig.set_size_inches(8,8)",
"#f_H2[data['k27'] == 1.58489319] = 100.0 # flag to figure out orientation f_H2 =",
"ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 = divider.append_axes('right', size = '5%', pad =",
"10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'RdYlBu_r', vmin = np.min(np.log10(z_mesh)), vmax = np.max(np.log10(z_mesh))) ax.semilogx() ax.semilogy()",
"Scale Factor)' xlabel = \"log(LW Scale Factor)\" data = np.genfromtxt(datafile) # names =",
"fig.colorbar(img1, cax=cax1, label = r'log(Temperature [K])') plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05)",
"= '5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(f$_{\\rm H_2}$)') ax.contour( 10.**(LW_center_mesh),",
"bins_from_centers(LW_centers) k27_mesh, LW_mesh = np.meshgrid(LW_vals, k27_vals) k27_center_mesh, LW_center_mesh = np.meshgrid(LW_centers, k27_centers) #f_H2[data['k27'] ==",
"1) dx[1:-1] = x[1:] - x[:-1] dx[0] = dx[1] dx[-1] = dx[-2] xnew[:-1]",
"= 0.05) fig.savefig(\"T.png\") plt.close() return if __name__ == \"__main__\": plot_2d_histogram( datafile = str(sys.argv[1]))",
"int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax = plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh),",
"data[:,0] LW = data[:,1] k27_centers = np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)), int(np.sqrt(np.size(k27) ))) k27_vals = bins_from_centers(k27_centers)",
"100.0 # flag to figure out orientation f_H2 = data[:,2] z_mesh = f_H2.reshape(",
"= 'magma', vmin = -9, vmax = -2.8) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider",
"0.05) fig.savefig(\"fH2.png\") plt.close() f_H2 = data[:,3] z_mesh= f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1]",
"dx[1] dx[-1] = dx[-2] xnew[:-1] = x - 0.5*dx[:-1] xnew[-1] = x[-1] +",
"ax = plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'RdYlBu_r', vmin",
"from mpl_toolkits.axes_grid1 import make_axes_locatable def bins_from_centers(x): xnew = np.zeros(len(x) + 1) dx =",
"dx[1:-1] = x[1:] - x[:-1] dx[0] = dx[1] dx[-1] = dx[-2] xnew[:-1] =",
"= np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)), int(np.sqrt(np.size(k27) ))) k27_vals = bins_from_centers(k27_centers) LW_centers = np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)), int(np.sqrt(np.size(LW))))",
"img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'magma', vmin = -9, vmax =",
"= dx[1] dx[-1] = dx[-2] xnew[:-1] = x - 0.5*dx[:-1] xnew[-1] = x[-1]",
"r'log(H$^{-}$ Photodetachment Scale Factor)' xlabel = \"log(LW Scale Factor)\" data = np.genfromtxt(datafile) #",
"x[1:] - x[:-1] dx[0] = dx[1] dx[-1] = dx[-2] xnew[:-1] = x -",
"np.log10(np.max(LW)), int(np.sqrt(np.size(LW)))) LW_vals = bins_from_centers(LW_centers) k27_mesh, LW_mesh = np.meshgrid(LW_vals, k27_vals) k27_center_mesh, LW_center_mesh =",
"flag to figure out orientation f_H2 = data[:,2] z_mesh = f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW))))",
"label = r'log(f$_{\\rm H_2}$)') ax.contour( 10.**(LW_center_mesh), 10.0**(k27_center_mesh), np.log10(z_mesh.T), levels = [-8,-7,-6,-5,-4,-3], colors =",
"pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(f$_{\\rm H_2}$)') ax.contour( 10.**(LW_center_mesh), 10.0**(k27_center_mesh), np.log10(z_mesh.T),",
"= -9, vmax = -2.8) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1",
"))) k27_vals = bins_from_centers(k27_centers) LW_centers = np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)), int(np.sqrt(np.size(LW)))) LW_vals = bins_from_centers(LW_centers) k27_mesh,",
"= r'log(Temperature [K])') plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"T.png\") plt.close() return",
"LW_center_mesh = np.meshgrid(LW_centers, k27_centers) #f_H2[data['k27'] == 1.58489319] = 100.0 # flag to figure",
"= ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'RdYlBu_r', vmin = np.min(np.log10(z_mesh)), vmax = np.max(np.log10(z_mesh)))",
"= 'black', linewidths = 3, linestyles = '-.') ax.scatter( [1,1,100,100], [1,100,1,100], s =",
"+ 0.5*dx[-1] return xnew def plot_2d_histogram(datafile = 'all_runs_d_12.20.dat'): ylabel = r'log(H$^{-}$ Photodetachment Scale",
"color = \"white\") plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"fH2.png\") plt.close() f_H2",
"plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'RdYlBu_r', vmin = np.min(np.log10(z_mesh)),",
"sys import numpy as np from mpl_toolkits.axes_grid1 import make_axes_locatable def bins_from_centers(x): xnew =",
"= np.min(np.log10(z_mesh)), vmax = np.max(np.log10(z_mesh))) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1",
"ylabel = r'log(H$^{-}$ Photodetachment Scale Factor)' xlabel = \"log(LW Scale Factor)\" data =",
"True) k27 = data[:,0] LW = data[:,1] k27_centers = np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)), int(np.sqrt(np.size(k27) )))",
"int(np.sqrt(np.size(k27) ))) k27_vals = bins_from_centers(k27_centers) LW_centers = np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)), int(np.sqrt(np.size(LW)))) LW_vals = bins_from_centers(LW_centers)",
"f_H2 = data[:,2] z_mesh = f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax",
"= r'log(f$_{\\rm H_2}$)') ax.contour( 10.**(LW_center_mesh), 10.0**(k27_center_mesh), np.log10(z_mesh.T), levels = [-8,-7,-6,-5,-4,-3], colors = 'black',",
"as np from mpl_toolkits.axes_grid1 import make_axes_locatable def bins_from_centers(x): xnew = np.zeros(len(x) + 1)",
"int(np.sqrt(np.size(LW)))) LW_vals = bins_from_centers(LW_centers) k27_mesh, LW_mesh = np.meshgrid(LW_vals, k27_vals) k27_center_mesh, LW_center_mesh = np.meshgrid(LW_centers,",
"= np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)), int(np.sqrt(np.size(LW)))) LW_vals = bins_from_centers(LW_centers) k27_mesh, LW_mesh = np.meshgrid(LW_vals, k27_vals) k27_center_mesh,",
"w_pad = 0.05) fig.savefig(\"T.png\") plt.close() return if __name__ == \"__main__\": plot_2d_histogram( datafile =",
"marker = \"*\", color = \"white\") plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05)",
"10.0**(k27_center_mesh), np.log10(z_mesh.T), levels = [-8,-7,-6,-5,-4,-3], colors = 'black', linewidths = 3, linestyles =",
"import matplotlib.pyplot as plt import os, sys import numpy as np from mpl_toolkits.axes_grid1",
"= bins_from_centers(LW_centers) k27_mesh, LW_mesh = np.meshgrid(LW_vals, k27_vals) k27_center_mesh, LW_center_mesh = np.meshgrid(LW_centers, k27_centers) #f_H2[data['k27']",
"= 0, w_pad = 0.05) fig.savefig(\"T.png\") plt.close() return if __name__ == \"__main__\": plot_2d_histogram(",
"plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"T.png\") plt.close() return if __name__ == \"__main__\":",
"ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 = divider.append_axes('right', size = '5%',",
"linewidths = 3, linestyles = '-.') ax.scatter( [1,1,100,100], [1,100,1,100], s = 250, marker",
"= \"*\", color = \"white\") plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"fH2.png\")",
"0.5*dx[:-1] xnew[-1] = x[-1] + 0.5*dx[-1] return xnew def plot_2d_histogram(datafile = 'all_runs_d_12.20.dat'): ylabel",
"plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'magma', vmin = -9,",
"# names = True) k27 = data[:,0] LW = data[:,1] k27_centers = np.linspace(np.log10(np.min(k27)),",
"ax.scatter( [1,1,100,100], [1,100,1,100], s = 250, marker = \"*\", color = \"white\") plt.minorticks_on()",
"0.05) fig.colorbar(img1, cax=cax1, label = r'log(Temperature [K])') plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad =",
"\"*\", color = \"white\") plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"fH2.png\") plt.close()",
"np.zeros(len(x) + 1) dx[1:-1] = x[1:] - x[:-1] dx[0] = dx[1] dx[-1] =",
"cax1 = divider.append_axes('right', size = '5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label =",
"= True) k27 = data[:,0] LW = data[:,1] k27_centers = np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)), int(np.sqrt(np.size(k27)",
"= [-8,-7,-6,-5,-4,-3], colors = 'black', linewidths = 3, linestyles = '-.') ax.scatter( [1,1,100,100],",
"LW = data[:,1] k27_centers = np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)), int(np.sqrt(np.size(k27) ))) k27_vals = bins_from_centers(k27_centers) LW_centers",
"cax=cax1, label = r'log(f$_{\\rm H_2}$)') ax.contour( 10.**(LW_center_mesh), 10.0**(k27_center_mesh), np.log10(z_mesh.T), levels = [-8,-7,-6,-5,-4,-3], colors",
"'5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(f$_{\\rm H_2}$)') ax.contour( 10.**(LW_center_mesh), 10.0**(k27_center_mesh),",
"= data[:,3] z_mesh= f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax = plt.subplots()",
"ax = plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'magma', vmin",
"fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'magma', vmin = -9, vmax",
"Photodetachment Scale Factor)' xlabel = \"log(LW Scale Factor)\" data = np.genfromtxt(datafile) # names",
"np.genfromtxt(datafile) # names = True) k27 = data[:,0] LW = data[:,1] k27_centers =",
"np.meshgrid(LW_centers, k27_centers) #f_H2[data['k27'] == 1.58489319] = 100.0 # flag to figure out orientation",
"plot_2d_histogram(datafile = 'all_runs_d_12.20.dat'): ylabel = r'log(H$^{-}$ Photodetachment Scale Factor)' xlabel = \"log(LW Scale",
"numpy as np from mpl_toolkits.axes_grid1 import make_axes_locatable def bins_from_centers(x): xnew = np.zeros(len(x) +",
"size = '5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(f$_{\\rm H_2}$)') ax.contour(",
"[-8,-7,-6,-5,-4,-3], colors = 'black', linewidths = 3, linestyles = '-.') ax.scatter( [1,1,100,100], [1,100,1,100],",
"[1,1,100,100], [1,100,1,100], s = 250, marker = \"*\", color = \"white\") plt.minorticks_on() plt.tight_layout(h_pad",
"= \"log(LW Scale Factor)\" data = np.genfromtxt(datafile) # names = True) k27 =",
"= divider.append_axes('right', size = '5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(f$_{\\rm",
"= divider.append_axes('right', size = '5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(Temperature",
"LW_vals = bins_from_centers(LW_centers) k27_mesh, LW_mesh = np.meshgrid(LW_vals, k27_vals) k27_center_mesh, LW_center_mesh = np.meshgrid(LW_centers, k27_centers)",
"= 0.05) fig.savefig(\"fH2.png\") plt.close() f_H2 = data[:,3] z_mesh= f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh =",
"LW_mesh = np.meshgrid(LW_vals, k27_vals) k27_center_mesh, LW_center_mesh = np.meshgrid(LW_centers, k27_centers) #f_H2[data['k27'] == 1.58489319] =",
"= '5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(Temperature [K])') plt.minorticks_on() plt.tight_layout(h_pad",
"linestyles = '-.') ax.scatter( [1,1,100,100], [1,100,1,100], s = 250, marker = \"*\", color",
"= x[1:] - x[:-1] dx[0] = dx[1] dx[-1] = dx[-2] xnew[:-1] = x",
"np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)), int(np.sqrt(np.size(k27) ))) k27_vals = bins_from_centers(k27_centers) LW_centers = np.linspace(np.log10(np.min(LW)), np.log10(np.max(LW)), int(np.sqrt(np.size(LW)))) LW_vals",
"galaxy_analysis.plot.plot_styles import * import matplotlib.pyplot as plt import os, sys import numpy as",
"vmax = -2.8) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 = divider.append_axes('right',",
"import os, sys import numpy as np from mpl_toolkits.axes_grid1 import make_axes_locatable def bins_from_centers(x):",
"10.**(LW_center_mesh), 10.0**(k27_center_mesh), np.log10(z_mesh.T), levels = [-8,-7,-6,-5,-4,-3], colors = 'black', linewidths = 3, linestyles",
"'black', linewidths = 3, linestyles = '-.') ax.scatter( [1,1,100,100], [1,100,1,100], s = 250,",
"x[-1] + 0.5*dx[-1] return xnew def plot_2d_histogram(datafile = 'all_runs_d_12.20.dat'): ylabel = r'log(H$^{-}$ Photodetachment",
"xlabel = \"log(LW Scale Factor)\" data = np.genfromtxt(datafile) # names = True) k27",
"np from mpl_toolkits.axes_grid1 import make_axes_locatable def bins_from_centers(x): xnew = np.zeros(len(x) + 1) dx",
"= x[-1] + 0.5*dx[-1] return xnew def plot_2d_histogram(datafile = 'all_runs_d_12.20.dat'): ylabel = r'log(H$^{-}$",
"'all_runs_d_12.20.dat'): ylabel = r'log(H$^{-}$ Photodetachment Scale Factor)' xlabel = \"log(LW Scale Factor)\" data",
"f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax = plt.subplots() fig.set_size_inches(8,8) img1 =",
"levels = [-8,-7,-6,-5,-4,-3], colors = 'black', linewidths = 3, linestyles = '-.') ax.scatter(",
"r'log(Temperature [K])') plt.minorticks_on() plt.tight_layout(h_pad = 0, w_pad = 0.05) fig.savefig(\"T.png\") plt.close() return if",
"= np.meshgrid(LW_centers, k27_centers) #f_H2[data['k27'] == 1.58489319] = 100.0 # flag to figure out",
"= plt.subplots() fig.set_size_inches(8,8) img1 = ax.pcolormesh(10.0**(LW_mesh), 10.0**(k27_mesh), np.log10(z_mesh.T), cmap = 'RdYlBu_r', vmin =",
"s = 250, marker = \"*\", color = \"white\") plt.minorticks_on() plt.tight_layout(h_pad = 0,",
"x[:-1] dx[0] = dx[1] dx[-1] = dx[-2] xnew[:-1] = x - 0.5*dx[:-1] xnew[-1]",
"z_mesh = f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax = plt.subplots() fig.set_size_inches(8,8)",
"= np.max(np.log10(z_mesh))) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider = make_axes_locatable(ax) cax1 = divider.append_axes('right', size",
"'5%', pad = 0.05) fig.colorbar(img1, cax=cax1, label = r'log(Temperature [K])') plt.minorticks_on() plt.tight_layout(h_pad =",
"xnew[-1] = x[-1] + 0.5*dx[-1] return xnew def plot_2d_histogram(datafile = 'all_runs_d_12.20.dat'): ylabel =",
"= data[:,0] LW = data[:,1] k27_centers = np.linspace(np.log10(np.min(k27)), np.log10(np.max(k27)), int(np.sqrt(np.size(k27) ))) k27_vals =",
"np.meshgrid(LW_vals, k27_vals) k27_center_mesh, LW_center_mesh = np.meshgrid(LW_centers, k27_centers) #f_H2[data['k27'] == 1.58489319] = 100.0 #",
"= data[:,2] z_mesh = f_H2.reshape( int(np.sqrt(np.size(k27))), int(np.sqrt(np.size(LW)))) #z_mesh = z[:-1,:-1] fig, ax =",
"0.05) fig.colorbar(img1, cax=cax1, label = r'log(f$_{\\rm H_2}$)') ax.contour( 10.**(LW_center_mesh), 10.0**(k27_center_mesh), np.log10(z_mesh.T), levels =",
"'RdYlBu_r', vmin = np.min(np.log10(z_mesh)), vmax = np.max(np.log10(z_mesh))) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) divider =",
"= 'all_runs_d_12.20.dat'): ylabel = r'log(H$^{-}$ Photodetachment Scale Factor)' xlabel = \"log(LW Scale Factor)\"",
"np.log10(z_mesh.T), cmap = 'magma', vmin = -9, vmax = -2.8) ax.semilogx() ax.semilogy() ax.set_xlabel(xlabel)"
] |
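The bin-edge helper above is the one non-obvious piece of the plotting script: pcolormesh wants N+1 edges for N data columns, while contour wants the N centers. Below is a minimal standalone check of that conversion; the function body is copied from the script, while the sample centers are made-up values for illustration only.

import numpy as np

def bins_from_centers(x):
    # Copied from the script above: N centers -> N+1 edges, padding half a
    # spacing at each end (end spacings duplicated from their neighbors).
    xnew = np.zeros(len(x) + 1)
    dx = np.zeros(len(x) + 1)
    dx[1:-1] = x[1:] - x[:-1]
    dx[0] = dx[1]
    dx[-1] = dx[-2]
    xnew[:-1] = x - 0.5*dx[:-1]
    xnew[-1] = x[-1] + 0.5*dx[-1]
    return xnew

centers = np.linspace(0.0, 2.0, 5)   # illustrative, e.g. log10 scale factors
edges = bins_from_centers(centers)
assert len(edges) == len(centers) + 1
# For uniform centers the edges straddle each center by half a step:
# [-0.25, 0.25, 0.75, 1.25, 1.75, 2.25]
print(edges)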
[
"import BaseModel from .x_lumped_no_current_collectors import NoCurrentCollector from .x_lumped_0D_current_collectors import CurrentCollector0D from .x_lumped_1D_current_collectors import",
"from .base_x_lumped import BaseModel from .x_lumped_no_current_collectors import NoCurrentCollector from .x_lumped_0D_current_collectors import CurrentCollector0D from",
".base_x_lumped import BaseModel from .x_lumped_no_current_collectors import NoCurrentCollector from .x_lumped_0D_current_collectors import CurrentCollector0D from .x_lumped_1D_current_collectors",
"from .x_lumped_no_current_collectors import NoCurrentCollector from .x_lumped_0D_current_collectors import CurrentCollector0D from .x_lumped_1D_current_collectors import CurrentCollector1D from",
"<gh_stars>1-10 from .base_x_lumped import BaseModel from .x_lumped_no_current_collectors import NoCurrentCollector from .x_lumped_0D_current_collectors import CurrentCollector0D",
".x_lumped_no_current_collectors import NoCurrentCollector from .x_lumped_0D_current_collectors import CurrentCollector0D from .x_lumped_1D_current_collectors import CurrentCollector1D from .x_lumped_2D_current_collectors",
"import NoCurrentCollector from .x_lumped_0D_current_collectors import CurrentCollector0D from .x_lumped_1D_current_collectors import CurrentCollector1D from .x_lumped_2D_current_collectors import",
"NoCurrentCollector from .x_lumped_0D_current_collectors import CurrentCollector0D from .x_lumped_1D_current_collectors import CurrentCollector1D from .x_lumped_2D_current_collectors import CurrentCollector2D",
"BaseModel from .x_lumped_no_current_collectors import NoCurrentCollector from .x_lumped_0D_current_collectors import CurrentCollector0D from .x_lumped_1D_current_collectors import CurrentCollector1D"
] |
[
"httplib import config def lookup(zip_code): api = httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code)) if 'error' not in",
"python # DickServ IRC Bot - Developed by acidvegas in Python (https://acid.vegas/dickserv) #",
"import config def lookup(zip_code): api = httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code)) if 'error' not in api:",
"temp = api['current_observation']['temp_f'] return 'The weather for {0}, {1}, {2} is {3} at",
"api['current_observation']['weather'] temp = api['current_observation']['temp_f'] return 'The weather for {0}, {1}, {2} is {3}",
"= api['current_observation']['weather'] temp = api['current_observation']['temp_f'] return 'The weather for {0}, {1}, {2} is",
"api = httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code)) if 'error' not in api: city = api['current_observation']['display_location']['city'] state",
"= httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code)) if 'error' not in api: city = api['current_observation']['display_location']['city'] state =",
"# DickServ IRC Bot - Developed by acidvegas in Python (https://acid.vegas/dickserv) # weather.py",
"zip_code)) if 'error' not in api: city = api['current_observation']['display_location']['city'] state = api['current_observation']['display_location']['state'] country",
"api['current_observation']['display_location']['state'] country = api['current_observation']['display_location']['country'] weather = api['current_observation']['weather'] temp = api['current_observation']['temp_f'] return 'The weather",
"in api: city = api['current_observation']['display_location']['city'] state = api['current_observation']['display_location']['state'] country = api['current_observation']['display_location']['country'] weather =",
"# weather.py import httplib import config def lookup(zip_code): api = httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code)) if",
"{0}, {1}, {2} is {3} at {4} F'.format(city, state, country, weather, temp) else:",
"Bot - Developed by acidvegas in Python (https://acid.vegas/dickserv) # weather.py import httplib import",
"if 'error' not in api: city = api['current_observation']['display_location']['city'] state = api['current_observation']['display_location']['state'] country =",
"weather = api['current_observation']['weather'] temp = api['current_observation']['temp_f'] return 'The weather for {0}, {1}, {2}",
"weather for {0}, {1}, {2} is {3} at {4} F'.format(city, state, country, weather,",
"api['current_observation']['temp_f'] return 'The weather for {0}, {1}, {2} is {3} at {4} F'.format(city,",
"Developed by acidvegas in Python (https://acid.vegas/dickserv) # weather.py import httplib import config def",
"#!/usr/bin/env python # DickServ IRC Bot - Developed by acidvegas in Python (https://acid.vegas/dickserv)",
"lookup(zip_code): api = httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code)) if 'error' not in api: city = api['current_observation']['display_location']['city']",
"not in api: city = api['current_observation']['display_location']['city'] state = api['current_observation']['display_location']['state'] country = api['current_observation']['display_location']['country'] weather",
"api['current_observation']['display_location']['country'] weather = api['current_observation']['weather'] temp = api['current_observation']['temp_f'] return 'The weather for {0}, {1},",
"IRC Bot - Developed by acidvegas in Python (https://acid.vegas/dickserv) # weather.py import httplib",
"state = api['current_observation']['display_location']['state'] country = api['current_observation']['display_location']['country'] weather = api['current_observation']['weather'] temp = api['current_observation']['temp_f'] return",
"country = api['current_observation']['display_location']['country'] weather = api['current_observation']['weather'] temp = api['current_observation']['temp_f'] return 'The weather for",
"'The weather for {0}, {1}, {2} is {3} at {4} F'.format(city, state, country,",
"DickServ IRC Bot - Developed by acidvegas in Python (https://acid.vegas/dickserv) # weather.py import",
"'error' not in api: city = api['current_observation']['display_location']['city'] state = api['current_observation']['display_location']['state'] country = api['current_observation']['display_location']['country']",
"(https://acid.vegas/dickserv) # weather.py import httplib import config def lookup(zip_code): api = httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code))",
"city = api['current_observation']['display_location']['city'] state = api['current_observation']['display_location']['state'] country = api['current_observation']['display_location']['country'] weather = api['current_observation']['weather'] temp",
"config def lookup(zip_code): api = httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code)) if 'error' not in api: city",
"in Python (https://acid.vegas/dickserv) # weather.py import httplib import config def lookup(zip_code): api =",
"api: city = api['current_observation']['display_location']['city'] state = api['current_observation']['display_location']['state'] country = api['current_observation']['display_location']['country'] weather = api['current_observation']['weather']",
"- Developed by acidvegas in Python (https://acid.vegas/dickserv) # weather.py import httplib import config",
"api['current_observation']['display_location']['city'] state = api['current_observation']['display_location']['state'] country = api['current_observation']['display_location']['country'] weather = api['current_observation']['weather'] temp = api['current_observation']['temp_f']",
"weather.py import httplib import config def lookup(zip_code): api = httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code)) if 'error'",
"{1}, {2} is {3} at {4} F'.format(city, state, country, weather, temp) else: return",
"Python (https://acid.vegas/dickserv) # weather.py import httplib import config def lookup(zip_code): api = httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key,",
"{2} is {3} at {4} F'.format(city, state, country, weather, temp) else: return False",
"= api['current_observation']['display_location']['country'] weather = api['current_observation']['weather'] temp = api['current_observation']['temp_f'] return 'The weather for {0},",
"import httplib import config def lookup(zip_code): api = httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code)) if 'error' not",
"= api['current_observation']['display_location']['city'] state = api['current_observation']['display_location']['state'] country = api['current_observation']['display_location']['country'] weather = api['current_observation']['weather'] temp =",
"httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code)) if 'error' not in api: city = api['current_observation']['display_location']['city'] state = api['current_observation']['display_location']['state']",
"= api['current_observation']['display_location']['state'] country = api['current_observation']['display_location']['country'] weather = api['current_observation']['weather'] temp = api['current_observation']['temp_f'] return 'The",
"= api['current_observation']['temp_f'] return 'The weather for {0}, {1}, {2} is {3} at {4}",
"by acidvegas in Python (https://acid.vegas/dickserv) # weather.py import httplib import config def lookup(zip_code):",
"for {0}, {1}, {2} is {3} at {4} F'.format(city, state, country, weather, temp)",
"acidvegas in Python (https://acid.vegas/dickserv) # weather.py import httplib import config def lookup(zip_code): api",
"return 'The weather for {0}, {1}, {2} is {3} at {4} F'.format(city, state,",
"def lookup(zip_code): api = httplib.get_json('http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format(config.api.wunderground_api_key, zip_code)) if 'error' not in api: city ="
] |
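lookup() leans on the bot's own httplib.get_json wrapper, which is not shown in this module. A rough stdlib-only sketch of what such a helper could look like follows, assuming a plain GET that returns JSON; the helper name and API key are placeholders, and the Wunderground endpoint itself has since been retired, so treat this as a sketch of the pattern rather than a working call.

import json
import urllib.request

def get_json(url):
    # Hypothetical stand-in for the bot's httplib.get_json helper:
    # fetch a URL and decode the response body as JSON.
    with urllib.request.urlopen(url, timeout=10) as response:
        return json.loads(response.read().decode('utf-8'))

# Illustrative only: 'YOUR_KEY' is a placeholder and the endpoint is defunct.
url = 'http://api.wunderground.com/api/{0}/conditions/q/{1}.json'.format('YOUR_KEY', '10001')
# api = get_json(url)  # would yield the dict that lookup() walks above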
[
"by frequency \"\"\" from itertools import chain def combined_data(*dictionary): d = dict() for",
"d.setdefault(key, 0) + dict_temp[key] return { k: v for k, v in sorted(d.items(),",
"x[1], reverse=True)) d1 = { 'python': 10, 'java': 3, 'c#': 8, 'javascript': 15",
"dictionary: for key in dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key] return {",
"to analyze this data and return a dictionary that contains words and their",
"- Your job is to combine this data to create a single dictionary",
"data spread across multiple servers. Each server is able to analyze this data",
"that contains words and their frequency. - Your job is to combine this",
"frequency. - Your job is to combine this data to create a single",
"in dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key] return { k: v for",
"combined_data1(*dictionary): d = dict() for dict_temp in dictionary: for key in dict_temp: d[key]",
"Your job is to combine this data to create a single dictionary that",
"sorted(d.items(), key=lambda x: x[1], reverse=True) } def combined_data1(*dictionary): d = dict() for dict_temp",
"dict_temp[key] return dict(sorted(d.items(), key=lambda x: x[1], reverse=True)) d1 = { 'python': 10, 'java':",
"text data spread across multiple servers. Each server is able to analyze this",
"{ 'java': 10, 'c++': 10, 'c#': 4, 'go': 9, 'python': 6 } d3",
"have text data spread across multiple servers. Each server is able to analyze",
"} d = combined_data(d1, d2, d3) print(d) d1 = combined_data1(d1, d2, d3) print(d1)",
"dictionary that contains all the words and their combined frequencies from all these",
"able to analyze this data and return a dictionary that contains words and",
"words and their frequency. - Your job is to combine this data to",
"multiple servers. Each server is able to analyze this data and return a",
"dictionary sorted by frequency \"\"\" from itertools import chain def combined_data(*dictionary): d =",
"'javascript': 15 } d2 = { 'java': 10, 'c++': 10, 'c#': 4, 'go':",
"combine this data to create a single dictionary that contains all the words",
"'c++': 10, 'c#': 4, 'go': 9, 'python': 6 } d3 = { 'erlang':",
"frequency \"\"\" from itertools import chain def combined_data(*dictionary): d = dict() for dict_temp",
"= { 'java': 10, 'c++': 10, 'c#': 4, 'go': 9, 'python': 6 }",
"= d.setdefault(key, 0) + dict_temp[key] return dict(sorted(d.items(), key=lambda x: x[1], reverse=True)) d1 =",
"itertools import chain def combined_data(*dictionary): d = dict() for dict_temp in dictionary: for",
"'c#': 4, 'go': 9, 'python': 6 } d3 = { 'erlang': 5, 'haskell':",
"= dict() for dict_temp in dictionary: for key in dict_temp: d[key] = d.setdefault(key,",
"words and their combined frequencies from all these data sources - Bonus points",
"import chain def combined_data(*dictionary): d = dict() for dict_temp in dictionary: for key",
"} def combined_data1(*dictionary): d = dict() for dict_temp in dictionary: for key in",
"'java': 10, 'c++': 10, 'c#': 4, 'go': 9, 'python': 6 } d3 =",
"v for k, v in sorted(d.items(), key=lambda x: x[1], reverse=True) } def combined_data1(*dictionary):",
"= d.setdefault(key, 0) + dict_temp[key] return { k: v for k, v in",
"Each server is able to analyze this data and return a dictionary that",
"1 } d = combined_data(d1, d2, d3) print(d) d1 = combined_data1(d1, d2, d3)",
"{ k: v for k, v in sorted(d.items(), key=lambda x: x[1], reverse=True) }",
"d.setdefault(key, 0) + dict_temp[key] return dict(sorted(d.items(), key=lambda x: x[1], reverse=True)) d1 = {",
"v in sorted(d.items(), key=lambda x: x[1], reverse=True) } def combined_data1(*dictionary): d = dict()",
"their frequency. - Your job is to combine this data to create a",
"frequencies from all these data sources - Bonus points if you can make",
"'pascal': 1 } d = combined_data(d1, d2, d3) print(d) d1 = combined_data1(d1, d2,",
"sorted by frequency \"\"\" from itertools import chain def combined_data(*dictionary): d = dict()",
"this data to create a single dictionary that contains all the words and",
"9, 'python': 6 } d3 = { 'erlang': 5, 'haskell': 2, 'python': 1,",
"the words and their combined frequencies from all these data sources - Bonus",
"5, 'haskell': 2, 'python': 1, 'pascal': 1 } d = combined_data(d1, d2, d3)",
"x: x[1], reverse=True)) d1 = { 'python': 10, 'java': 3, 'c#': 8, 'javascript':",
"a single dictionary that contains all the words and their combined frequencies from",
"this data and return a dictionary that contains words and their frequency. -",
"10, 'java': 3, 'c#': 8, 'javascript': 15 } d2 = { 'java': 10,",
"3, 'c#': 8, 'javascript': 15 } d2 = { 'java': 10, 'c++': 10,",
"points if you can make your dictionary sorted by frequency \"\"\" from itertools",
"dict_temp in dictionary: for key in dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key]",
"8, 'javascript': 15 } d2 = { 'java': 10, 'c++': 10, 'c#': 4,",
"{ 'erlang': 5, 'haskell': 2, 'python': 1, 'pascal': 1 } d = combined_data(d1,",
"server is able to analyze this data and return a dictionary that contains",
"all these data sources - Bonus points if you can make your dictionary",
"- You have text data spread across multiple servers. Each server is able",
"15 } d2 = { 'java': 10, 'c++': 10, 'c#': 4, 'go': 9,",
"x: x[1], reverse=True) } def combined_data1(*dictionary): d = dict() for dict_temp in dictionary:",
"key=lambda x: x[1], reverse=True) } def combined_data1(*dictionary): d = dict() for dict_temp in",
"10, 'c++': 10, 'c#': 4, 'go': 9, 'python': 6 } d3 = {",
"k, v in sorted(d.items(), key=lambda x: x[1], reverse=True) } def combined_data1(*dictionary): d =",
"'java': 3, 'c#': 8, 'javascript': 15 } d2 = { 'java': 10, 'c++':",
"across multiple servers. Each server is able to analyze this data and return",
"You have text data spread across multiple servers. Each server is able to",
"single dictionary that contains all the words and their combined frequencies from all",
"\"\"\" from itertools import chain def combined_data(*dictionary): d = dict() for dict_temp in",
"for k, v in sorted(d.items(), key=lambda x: x[1], reverse=True) } def combined_data1(*dictionary): d",
"0) + dict_temp[key] return dict(sorted(d.items(), key=lambda x: x[1], reverse=True)) d1 = { 'python':",
"k: v for k, v in sorted(d.items(), key=lambda x: x[1], reverse=True) } def",
"'erlang': 5, 'haskell': 2, 'python': 1, 'pascal': 1 } d = combined_data(d1, d2,",
"data and return a dictionary that contains words and their frequency. - Your",
"{ 'python': 10, 'java': 3, 'c#': 8, 'javascript': 15 } d2 = {",
"from itertools import chain def combined_data(*dictionary): d = dict() for dict_temp in dictionary:",
"'python': 1, 'pascal': 1 } d = combined_data(d1, d2, d3) print(d) d1 =",
"chain def combined_data(*dictionary): d = dict() for dict_temp in dictionary: for key in",
"def combined_data1(*dictionary): d = dict() for dict_temp in dictionary: for key in dict_temp:",
"'python': 10, 'java': 3, 'c#': 8, 'javascript': 15 } d2 = { 'java':",
"dict(sorted(d.items(), key=lambda x: x[1], reverse=True)) d1 = { 'python': 10, 'java': 3, 'c#':",
"you can make your dictionary sorted by frequency \"\"\" from itertools import chain",
"10, 'c#': 4, 'go': 9, 'python': 6 } d3 = { 'erlang': 5,",
"def combined_data(*dictionary): d = dict() for dict_temp in dictionary: for key in dict_temp:",
"'python': 6 } d3 = { 'erlang': 5, 'haskell': 2, 'python': 1, 'pascal':",
"if you can make your dictionary sorted by frequency \"\"\" from itertools import",
"in dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key] return dict(sorted(d.items(), key=lambda x: x[1],",
"dictionary: for key in dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key] return dict(sorted(d.items(),",
"d2 = { 'java': 10, 'c++': 10, 'c#': 4, 'go': 9, 'python': 6",
"+ dict_temp[key] return { k: v for k, v in sorted(d.items(), key=lambda x:",
"d3 = { 'erlang': 5, 'haskell': 2, 'python': 1, 'pascal': 1 } d",
"d = dict() for dict_temp in dictionary: for key in dict_temp: d[key] =",
"and their frequency. - Your job is to combine this data to create",
"} d2 = { 'java': 10, 'c++': 10, 'c#': 4, 'go': 9, 'python':",
"a dictionary that contains words and their frequency. - Your job is to",
"to combine this data to create a single dictionary that contains all the",
"combined frequencies from all these data sources - Bonus points if you can",
"dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key] return dict(sorted(d.items(), key=lambda x: x[1], reverse=True))",
"dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key] return { k: v for k,",
"+ dict_temp[key] return dict(sorted(d.items(), key=lambda x: x[1], reverse=True)) d1 = { 'python': 10,",
"contains words and their frequency. - Your job is to combine this data",
"key in dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key] return dict(sorted(d.items(), key=lambda x:",
"create a single dictionary that contains all the words and their combined frequencies",
"analyze this data and return a dictionary that contains words and their frequency.",
"your dictionary sorted by frequency \"\"\" from itertools import chain def combined_data(*dictionary): d",
"d1 = { 'python': 10, 'java': 3, 'c#': 8, 'javascript': 15 } d2",
"1, 'pascal': 1 } d = combined_data(d1, d2, d3) print(d) d1 = combined_data1(d1,",
"6 } d3 = { 'erlang': 5, 'haskell': 2, 'python': 1, 'pascal': 1",
"d[key] = d.setdefault(key, 0) + dict_temp[key] return dict(sorted(d.items(), key=lambda x: x[1], reverse=True)) d1",
"from all these data sources - Bonus points if you can make your",
"dict_temp[key] return { k: v for k, v in sorted(d.items(), key=lambda x: x[1],",
"2, 'python': 1, 'pascal': 1 } d = combined_data(d1, d2, d3) print(d) d1",
"= { 'python': 10, 'java': 3, 'c#': 8, 'javascript': 15 } d2 =",
"reverse=True) } def combined_data1(*dictionary): d = dict() for dict_temp in dictionary: for key",
"return a dictionary that contains words and their frequency. - Your job is",
"to create a single dictionary that contains all the words and their combined",
"Bonus points if you can make your dictionary sorted by frequency \"\"\" from",
"data to create a single dictionary that contains all the words and their",
"in sorted(d.items(), key=lambda x: x[1], reverse=True) } def combined_data1(*dictionary): d = dict() for",
"and return a dictionary that contains words and their frequency. - Your job",
"all the words and their combined frequencies from all these data sources -",
"spread across multiple servers. Each server is able to analyze this data and",
"combined_data(*dictionary): d = dict() for dict_temp in dictionary: for key in dict_temp: d[key]",
"is to combine this data to create a single dictionary that contains all",
"d[key] = d.setdefault(key, 0) + dict_temp[key] return { k: v for k, v",
"servers. Each server is able to analyze this data and return a dictionary",
"sources - Bonus points if you can make your dictionary sorted by frequency",
"for key in dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key] return { k:",
"for key in dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key] return dict(sorted(d.items(), key=lambda",
"in dictionary: for key in dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key] return",
"4, 'go': 9, 'python': 6 } d3 = { 'erlang': 5, 'haskell': 2,",
"= { 'erlang': 5, 'haskell': 2, 'python': 1, 'pascal': 1 } d =",
"their combined frequencies from all these data sources - Bonus points if you",
"is able to analyze this data and return a dictionary that contains words",
"key in dict_temp: d[key] = d.setdefault(key, 0) + dict_temp[key] return { k: v",
"'c#': 8, 'javascript': 15 } d2 = { 'java': 10, 'c++': 10, 'c#':",
"key=lambda x: x[1], reverse=True)) d1 = { 'python': 10, 'java': 3, 'c#': 8,",
"for dict_temp in dictionary: for key in dict_temp: d[key] = d.setdefault(key, 0) +",
"that contains all the words and their combined frequencies from all these data",
"contains all the words and their combined frequencies from all these data sources",
"- Bonus points if you can make your dictionary sorted by frequency \"\"\"",
"data sources - Bonus points if you can make your dictionary sorted by",
"x[1], reverse=True) } def combined_data1(*dictionary): d = dict() for dict_temp in dictionary: for",
"these data sources - Bonus points if you can make your dictionary sorted",
"'go': 9, 'python': 6 } d3 = { 'erlang': 5, 'haskell': 2, 'python':",
"} d3 = { 'erlang': 5, 'haskell': 2, 'python': 1, 'pascal': 1 }",
"and their combined frequencies from all these data sources - Bonus points if",
"return { k: v for k, v in sorted(d.items(), key=lambda x: x[1], reverse=True)",
"reverse=True)) d1 = { 'python': 10, 'java': 3, 'c#': 8, 'javascript': 15 }",
"make your dictionary sorted by frequency \"\"\" from itertools import chain def combined_data(*dictionary):",
"job is to combine this data to create a single dictionary that contains",
"\"\"\" - You have text data spread across multiple servers. Each server is",
"0) + dict_temp[key] return { k: v for k, v in sorted(d.items(), key=lambda",
"dict() for dict_temp in dictionary: for key in dict_temp: d[key] = d.setdefault(key, 0)",
"return dict(sorted(d.items(), key=lambda x: x[1], reverse=True)) d1 = { 'python': 10, 'java': 3,",
"'haskell': 2, 'python': 1, 'pascal': 1 } d = combined_data(d1, d2, d3) print(d)",
"can make your dictionary sorted by frequency \"\"\" from itertools import chain def",
"dictionary that contains words and their frequency. - Your job is to combine"
] |
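Both implementations above do the merge-and-sum by hand; collections.Counter covers the same ground in a few lines. A sketch of that alternative follows (combined_data_counter is a new name introduced here, not part of the original exercise).

from collections import Counter

def combined_data_counter(*dictionaries):
    total = Counter()
    for d in dictionaries:
        total.update(d)          # adds counts key-by-key
    # most_common() already yields (key, count) pairs sorted by frequency
    return dict(total.most_common())

d1 = { 'python': 10, 'java': 3, 'c#': 8, 'javascript': 15 }
d2 = { 'java': 10, 'c++': 10, 'c#': 4, 'go': 9, 'python': 6 }
d3 = { 'erlang': 5, 'haskell': 2, 'python': 1, 'pascal': 1 }
print(combined_data_counter(d1, d2, d3))
# {'python': 17, 'javascript': 15, 'java': 13, 'c#': 12, 'c++': 10, ...}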
[
"}, { 'build': 'linux-gcc-4x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-4x-x86', 'env': 'linux' },",
"required AWS CodeBuild projects for a repo') parser.add_argument('project', type=str, help='The name of the",
"= build # Connect to codebuild session = boto3.Session(profile_name=args.profile, region_name='us-east-1') codebuild = session.client('codebuild')",
"}, 'windows-2015': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False,",
"'type': 'NO_ARTIFACTS', }, 'environment': None, 'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled': False, } # The common",
"or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. import argparse import boto3",
"Replace all templates with the values above def do_replace(obj): if isinstance(obj, dict): for",
"default='default', help='The profile in ~/.aws/credentials to use when creating the jobs') args =",
"all_project_names = list(BUILDS.keys()) existing_projects = codebuild.batch_get_projects(names=all_project_names) new_projects = existing_projects['projectsNotFound'] existing_projects = [project['name'] for",
"config['build'] build = dict(CREATE_PARAM_TEMPLATE) env = dict(ENVIRONMENTS[config['env']]) if 'privileged' in config: env['privilegedMode'] =",
"account that owns the repo') parser.add_argument('--profile', type=str, default='default', help='The profile in ~/.aws/credentials to",
"# Parse required options parser = argparse.ArgumentParser(description='Creates all required AWS CodeBuild projects for",
"argparse.ArgumentParser(description='Creates all required AWS CodeBuild projects for a repo') parser.add_argument('project', type=str, help='The name",
"'source': { 'type': 'GITHUB', 'location': 'https://github.com/{account}/{project}.git', 'gitCloneDepth': 1, 'buildspec': 'codebuild/{build}.yml', 'auth': { 'type':",
"to feed to CodeBuild ENVIRONMENTS = { 'linux': { 'type': 'LINUX_CONTAINER', 'image': 'aws/codebuild/ubuntu-base:14.04',",
"The list of all of our build configs paired with their environments BUILD_CONFIGS",
"AWS CodeBuild projects for a repo') parser.add_argument('project', type=str, help='The name of the repo",
"'NO_ARTIFACTS', }, 'environment': None, 'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled': False, } # The common enviroment",
"= {} for config in BUILD_CONFIGS: build_name = config['build'] build = dict(CREATE_PARAM_TEMPLATE) env",
"out which projects already exist and should be updated, and which must be",
"}, { 'build': 'windows-msvc-2017', 'env': 'windows-2017' }, { 'build': 'windows-msvc-2015', 'env': 'windows-2015' },",
"{ 'project': args.project, 'build': build_name, 'account': args.github_account, } # Replace all templates with",
"import argparse import boto3 # Parse required options parser = argparse.ArgumentParser(description='Creates all required",
"'environmentVariables': [], 'privilegedMode': False, }, 'windows-2017': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM',",
"all final build objects BUILDS = {} for config in BUILD_CONFIGS: build_name =",
"help='The GitHub account that owns the repo') parser.add_argument('--profile', type=str, default='default', help='The profile in",
"False, } # The common enviroment objects to feed to CodeBuild ENVIRONMENTS =",
"'linux' }, { 'build': 'linux-clang6-x64', 'env': 'linux', 'privileged': True }, { 'build': 'linux-gcc-4x-x64',",
"affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. import argparse import boto3 # Parse",
"CREATE_PARAM_TEMPLATE = { 'name': '{project}-{build}', 'source': { 'type': 'GITHUB', 'location': 'https://github.com/{account}/{project}.git', 'gitCloneDepth': 1,",
"True, }, 'artifacts': { 'type': 'NO_ARTIFACTS', }, 'environment': None, 'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled': False,",
"{ 'build': 'windows-msvc-2017', 'env': 'windows-2017' }, { 'build': 'windows-msvc-2015', 'env': 'windows-2015' }, {",
"help='The name of the repo to create the projects for') parser.add_argument('--github-account', type=str, dest='github_account',",
"'OAUTH', }, 'reportBuildStatus': True, }, 'artifacts': { 'type': 'NO_ARTIFACTS', }, 'environment': None, 'serviceRole':",
"build objects BUILDS = {} for config in BUILD_CONFIGS: build_name = config['build'] build",
"{ 'build': 'linux-clang3-x64', 'env': 'linux' }, { 'build': 'linux-clang6-x64', 'env': 'linux', 'privileged': True",
"parser.add_argument('--github-account', type=str, dest='github_account', default='awslabs', help='The GitHub account that owns the repo') parser.add_argument('--profile', type=str,",
"isinstance(obj, str): return obj.format(**sub_params) else: return obj do_replace(build) BUILDS['{}-{}'.format(args.project, build_name)] = build #",
"exist and should be updated, and which must be created all_project_names = list(BUILDS.keys())",
"CodeBuild ENVIRONMENTS = { 'linux': { 'type': 'LINUX_CONTAINER', 'image': 'aws/codebuild/ubuntu-base:14.04', 'computeType': 'BUILD_GENERAL1_SMALL', 'environmentVariables':",
"dict(ENVIRONMENTS[config['env']]) if 'privileged' in config: env['privilegedMode'] = config['privileged'] build['environment'] = env sub_params =",
"argparse import boto3 # Parse required options parser = argparse.ArgumentParser(description='Creates all required AWS",
"with all final build objects BUILDS = {} for config in BUILD_CONFIGS: build_name",
"{ 'build': 'linux-gcc-5x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-6x-x64', 'env': 'linux' }, {",
"codebuild = session.client('codebuild') # Find out which projects already exist and should be",
"= existing_projects['projectsNotFound'] existing_projects = [project['name'] for project in existing_projects['projects']] # Actually create the",
"'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, } #",
"should be updated, and which must be created all_project_names = list(BUILDS.keys()) existing_projects =",
"{ 'build': 'linux-gcc-6x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-7x-x64', 'env': 'linux' }, {",
"'env': 'windows-2017' }, { 'build': 'windows-msvc-2015', 'env': 'windows-2015' }, { 'build': 'windows-msvc-2015-x86', 'env':",
"{ 'type': 'OAUTH', }, 'reportBuildStatus': True, }, 'artifacts': { 'type': 'NO_ARTIFACTS', }, 'environment':",
"args.project, 'build': build_name, 'account': args.github_account, } # Replace all templates with the values",
"all required AWS CodeBuild projects for a repo') parser.add_argument('project', type=str, help='The name of",
"'env': 'linux' }, { 'build': 'linux-gcc-7x-x64', 'env': 'linux' }, { 'build': 'windows-msvc-2017', 'env':",
"= codebuild.batch_get_projects(names=all_project_names) new_projects = existing_projects['projectsNotFound'] existing_projects = [project['name'] for project in existing_projects['projects']] #",
"build_name, 'account': args.github_account, } # Replace all templates with the values above def",
"'privileged' in config: env['privilegedMode'] = config['privileged'] build['environment'] = env sub_params = { 'project':",
"'buildspec': 'codebuild/{build}.yml', 'auth': { 'type': 'OAUTH', }, 'reportBuildStatus': True, }, 'artifacts': { 'type':",
"} # Replace all templates with the values above def do_replace(obj): if isinstance(obj,",
"'https://github.com/{account}/{project}.git', 'gitCloneDepth': 1, 'buildspec': 'codebuild/{build}.yml', 'auth': { 'type': 'OAUTH', }, 'reportBuildStatus': True, },",
"do_replace(build) BUILDS['{}-{}'.format(args.project, build_name)] = build # Connect to codebuild session = boto3.Session(profile_name=args.profile, region_name='us-east-1')",
"'GITHUB', 'location': 'https://github.com/{account}/{project}.git', 'gitCloneDepth': 1, 'buildspec': 'codebuild/{build}.yml', 'auth': { 'type': 'OAUTH', }, 'reportBuildStatus':",
"type=str, help='The name of the repo to create the projects for') parser.add_argument('--github-account', type=str,",
"'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled': False, } # The common enviroment objects to feed to",
"in BUILDS.items(): if build_name in new_projects: print('{}: Creating'.format(build_name)) codebuild.create_project(**build) codebuild.create_webhook(projectName=build_name) elif build_name in",
"{ 'linux': { 'type': 'LINUX_CONTAINER', 'image': 'aws/codebuild/ubuntu-base:14.04', 'computeType': 'BUILD_GENERAL1_SMALL', 'environmentVariables': [], 'privilegedMode': False,",
"create the projects for') parser.add_argument('--github-account', type=str, dest='github_account', default='awslabs', help='The GitHub account that owns",
"arguments to be passed to create_project CREATE_PARAM_TEMPLATE = { 'name': '{project}-{build}', 'source': {",
"False, }, } # The list of all of our build configs paired",
"'BUILD_GENERAL1_SMALL', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2017': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType':",
"build in BUILDS.items(): if build_name in new_projects: print('{}: Creating'.format(build_name)) codebuild.create_project(**build) codebuild.create_webhook(projectName=build_name) elif build_name",
"'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2015': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType':",
"values above def do_replace(obj): if isinstance(obj, dict): for key, value in obj.items(): obj[key]",
"'env': 'linux' }, { 'build': 'linux-clang6-x64', 'env': 'linux', 'privileged': True }, { 'build':",
"be updated, and which must be created all_project_names = list(BUILDS.keys()) existing_projects = codebuild.batch_get_projects(names=all_project_names)",
"env = dict(ENVIRONMENTS[config['env']]) if 'privileged' in config: env['privilegedMode'] = config['privileged'] build['environment'] = env",
"to be passed to create_project CREATE_PARAM_TEMPLATE = { 'name': '{project}-{build}', 'source': { 'type':",
"'type': 'LINUX_CONTAINER', 'image': 'aws/codebuild/ubuntu-base:14.04', 'computeType': 'BUILD_GENERAL1_SMALL', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2017': {",
"of the repo to create the projects for') parser.add_argument('--github-account', type=str, dest='github_account', default='awslabs', help='The",
"type=str, default='default', help='The profile in ~/.aws/credentials to use when creating the jobs') args",
"'env': 'linux' }, { 'build': 'windows-msvc-2017', 'env': 'windows-2017' }, { 'build': 'windows-msvc-2015', 'env':",
"to use when creating the jobs') args = parser.parse_args() # The template for",
"[project['name'] for project in existing_projects['projects']] # Actually create the projects for build_name, build",
"profile in ~/.aws/credentials to use when creating the jobs') args = parser.parse_args() #",
"'build': 'linux-gcc-4x-x86', 'env': 'linux' }, { 'build': 'linux-gcc-5x-x64', 'env': 'linux' }, { 'build':",
"Find out which projects already exist and should be updated, and which must",
"'location': 'https://github.com/{account}/{project}.git', 'gitCloneDepth': 1, 'buildspec': 'codebuild/{build}.yml', 'auth': { 'type': 'OAUTH', }, 'reportBuildStatus': True,",
"[ { 'build': 'linux-clang3-x64', 'env': 'linux' }, { 'build': 'linux-clang6-x64', 'env': 'linux', 'privileged':",
"with their environments BUILD_CONFIGS = [ { 'build': 'linux-clang3-x64', 'env': 'linux' }, {",
"'linux', 'privileged': True }, { 'build': 'linux-gcc-4x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-4x-x86',",
"{ 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, }",
"'linux-gcc-4x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-4x-x86', 'env': 'linux' }, { 'build': 'linux-gcc-5x-x64',",
"of our build configs paired with their environments BUILD_CONFIGS = [ { 'build':",
"populate the BUILDS list with all final build objects BUILDS = {} for",
"do_replace(obj): if isinstance(obj, dict): for key, value in obj.items(): obj[key] = do_replace(value) return",
"'env': 'linux', 'privileged': True }, { 'build': 'linux-gcc-4x-x64', 'env': 'linux' }, { 'build':",
"new_projects: print('{}: Creating'.format(build_name)) codebuild.create_project(**build) codebuild.create_webhook(projectName=build_name) elif build_name in existing_projects: print('{}: Updating'.format(build_name)) codebuild.update_project(**build) else:",
"[], 'privilegedMode': False, }, } # The list of all of our build",
"config['privileged'] build['environment'] = env sub_params = { 'project': args.project, 'build': build_name, 'account': args.github_account,",
"'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2015': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest',",
"True }, { 'build': 'linux-gcc-4x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-4x-x86', 'env': 'linux'",
"'env': 'linux' }, { 'build': 'linux-gcc-6x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-7x-x64', 'env':",
"which projects already exist and should be updated, and which must be created",
"'build': 'linux-gcc-5x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-6x-x64', 'env': 'linux' }, { 'build':",
"session.client('codebuild') # Find out which projects already exist and should be updated, and",
"for project in existing_projects['projects']] # Actually create the projects for build_name, build in",
"BUILDS.items(): if build_name in new_projects: print('{}: Creating'.format(build_name)) codebuild.create_project(**build) codebuild.create_webhook(projectName=build_name) elif build_name in existing_projects:",
"new_projects = existing_projects['projectsNotFound'] existing_projects = [project['name'] for project in existing_projects['projects']] # Actually create",
"'windows-msvc-2017', 'env': 'windows-2017' }, { 'build': 'windows-msvc-2015', 'env': 'windows-2015' }, { 'build': 'windows-msvc-2015-x86',",
"'windows-2015' }, { 'build': 'windows-msvc-2015-x86', 'env': 'windows-2015' }, ] # Fully populate the",
"env sub_params = { 'project': args.project, 'build': build_name, 'account': args.github_account, } # Replace",
"GitHub account that owns the repo') parser.add_argument('--profile', type=str, default='default', help='The profile in ~/.aws/credentials",
"do_replace(value) return obj elif isinstance(obj, str): return obj.format(**sub_params) else: return obj do_replace(build) BUILDS['{}-{}'.format(args.project,",
"}, 'artifacts': { 'type': 'NO_ARTIFACTS', }, 'environment': None, 'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled': False, }",
"build configs paired with their environments BUILD_CONFIGS = [ { 'build': 'linux-clang3-x64', 'env':",
"= dict(CREATE_PARAM_TEMPLATE) env = dict(ENVIRONMENTS[config['env']]) if 'privileged' in config: env['privilegedMode'] = config['privileged'] build['environment']",
"'123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2015': { 'type': 'WINDOWS_CONTAINER', 'image':",
"{ 'build': 'linux-clang6-x64', 'env': 'linux', 'privileged': True }, { 'build': 'linux-gcc-4x-x64', 'env': 'linux'",
"Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. import argparse",
"'build': 'windows-msvc-2015-x86', 'env': 'windows-2015' }, ] # Fully populate the BUILDS list with",
"that owns the repo') parser.add_argument('--profile', type=str, default='default', help='The profile in ~/.aws/credentials to use",
"}, ] # Fully populate the BUILDS list with all final build objects",
"{ 'build': 'windows-msvc-2015-x86', 'env': 'windows-2015' }, ] # Fully populate the BUILDS list",
"# The common enviroment objects to feed to CodeBuild ENVIRONMENTS = { 'linux':",
"and should be updated, and which must be created all_project_names = list(BUILDS.keys()) existing_projects",
"'windows-2017' }, { 'build': 'windows-msvc-2015', 'env': 'windows-2015' }, { 'build': 'windows-msvc-2015-x86', 'env': 'windows-2015'",
"= config['privileged'] build['environment'] = env sub_params = { 'project': args.project, 'build': build_name, 'account':",
"}, { 'build': 'linux-gcc-7x-x64', 'env': 'linux' }, { 'build': 'windows-msvc-2017', 'env': 'windows-2017' },",
"= do_replace(value) return obj elif isinstance(obj, str): return obj.format(**sub_params) else: return obj do_replace(build)",
"sub_params = { 'project': args.project, 'build': build_name, 'account': args.github_account, } # Replace all",
"'privilegedMode': False, }, 'windows-2015': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [],",
"if isinstance(obj, dict): for key, value in obj.items(): obj[key] = do_replace(value) return obj",
"args.github_account, } # Replace all templates with the values above def do_replace(obj): if",
"} # The list of all of our build configs paired with their",
"'LINUX_CONTAINER', 'image': 'aws/codebuild/ubuntu-base:14.04', 'computeType': 'BUILD_GENERAL1_SMALL', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2017': { 'type':",
"repo to create the projects for') parser.add_argument('--github-account', type=str, dest='github_account', default='awslabs', help='The GitHub account",
"the values above def do_replace(obj): if isinstance(obj, dict): for key, value in obj.items():",
"obj elif isinstance(obj, str): return obj.format(**sub_params) else: return obj do_replace(build) BUILDS['{}-{}'.format(args.project, build_name)] =",
"'linux' }, { 'build': 'linux-gcc-4x-x86', 'env': 'linux' }, { 'build': 'linux-gcc-5x-x64', 'env': 'linux'",
"'privilegedMode': False, }, } # The list of all of our build configs",
"'artifacts': { 'type': 'NO_ARTIFACTS', }, 'environment': None, 'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled': False, } #",
"'build': 'linux-gcc-6x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-7x-x64', 'env': 'linux' }, { 'build':",
"# Actually create the projects for build_name, build in BUILDS.items(): if build_name in",
"'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2015': { 'type':",
"= env sub_params = { 'project': args.project, 'build': build_name, 'account': args.github_account, } #",
"default='awslabs', help='The GitHub account that owns the repo') parser.add_argument('--profile', type=str, default='default', help='The profile",
"'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2015': {",
"boto3 # Parse required options parser = argparse.ArgumentParser(description='Creates all required AWS CodeBuild projects",
"{ 'build': 'linux-gcc-4x-x86', 'env': 'linux' }, { 'build': 'linux-gcc-5x-x64', 'env': 'linux' }, {",
"} # The common enviroment objects to feed to CodeBuild ENVIRONMENTS = {",
"'linux-clang6-x64', 'env': 'linux', 'privileged': True }, { 'build': 'linux-gcc-4x-x64', 'env': 'linux' }, {",
"already exist and should be updated, and which must be created all_project_names =",
"'build': 'linux-clang6-x64', 'env': 'linux', 'privileged': True }, { 'build': 'linux-gcc-4x-x64', 'env': 'linux' },",
"the repo to create the projects for') parser.add_argument('--github-account', type=str, dest='github_account', default='awslabs', help='The GitHub",
"{ 'build': 'windows-msvc-2015', 'env': 'windows-2015' }, { 'build': 'windows-msvc-2015-x86', 'env': 'windows-2015' }, ]",
"with the values above def do_replace(obj): if isinstance(obj, dict): for key, value in",
"Actually create the projects for build_name, build in BUILDS.items(): if build_name in new_projects:",
"= [project['name'] for project in existing_projects['projects']] # Actually create the projects for build_name,",
"be created all_project_names = list(BUILDS.keys()) existing_projects = codebuild.batch_get_projects(names=all_project_names) new_projects = existing_projects['projectsNotFound'] existing_projects =",
"in config: env['privilegedMode'] = config['privileged'] build['environment'] = env sub_params = { 'project': args.project,",
"'linux-gcc-6x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-7x-x64', 'env': 'linux' }, { 'build': 'windows-msvc-2017',",
"Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. import argparse import",
"def do_replace(obj): if isinstance(obj, dict): for key, value in obj.items(): obj[key] = do_replace(value)",
"templates with the values above def do_replace(obj): if isinstance(obj, dict): for key, value",
"projects for') parser.add_argument('--github-account', type=str, dest='github_account', default='awslabs', help='The GitHub account that owns the repo')",
"above def do_replace(obj): if isinstance(obj, dict): for key, value in obj.items(): obj[key] =",
"{ 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2015':",
"list with all final build objects BUILDS = {} for config in BUILD_CONFIGS:",
"All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. import argparse import boto3 # Parse required",
"which must be created all_project_names = list(BUILDS.keys()) existing_projects = codebuild.batch_get_projects(names=all_project_names) new_projects = existing_projects['projectsNotFound']",
"BUILD_CONFIGS = [ { 'build': 'linux-clang3-x64', 'env': 'linux' }, { 'build': 'linux-clang6-x64', 'env':",
"value in obj.items(): obj[key] = do_replace(value) return obj elif isinstance(obj, str): return obj.format(**sub_params)",
"}, 'windows-2017': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False,",
"config in BUILD_CONFIGS: build_name = config['build'] build = dict(CREATE_PARAM_TEMPLATE) env = dict(ENVIRONMENTS[config['env']]) if",
"'environmentVariables': [], 'privilegedMode': False, }, } # The list of all of our",
"existing_projects['projectsNotFound'] existing_projects = [project['name'] for project in existing_projects['projects']] # Actually create the projects",
"projects for a repo') parser.add_argument('project', type=str, help='The name of the repo to create",
"build['environment'] = env sub_params = { 'project': args.project, 'build': build_name, 'account': args.github_account, }",
"Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. import",
"for the arguments to be passed to create_project CREATE_PARAM_TEMPLATE = { 'name': '{project}-{build}',",
"to create_project CREATE_PARAM_TEMPLATE = { 'name': '{project}-{build}', 'source': { 'type': 'GITHUB', 'location': 'https://github.com/{account}/{project}.git',",
"'linux': { 'type': 'LINUX_CONTAINER', 'image': 'aws/codebuild/ubuntu-base:14.04', 'computeType': 'BUILD_GENERAL1_SMALL', 'environmentVariables': [], 'privilegedMode': False, },",
"type=str, dest='github_account', default='awslabs', help='The GitHub account that owns the repo') parser.add_argument('--profile', type=str, default='default',",
"'windows-2015': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, },",
"= { 'project': args.project, 'build': build_name, 'account': args.github_account, } # Replace all templates",
"paired with their environments BUILD_CONFIGS = [ { 'build': 'linux-clang3-x64', 'env': 'linux' },",
"all templates with the values above def do_replace(obj): if isinstance(obj, dict): for key,",
"'env': 'windows-2015' }, { 'build': 'windows-msvc-2015-x86', 'env': 'windows-2015' }, ] # Fully populate",
"enviroment objects to feed to CodeBuild ENVIRONMENTS = { 'linux': { 'type': 'LINUX_CONTAINER',",
"its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0. import argparse import boto3 #",
"'privileged': True }, { 'build': 'linux-gcc-4x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-4x-x86', 'env':",
"projects already exist and should be updated, and which must be created all_project_names",
"print('{}: Creating'.format(build_name)) codebuild.create_project(**build) codebuild.create_webhook(projectName=build_name) elif build_name in existing_projects: print('{}: Updating'.format(build_name)) codebuild.update_project(**build) else: assert",
"'linux-gcc-7x-x64', 'env': 'linux' }, { 'build': 'windows-msvc-2017', 'env': 'windows-2017' }, { 'build': 'windows-msvc-2015',",
"'gitCloneDepth': 1, 'buildspec': 'codebuild/{build}.yml', 'auth': { 'type': 'OAUTH', }, 'reportBuildStatus': True, }, 'artifacts':",
"'name': '{project}-{build}', 'source': { 'type': 'GITHUB', 'location': 'https://github.com/{account}/{project}.git', 'gitCloneDepth': 1, 'buildspec': 'codebuild/{build}.yml', 'auth':",
"'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2015': { 'type': 'WINDOWS_CONTAINER',",
"dict): for key, value in obj.items(): obj[key] = do_replace(value) return obj elif isinstance(obj,",
"help='The profile in ~/.aws/credentials to use when creating the jobs') args = parser.parse_args()",
"the BUILDS list with all final build objects BUILDS = {} for config",
"codebuild session = boto3.Session(profile_name=args.profile, region_name='us-east-1') codebuild = session.client('codebuild') # Find out which projects",
"and which must be created all_project_names = list(BUILDS.keys()) existing_projects = codebuild.batch_get_projects(names=all_project_names) new_projects =",
"}, { 'build': 'windows-msvc-2015', 'env': 'windows-2015' }, { 'build': 'windows-msvc-2015-x86', 'env': 'windows-2015' },",
"Apache-2.0. import argparse import boto3 # Parse required options parser = argparse.ArgumentParser(description='Creates all",
"parser = argparse.ArgumentParser(description='Creates all required AWS CodeBuild projects for a repo') parser.add_argument('project', type=str,",
"'linux' }, { 'build': 'linux-gcc-7x-x64', 'env': 'linux' }, { 'build': 'windows-msvc-2017', 'env': 'windows-2017'",
"'account': args.github_account, } # Replace all templates with the values above def do_replace(obj):",
"create_project CREATE_PARAM_TEMPLATE = { 'name': '{project}-{build}', 'source': { 'type': 'GITHUB', 'location': 'https://github.com/{account}/{project}.git', 'gitCloneDepth':",
"= dict(ENVIRONMENTS[config['env']]) if 'privileged' in config: env['privilegedMode'] = config['privileged'] build['environment'] = env sub_params",
"projects for build_name, build in BUILDS.items(): if build_name in new_projects: print('{}: Creating'.format(build_name)) codebuild.create_project(**build)",
"if 'privileged' in config: env['privilegedMode'] = config['privileged'] build['environment'] = env sub_params = {",
"1, 'buildspec': 'codebuild/{build}.yml', 'auth': { 'type': 'OAUTH', }, 'reportBuildStatus': True, }, 'artifacts': {",
"else: return obj do_replace(build) BUILDS['{}-{}'.format(args.project, build_name)] = build # Connect to codebuild session",
"'windows-2017': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, },",
"}, 'reportBuildStatus': True, }, 'artifacts': { 'type': 'NO_ARTIFACTS', }, 'environment': None, 'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole',",
"build = dict(CREATE_PARAM_TEMPLATE) env = dict(ENVIRONMENTS[config['env']]) if 'privileged' in config: env['privilegedMode'] = config['privileged']",
"for build_name, build in BUILDS.items(): if build_name in new_projects: print('{}: Creating'.format(build_name)) codebuild.create_project(**build) codebuild.create_webhook(projectName=build_name)",
"'project': args.project, 'build': build_name, 'account': args.github_account, } # Replace all templates with the",
"to create the projects for') parser.add_argument('--github-account', type=str, dest='github_account', default='awslabs', help='The GitHub account that",
"env['privilegedMode'] = config['privileged'] build['environment'] = env sub_params = { 'project': args.project, 'build': build_name,",
"build_name)] = build # Connect to codebuild session = boto3.Session(profile_name=args.profile, region_name='us-east-1') codebuild =",
"session = boto3.Session(profile_name=args.profile, region_name='us-east-1') codebuild = session.client('codebuild') # Find out which projects already",
"existing_projects['projects']] # Actually create the projects for build_name, build in BUILDS.items(): if build_name",
"}, { 'build': 'linux-gcc-6x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-7x-x64', 'env': 'linux' },",
"SPDX-License-Identifier: Apache-2.0. import argparse import boto3 # Parse required options parser = argparse.ArgumentParser(description='Creates",
"{ 'build': 'linux-gcc-7x-x64', 'env': 'linux' }, { 'build': 'windows-msvc-2017', 'env': 'windows-2017' }, {",
"}, { 'build': 'linux-gcc-5x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-6x-x64', 'env': 'linux' },",
"'linux' }, { 'build': 'linux-gcc-6x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-7x-x64', 'env': 'linux'",
"for key, value in obj.items(): obj[key] = do_replace(value) return obj elif isinstance(obj, str):",
"= config['build'] build = dict(CREATE_PARAM_TEMPLATE) env = dict(ENVIRONMENTS[config['env']]) if 'privileged' in config: env['privilegedMode']",
"'env': 'linux' }, { 'build': 'linux-gcc-4x-x86', 'env': 'linux' }, { 'build': 'linux-gcc-5x-x64', 'env':",
"# Replace all templates with the values above def do_replace(obj): if isinstance(obj, dict):",
"[], 'privilegedMode': False, }, 'windows-2015': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables':",
"= argparse.ArgumentParser(description='Creates all required AWS CodeBuild projects for a repo') parser.add_argument('project', type=str, help='The",
"'environment': None, 'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled': False, } # The common enviroment objects to",
"config: env['privilegedMode'] = config['privileged'] build['environment'] = env sub_params = { 'project': args.project, 'build':",
"}, { 'build': 'linux-gcc-4x-x86', 'env': 'linux' }, { 'build': 'linux-gcc-5x-x64', 'env': 'linux' },",
"in BUILD_CONFIGS: build_name = config['build'] build = dict(CREATE_PARAM_TEMPLATE) env = dict(ENVIRONMENTS[config['env']]) if 'privileged'",
"# Find out which projects already exist and should be updated, and which",
"{} for config in BUILD_CONFIGS: build_name = config['build'] build = dict(CREATE_PARAM_TEMPLATE) env =",
"= list(BUILDS.keys()) existing_projects = codebuild.batch_get_projects(names=all_project_names) new_projects = existing_projects['projectsNotFound'] existing_projects = [project['name'] for project",
"[], 'privilegedMode': False, }, 'windows-2017': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables':",
"# The list of all of our build configs paired with their environments",
"'linux' }, { 'build': 'linux-gcc-5x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-6x-x64', 'env': 'linux'",
"'123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, } # The list of",
"'codebuild/{build}.yml', 'auth': { 'type': 'OAUTH', }, 'reportBuildStatus': True, }, 'artifacts': { 'type': 'NO_ARTIFACTS',",
"their environments BUILD_CONFIGS = [ { 'build': 'linux-clang3-x64', 'env': 'linux' }, { 'build':",
"'windows-msvc-2015', 'env': 'windows-2015' }, { 'build': 'windows-msvc-2015-x86', 'env': 'windows-2015' }, ] # Fully",
"# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0.",
"dict(CREATE_PARAM_TEMPLATE) env = dict(ENVIRONMENTS[config['env']]) if 'privileged' in config: env['privilegedMode'] = config['privileged'] build['environment'] =",
"False, }, 'windows-2017': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode':",
"'windows-msvc-2015-x86', 'env': 'windows-2015' }, ] # Fully populate the BUILDS list with all",
"'type': 'OAUTH', }, 'reportBuildStatus': True, }, 'artifacts': { 'type': 'NO_ARTIFACTS', }, 'environment': None,",
"'badgeEnabled': False, } # The common enviroment objects to feed to CodeBuild ENVIRONMENTS",
"ENVIRONMENTS = { 'linux': { 'type': 'LINUX_CONTAINER', 'image': 'aws/codebuild/ubuntu-base:14.04', 'computeType': 'BUILD_GENERAL1_SMALL', 'environmentVariables': [],",
"None, 'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled': False, } # The common enviroment objects to feed",
"list(BUILDS.keys()) existing_projects = codebuild.batch_get_projects(names=all_project_names) new_projects = existing_projects['projectsNotFound'] existing_projects = [project['name'] for project in",
"in obj.items(): obj[key] = do_replace(value) return obj elif isinstance(obj, str): return obj.format(**sub_params) else:",
"return obj elif isinstance(obj, str): return obj.format(**sub_params) else: return obj do_replace(build) BUILDS['{}-{}'.format(args.project, build_name)]",
"all of our build configs paired with their environments BUILD_CONFIGS = [ {",
"to CodeBuild ENVIRONMENTS = { 'linux': { 'type': 'LINUX_CONTAINER', 'image': 'aws/codebuild/ubuntu-base:14.04', 'computeType': 'BUILD_GENERAL1_SMALL',",
"{ 'type': 'GITHUB', 'location': 'https://github.com/{account}/{project}.git', 'gitCloneDepth': 1, 'buildspec': 'codebuild/{build}.yml', 'auth': { 'type': 'OAUTH',",
"Connect to codebuild session = boto3.Session(profile_name=args.profile, region_name='us-east-1') codebuild = session.client('codebuild') # Find out",
"codebuild.batch_get_projects(names=all_project_names) new_projects = existing_projects['projectsNotFound'] existing_projects = [project['name'] for project in existing_projects['projects']] # Actually",
"'auth': { 'type': 'OAUTH', }, 'reportBuildStatus': True, }, 'artifacts': { 'type': 'NO_ARTIFACTS', },",
"'build': 'windows-msvc-2015', 'env': 'windows-2015' }, { 'build': 'windows-msvc-2015-x86', 'env': 'windows-2015' }, ] #",
"existing_projects = [project['name'] for project in existing_projects['projects']] # Actually create the projects for",
"obj.items(): obj[key] = do_replace(value) return obj elif isinstance(obj, str): return obj.format(**sub_params) else: return",
"environments BUILD_CONFIGS = [ { 'build': 'linux-clang3-x64', 'env': 'linux' }, { 'build': 'linux-clang6-x64',",
"elif isinstance(obj, str): return obj.format(**sub_params) else: return obj do_replace(build) BUILDS['{}-{}'.format(args.project, build_name)] = build",
"our build configs paired with their environments BUILD_CONFIGS = [ { 'build': 'linux-clang3-x64',",
"False, }, 'windows-2015': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode':",
"build_name, build in BUILDS.items(): if build_name in new_projects: print('{}: Creating'.format(build_name)) codebuild.create_project(**build) codebuild.create_webhook(projectName=build_name) elif",
"required options parser = argparse.ArgumentParser(description='Creates all required AWS CodeBuild projects for a repo')",
"common enviroment objects to feed to CodeBuild ENVIRONMENTS = { 'linux': { 'type':",
"'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled': False, } # The common enviroment objects to feed to CodeBuild",
"] # Fully populate the BUILDS list with all final build objects BUILDS",
"# Fully populate the BUILDS list with all final build objects BUILDS =",
"Rights Reserved. # SPDX-License-Identifier: Apache-2.0. import argparse import boto3 # Parse required options",
"BUILDS = {} for config in BUILD_CONFIGS: build_name = config['build'] build = dict(CREATE_PARAM_TEMPLATE)",
"BUILD_CONFIGS: build_name = config['build'] build = dict(CREATE_PARAM_TEMPLATE) env = dict(ENVIRONMENTS[config['env']]) if 'privileged' in",
"parser.add_argument('project', type=str, help='The name of the repo to create the projects for') parser.add_argument('--github-account',",
"template for the arguments to be passed to create_project CREATE_PARAM_TEMPLATE = { 'name':",
"build_name = config['build'] build = dict(CREATE_PARAM_TEMPLATE) env = dict(ENVIRONMENTS[config['env']]) if 'privileged' in config:",
"the projects for build_name, build in BUILDS.items(): if build_name in new_projects: print('{}: Creating'.format(build_name))",
"= { 'name': '{project}-{build}', 'source': { 'type': 'GITHUB', 'location': 'https://github.com/{account}/{project}.git', 'gitCloneDepth': 1, 'buildspec':",
"region_name='us-east-1') codebuild = session.client('codebuild') # Find out which projects already exist and should",
"use when creating the jobs') args = parser.parse_args() # The template for the",
"for') parser.add_argument('--github-account', type=str, dest='github_account', default='awslabs', help='The GitHub account that owns the repo') parser.add_argument('--profile',",
"import boto3 # Parse required options parser = argparse.ArgumentParser(description='Creates all required AWS CodeBuild",
"'env': 'linux' }, { 'build': 'linux-gcc-5x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-6x-x64', 'env':",
"obj do_replace(build) BUILDS['{}-{}'.format(args.project, build_name)] = build # Connect to codebuild session = boto3.Session(profile_name=args.profile,",
"dest='github_account', default='awslabs', help='The GitHub account that owns the repo') parser.add_argument('--profile', type=str, default='default', help='The",
"to codebuild session = boto3.Session(profile_name=args.profile, region_name='us-east-1') codebuild = session.client('codebuild') # Find out which",
"'linux-gcc-5x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-6x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-7x-x64',",
"repo') parser.add_argument('--profile', type=str, default='default', help='The profile in ~/.aws/credentials to use when creating the",
"creating the jobs') args = parser.parse_args() # The template for the arguments to",
"{ 'type': 'NO_ARTIFACTS', }, 'environment': None, 'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled': False, } # The",
"isinstance(obj, dict): for key, value in obj.items(): obj[key] = do_replace(value) return obj elif",
"options parser = argparse.ArgumentParser(description='Creates all required AWS CodeBuild projects for a repo') parser.add_argument('project',",
"'env': 'windows-2015' }, ] # Fully populate the BUILDS list with all final",
"build # Connect to codebuild session = boto3.Session(profile_name=args.profile, region_name='us-east-1') codebuild = session.client('codebuild') #",
"'build': 'windows-msvc-2017', 'env': 'windows-2017' }, { 'build': 'windows-msvc-2015', 'env': 'windows-2015' }, { 'build':",
"'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, } # The",
"Fully populate the BUILDS list with all final build objects BUILDS = {}",
"updated, and which must be created all_project_names = list(BUILDS.keys()) existing_projects = codebuild.batch_get_projects(names=all_project_names) new_projects",
"be passed to create_project CREATE_PARAM_TEMPLATE = { 'name': '{project}-{build}', 'source': { 'type': 'GITHUB',",
"str): return obj.format(**sub_params) else: return obj do_replace(build) BUILDS['{}-{}'.format(args.project, build_name)] = build # Connect",
"The template for the arguments to be passed to create_project CREATE_PARAM_TEMPLATE = {",
"Creating'.format(build_name)) codebuild.create_project(**build) codebuild.create_webhook(projectName=build_name) elif build_name in existing_projects: print('{}: Updating'.format(build_name)) codebuild.update_project(**build) else: assert False",
"obj[key] = do_replace(value) return obj elif isinstance(obj, str): return obj.format(**sub_params) else: return obj",
"'type': 'GITHUB', 'location': 'https://github.com/{account}/{project}.git', 'gitCloneDepth': 1, 'buildspec': 'codebuild/{build}.yml', 'auth': { 'type': 'OAUTH', },",
"'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, } # The list",
"'computeType': 'BUILD_GENERAL1_SMALL', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2017': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest',",
"in existing_projects['projects']] # Actually create the projects for build_name, build in BUILDS.items(): if",
"'{project}-{build}', 'source': { 'type': 'GITHUB', 'location': 'https://github.com/{account}/{project}.git', 'gitCloneDepth': 1, 'buildspec': 'codebuild/{build}.yml', 'auth': {",
"~/.aws/credentials to use when creating the jobs') args = parser.parse_args() # The template",
"a repo') parser.add_argument('project', type=str, help='The name of the repo to create the projects",
"'build': 'linux-clang3-x64', 'env': 'linux' }, { 'build': 'linux-clang6-x64', 'env': 'linux', 'privileged': True },",
"create the projects for build_name, build in BUILDS.items(): if build_name in new_projects: print('{}:",
"= [ { 'build': 'linux-clang3-x64', 'env': 'linux' }, { 'build': 'linux-clang6-x64', 'env': 'linux',",
"'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, } # The list of all of",
"list of all of our build configs paired with their environments BUILD_CONFIGS =",
"feed to CodeBuild ENVIRONMENTS = { 'linux': { 'type': 'LINUX_CONTAINER', 'image': 'aws/codebuild/ubuntu-base:14.04', 'computeType':",
"# The template for the arguments to be passed to create_project CREATE_PARAM_TEMPLATE =",
"the arguments to be passed to create_project CREATE_PARAM_TEMPLATE = { 'name': '{project}-{build}', 'source':",
"in new_projects: print('{}: Creating'.format(build_name)) codebuild.create_project(**build) codebuild.create_webhook(projectName=build_name) elif build_name in existing_projects: print('{}: Updating'.format(build_name)) codebuild.update_project(**build)",
"BUILDS['{}-{}'.format(args.project, build_name)] = build # Connect to codebuild session = boto3.Session(profile_name=args.profile, region_name='us-east-1') codebuild",
"the jobs') args = parser.parse_args() # The template for the arguments to be",
"{ 'name': '{project}-{build}', 'source': { 'type': 'GITHUB', 'location': 'https://github.com/{account}/{project}.git', 'gitCloneDepth': 1, 'buildspec': 'codebuild/{build}.yml',",
"}, { 'build': 'linux-clang6-x64', 'env': 'linux', 'privileged': True }, { 'build': 'linux-gcc-4x-x64', 'env':",
"objects to feed to CodeBuild ENVIRONMENTS = { 'linux': { 'type': 'LINUX_CONTAINER', 'image':",
"'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [], 'privilegedMode': False, }, } # The list of all",
"for config in BUILD_CONFIGS: build_name = config['build'] build = dict(CREATE_PARAM_TEMPLATE) env = dict(ENVIRONMENTS[config['env']])",
"BUILDS list with all final build objects BUILDS = {} for config in",
"Parse required options parser = argparse.ArgumentParser(description='Creates all required AWS CodeBuild projects for a",
"}, } # The list of all of our build configs paired with",
"final build objects BUILDS = {} for config in BUILD_CONFIGS: build_name = config['build']",
"args = parser.parse_args() # The template for the arguments to be passed to",
"objects BUILDS = {} for config in BUILD_CONFIGS: build_name = config['build'] build =",
"parser.parse_args() # The template for the arguments to be passed to create_project CREATE_PARAM_TEMPLATE",
"}, 'environment': None, 'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled': False, } # The common enviroment objects",
"jobs') args = parser.parse_args() # The template for the arguments to be passed",
"'reportBuildStatus': True, }, 'artifacts': { 'type': 'NO_ARTIFACTS', }, 'environment': None, 'serviceRole': 'arn:aws:iam::123124136734:role/CodeBuildServiceRole', 'badgeEnabled':",
"'windows-2015' }, ] # Fully populate the BUILDS list with all final build",
"parser.add_argument('--profile', type=str, default='default', help='The profile in ~/.aws/credentials to use when creating the jobs')",
"'build': build_name, 'account': args.github_account, } # Replace all templates with the values above",
"created all_project_names = list(BUILDS.keys()) existing_projects = codebuild.batch_get_projects(names=all_project_names) new_projects = existing_projects['projectsNotFound'] existing_projects = [project['name']",
"= boto3.Session(profile_name=args.profile, region_name='us-east-1') codebuild = session.client('codebuild') # Find out which projects already exist",
"repo') parser.add_argument('project', type=str, help='The name of the repo to create the projects for')",
"build_name in new_projects: print('{}: Creating'.format(build_name)) codebuild.create_project(**build) codebuild.create_webhook(projectName=build_name) elif build_name in existing_projects: print('{}: Updating'.format(build_name))",
"when creating the jobs') args = parser.parse_args() # The template for the arguments",
"'privilegedMode': False, }, 'windows-2017': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2017:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM', 'environmentVariables': [],",
"project in existing_projects['projects']] # Actually create the projects for build_name, build in BUILDS.items():",
"= { 'linux': { 'type': 'LINUX_CONTAINER', 'image': 'aws/codebuild/ubuntu-base:14.04', 'computeType': 'BUILD_GENERAL1_SMALL', 'environmentVariables': [], 'privilegedMode':",
"the repo') parser.add_argument('--profile', type=str, default='default', help='The profile in ~/.aws/credentials to use when creating",
"'linux-gcc-4x-x86', 'env': 'linux' }, { 'build': 'linux-gcc-5x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-6x-x64',",
"return obj.format(**sub_params) else: return obj do_replace(build) BUILDS['{}-{}'.format(args.project, build_name)] = build # Connect to",
"boto3.Session(profile_name=args.profile, region_name='us-east-1') codebuild = session.client('codebuild') # Find out which projects already exist and",
"the projects for') parser.add_argument('--github-account', type=str, dest='github_account', default='awslabs', help='The GitHub account that owns the",
"in ~/.aws/credentials to use when creating the jobs') args = parser.parse_args() # The",
"The common enviroment objects to feed to CodeBuild ENVIRONMENTS = { 'linux': {",
"if build_name in new_projects: print('{}: Creating'.format(build_name)) codebuild.create_project(**build) codebuild.create_webhook(projectName=build_name) elif build_name in existing_projects: print('{}:",
"'image': 'aws/codebuild/ubuntu-base:14.04', 'computeType': 'BUILD_GENERAL1_SMALL', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2017': { 'type': 'WINDOWS_CONTAINER',",
"passed to create_project CREATE_PARAM_TEMPLATE = { 'name': '{project}-{build}', 'source': { 'type': 'GITHUB', 'location':",
"must be created all_project_names = list(BUILDS.keys()) existing_projects = codebuild.batch_get_projects(names=all_project_names) new_projects = existing_projects['projectsNotFound'] existing_projects",
"Reserved. # SPDX-License-Identifier: Apache-2.0. import argparse import boto3 # Parse required options parser",
"}, { 'build': 'windows-msvc-2015-x86', 'env': 'windows-2015' }, ] # Fully populate the BUILDS",
"owns the repo') parser.add_argument('--profile', type=str, default='default', help='The profile in ~/.aws/credentials to use when",
"configs paired with their environments BUILD_CONFIGS = [ { 'build': 'linux-clang3-x64', 'env': 'linux'",
"'build': 'linux-gcc-7x-x64', 'env': 'linux' }, { 'build': 'windows-msvc-2017', 'env': 'windows-2017' }, { 'build':",
"'linux' }, { 'build': 'windows-msvc-2017', 'env': 'windows-2017' }, { 'build': 'windows-msvc-2015', 'env': 'windows-2015'",
"CodeBuild projects for a repo') parser.add_argument('project', type=str, help='The name of the repo to",
"existing_projects = codebuild.batch_get_projects(names=all_project_names) new_projects = existing_projects['projectsNotFound'] existing_projects = [project['name'] for project in existing_projects['projects']]",
"'linux-clang3-x64', 'env': 'linux' }, { 'build': 'linux-clang6-x64', 'env': 'linux', 'privileged': True }, {",
"= session.client('codebuild') # Find out which projects already exist and should be updated,",
"obj.format(**sub_params) else: return obj do_replace(build) BUILDS['{}-{}'.format(args.project, build_name)] = build # Connect to codebuild",
"name of the repo to create the projects for') parser.add_argument('--github-account', type=str, dest='github_account', default='awslabs',",
"# SPDX-License-Identifier: Apache-2.0. import argparse import boto3 # Parse required options parser =",
"{ 'type': 'LINUX_CONTAINER', 'image': 'aws/codebuild/ubuntu-base:14.04', 'computeType': 'BUILD_GENERAL1_SMALL', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2017':",
"key, value in obj.items(): obj[key] = do_replace(value) return obj elif isinstance(obj, str): return",
"'build': 'linux-gcc-4x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-4x-x86', 'env': 'linux' }, { 'build':",
"return obj do_replace(build) BUILDS['{}-{}'.format(args.project, build_name)] = build # Connect to codebuild session =",
"of all of our build configs paired with their environments BUILD_CONFIGS = [",
"= parser.parse_args() # The template for the arguments to be passed to create_project",
"for a repo') parser.add_argument('project', type=str, help='The name of the repo to create the",
"# Connect to codebuild session = boto3.Session(profile_name=args.profile, region_name='us-east-1') codebuild = session.client('codebuild') # Find",
"'aws/codebuild/ubuntu-base:14.04', 'computeType': 'BUILD_GENERAL1_SMALL', 'environmentVariables': [], 'privilegedMode': False, }, 'windows-2017': { 'type': 'WINDOWS_CONTAINER', 'image':",
"{ 'build': 'linux-gcc-4x-x64', 'env': 'linux' }, { 'build': 'linux-gcc-4x-x86', 'env': 'linux' }, {",
"'environmentVariables': [], 'privilegedMode': False, }, 'windows-2015': { 'type': 'WINDOWS_CONTAINER', 'image': '123124136734.dkr.ecr.us-east-1.amazonaws.com/codebulid-windows-vs-2015:latest', 'computeType': 'BUILD_GENERAL1_MEDIUM',"
"num_workers=4, pin_memory=False, drop_last=True) for iter_num, img_batch in enumerate(train_loader): img_batch, num_batch = img_batch[0], img_batch[1]",
"exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs'] ) if ptu.gpu_enabled(): model.cuda()",
"import rlkit.torch.pytorch_util as ptu # Data from observations import multi_mnist from torch.utils.data import",
"Python import argparse import joblib import yaml import os.path as osp from collections",
"print('ELBO:\\t%.4f' % elbo) print('MSE:\\t%.4f' % mse) print('KL:\\t%.4f' % KL) for i in range(1):",
"TensorDataset(X_test, Num_test) # Model Definition -------------------------------------------------------- model = AttentiveVAE( [1, 48, 48], exp_specs['vae_specs']['z_dim'],",
"Y_train]) Num_test = np.array([convert_dict[a.shape[0]] for a in Y_test]) X_train = X_train[:,None,...] X_test =",
"enumerate(train_loader): img_batch, num_batch = img_batch[0], img_batch[1] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs,",
"elbo) print('MSE:\\t%.4f' % mse) print('KL:\\t%.4f' % KL) for i in range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path,",
"global_iter += 1 if __name__ == '__main__': # Arguments parser = argparse.ArgumentParser() parser.add_argument('-e',",
"global_iter = 0 for epoch in range(exp_specs['epochs']): train_loader = DataLoader(train_ds, batch_size=exp_specs['batch_size'], shuffle=True, num_workers=4,",
"model.compute_ELBO( what_means + where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) mse",
"= X_test[idxs], Num_test[idxs] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs, masks,",
"Model Definition -------------------------------------------------------- model = AttentiveVAE( [1, 48, 48], exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'],",
"import os.path as osp from collections import defaultdict import joblib import os #",
"lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd'])) # ------------------------------------------------------------------------- global_iter = 0 for epoch in range(exp_specs['epochs']): train_loader =",
"average_over_batch=True ) loss = -1. * elbo loss = loss + 1. *",
"ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs, masks, recon_mean, recon_log_cov = model(img_batch,",
"multi_mnist(path, max_digits=2, canvas_size=48, seed=42, use_max=False) convert_dict = {0: [0.,0.], 1: [1.,0.], 2: [1.,1.]}",
"recon_log_cov = model(img_batch, num_batch) elbo, KL = model.compute_ELBO( what_means + where_means, what_log_covs +",
"save_pytorch_tensor_as_img import sys def experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu']) # Set up logging ---------------------------------------------------------- exp_id =",
"where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) mse = ((recon_mean -",
"array from numpy.random import choice, randint # Model Building from gen_models.attentive_vae import AttentiveVAE",
"---------------------------------------------------------- exp_id = exp_specs['exp_id'] exp_prefix = exp_specs['exp_name'] seed = exp_specs['seed'] set_seed(seed) setup_logger(exp_prefix=exp_prefix, exp_id=exp_id,",
"# Model Building from gen_models.attentive_vae import AttentiveVAE import rlkit.torch.pytorch_util as ptu # Data",
"import yaml import os.path as osp from collections import defaultdict import joblib import",
"weight_decay=float(exp_specs['model_wd'])) # ------------------------------------------------------------------------- global_iter = 0 for epoch in range(exp_specs['epochs']): train_loader = DataLoader(train_ds,",
"# Python import argparse import joblib import yaml import os.path as osp from",
"if ptu.gpu_enabled(): model.cuda() # Optimizer --------------------------------------------------------------- model_optim = Adam(model.parameters(), lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd'])) # -------------------------------------------------------------------------",
"- img_batch)**2).mean() print('ELBO:\\t%.4f' % elbo) print('MSE:\\t%.4f' % mse) print('KL:\\t%.4f' % KL) for i",
"# save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path, '%d_%d_mask_1.png'%(global_iter, i))) model.train() global_iter += 1 if __name__ == '__main__':",
"= img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs, masks, recon_mean, recon_log_cov = model(img_batch, num_batch) elbo,",
"setup_logger, set_seed from rlkit.core.vistools import generate_gif, save_pytorch_tensor_as_img import sys def experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu']) #",
"save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path, '%d_%d_img.png'%(global_iter, i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path, '%d_%d_recon.png'%(global_iter, i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path, '%d_%d_mask_0.png'%(global_iter, i))) #",
"X_train = X_train[:,None,...] X_test = X_test[:,None,...] X_train, X_test = torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0 mask_train, mask_test",
"in range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path, '%d_%d_img.png'%(global_iter, i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path, '%d_%d_recon.png'%(global_iter, i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path, '%d_%d_mask_0.png'%(global_iter,",
"num_batch = img_batch[0], img_batch[1] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs,",
"for iter_num, img_batch in enumerate(train_loader): img_batch, num_batch = img_batch[0], img_batch[1] if ptu.gpu_enabled(): img_batch",
"DataLoader, TensorDataset # Logging from rlkit.core import logger from rlkit.launchers.launcher_util import setup_logger, set_seed",
"os # PyTorch import torch import torch.nn as nn import torch.nn.functional as F",
"up logging ---------------------------------------------------------- exp_id = exp_specs['exp_id'] exp_prefix = exp_specs['exp_name'] seed = exp_specs['seed'] set_seed(seed)",
"elbo, KL = model.compute_ELBO( what_means + where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch,",
"if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs, masks, recon_mean, recon_log_cov =",
"os.path as osp from collections import defaultdict import joblib import os # PyTorch",
"rlkit.torch.pytorch_util as ptu # Data from observations import multi_mnist from torch.utils.data import DataLoader,",
"import logger from rlkit.launchers.launcher_util import setup_logger, set_seed from rlkit.core.vistools import generate_gif, save_pytorch_tensor_as_img import",
"sum([m.mean() for m in masks]) loss.backward() model_optim.step() if global_iter % exp_specs['freq_val'] == 0:",
"* sum([m.mean() for m in masks]) loss.backward() model_optim.step() if global_iter % exp_specs['freq_val'] ==",
"Y_test) = multi_mnist(path, max_digits=2, canvas_size=48, seed=42, use_max=False) convert_dict = {0: [0.,0.], 1: [1.,0.],",
"KL) for i in range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path, '%d_%d_img.png'%(global_iter, i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path, '%d_%d_recon.png'%(global_iter, i)))",
"Set up logging ---------------------------------------------------------- exp_id = exp_specs['exp_id'] exp_prefix = exp_specs['exp_name'] seed = exp_specs['seed']",
"Variable from torch import autograd from torch.optim import Adam # NumPy import numpy",
"ptu # Data from observations import multi_mnist from torch.utils.data import DataLoader, TensorDataset #",
"max_digits=2, canvas_size=48, seed=42, use_max=False) convert_dict = {0: [0.,0.], 1: [1.,0.], 2: [1.,1.]} Num_train",
"args = parser.parse_args() with open(args.experiment, 'r') as spec_file: spec_string = spec_file.read() exp_specs =",
"range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path, '%d_%d_img.png'%(global_iter, i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path, '%d_%d_recon.png'%(global_iter, i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path, '%d_%d_mask_0.png'%(global_iter, i)))",
"from torch.optim import Adam # NumPy import numpy as np from numpy import",
"from numpy import array from numpy.random import choice, randint # Model Building from",
"exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs'] ) if ptu.gpu_enabled(): model.cuda() #",
"numpy import array from numpy.random import choice, randint # Model Building from gen_models.attentive_vae",
"+= 1 if __name__ == '__main__': # Arguments parser = argparse.ArgumentParser() parser.add_argument('-e', '--experiment',",
"parser.parse_args() with open(args.experiment, 'r') as spec_file: spec_string = spec_file.read() exp_specs = yaml.load(spec_string) experiment(exp_specs)",
"exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs'] ) if ptu.gpu_enabled(): model.cuda() # Optimizer --------------------------------------------------------------- model_optim",
"% KL) for i in range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path, '%d_%d_img.png'%(global_iter, i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path, '%d_%d_recon.png'%(global_iter,",
"# Imports --------------------------------------------------------------------- # Python import argparse import joblib import yaml import os.path",
"observations import multi_mnist from torch.utils.data import DataLoader, TensorDataset # Logging from rlkit.core import",
"exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs'] ) if ptu.gpu_enabled(): model.cuda() # Optimizer --------------------------------------------------------------- model_optim = Adam(model.parameters(),",
"= ((recon_mean - img_batch)**2).mean() print('ELBO:\\t%.4f' % elbo) print('MSE:\\t%.4f' % mse) print('KL:\\t%.4f' % KL)",
"model_optim.step() if global_iter % exp_specs['freq_val'] == 0: with torch.no_grad(): print('\\nValidating Iter %d...' %",
"as F from torch.autograd import Variable from torch import autograd from torch.optim import",
"i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path, '%d_%d_mask_0.png'%(global_iter, i))) # save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path, '%d_%d_mask_1.png'%(global_iter, i))) model.train() global_iter +=",
"import AttentiveVAE import rlkit.torch.pytorch_util as ptu # Data from observations import multi_mnist from",
"# Set up logging ---------------------------------------------------------- exp_id = exp_specs['exp_id'] exp_prefix = exp_specs['exp_name'] seed =",
"def experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu']) # Set up logging ---------------------------------------------------------- exp_id = exp_specs['exp_id'] exp_prefix =",
"'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train, Y_train), (X_test, Y_test) = multi_mnist(path, max_digits=2, canvas_size=48, seed=42, use_max=False) convert_dict =",
"= torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0 mask_train, mask_test = torch.FloatTensor(Num_train), torch.FloatTensor(Num_test) train_ds = TensorDataset(X_train, Num_train) val_ds",
"set_seed(seed) setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs) # Prep the data ----------------------------------------------------------- path = 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train,",
"logging ---------------------------------------------------------- exp_id = exp_specs['exp_id'] exp_prefix = exp_specs['exp_name'] seed = exp_specs['seed'] set_seed(seed) setup_logger(exp_prefix=exp_prefix,",
"masks, recon_mean, recon_log_cov = model(img_batch, num_batch) elbo, KL = model.compute_ELBO( what_means + where_means,",
"Prep the data ----------------------------------------------------------- path = 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train, Y_train), (X_test, Y_test) = multi_mnist(path,",
"replace=False) img_batch, num_batch = X_test[idxs], Num_test[idxs] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs,",
"img_batch, num_batch = X_test[idxs], Num_test[idxs] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means,",
"train_loader = DataLoader(train_ds, batch_size=exp_specs['batch_size'], shuffle=True, num_workers=4, pin_memory=False, drop_last=True) for iter_num, img_batch in enumerate(train_loader):",
"exp_specs['vae_specs']['recon_upconv_part_specs'] ) if ptu.gpu_enabled(): model.cuda() # Optimizer --------------------------------------------------------------- model_optim = Adam(model.parameters(), lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd']))",
"torch import torch.nn as nn import torch.nn.functional as F from torch.autograd import Variable",
"-1. * elbo loss = loss + 1. * sum([m.mean() for m in",
"img_batch, average_over_batch=True ) mse = ((recon_mean - img_batch)**2).mean() print('ELBO:\\t%.4f' % elbo) print('MSE:\\t%.4f' %",
"torch.utils.data import DataLoader, TensorDataset # Logging from rlkit.core import logger from rlkit.launchers.launcher_util import",
"variant=exp_specs) # Prep the data ----------------------------------------------------------- path = 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train, Y_train), (X_test, Y_test)",
"with torch.no_grad(): print('\\nValidating Iter %d...' % global_iter) model.eval() idxs = np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'], replace=False)",
"as np from numpy import array from numpy.random import choice, randint # Model",
"exp_specs['exp_name'] seed = exp_specs['seed'] set_seed(seed) setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs) # Prep the data -----------------------------------------------------------",
"X_train[:,None,...] X_test = X_test[:,None,...] X_train, X_test = torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0 mask_train, mask_test = torch.FloatTensor(Num_train),",
"os.path.join(path, '%d_%d_recon.png'%(global_iter, i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path, '%d_%d_mask_0.png'%(global_iter, i))) # save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path, '%d_%d_mask_1.png'%(global_iter, i))) model.train()",
"autograd from torch.optim import Adam # NumPy import numpy as np from numpy",
"recon_log_cov, img_batch, average_over_batch=True ) mse = ((recon_mean - img_batch)**2).mean() print('ELBO:\\t%.4f' % elbo) print('MSE:\\t%.4f'",
"torch.autograd import Variable from torch import autograd from torch.optim import Adam # NumPy",
"Iter %d...' % global_iter) model.eval() idxs = np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'], replace=False) img_batch, num_batch =",
"= img_batch[0], img_batch[1] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs, masks,",
"1. * sum([m.mean() for m in masks]) loss.backward() model_optim.step() if global_iter % exp_specs['freq_val']",
"recon_mean, recon_log_cov, img_batch, average_over_batch=True ) loss = -1. * elbo loss = loss",
"img_batch[1] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs, masks, recon_mean, recon_log_cov",
"val_ds = TensorDataset(X_test, Num_test) # Model Definition -------------------------------------------------------- model = AttentiveVAE( [1, 48,",
"for i in range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path, '%d_%d_img.png'%(global_iter, i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path, '%d_%d_recon.png'%(global_iter, i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(),",
"from collections import defaultdict import joblib import os # PyTorch import torch import",
"img_batch, num_batch = img_batch[0], img_batch[1] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means,",
"+ where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) loss = -1.",
"exp_specs['exp_id'] exp_prefix = exp_specs['exp_name'] seed = exp_specs['seed'] set_seed(seed) setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs) # Prep",
"os.path.join(path, '%d_%d_img.png'%(global_iter, i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path, '%d_%d_recon.png'%(global_iter, i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path, '%d_%d_mask_0.png'%(global_iter, i))) # save_pytorch_tensor_as_img(masks[1][i].data.cpu(),",
"= argparse.ArgumentParser() parser.add_argument('-e', '--experiment', help='experiment specification file') args = parser.parse_args() with open(args.experiment, 'r')",
"where_means, where_log_covs, masks, recon_mean, recon_log_cov = model(img_batch, num_batch) elbo, KL = model.compute_ELBO( what_means",
"a in Y_test]) X_train = X_train[:,None,...] X_test = X_test[:,None,...] X_train, X_test = torch.FloatTensor(X_train)/255.0,",
"from observations import multi_mnist from torch.utils.data import DataLoader, TensorDataset # Logging from rlkit.core",
"Num_test) # Model Definition -------------------------------------------------------- model = AttentiveVAE( [1, 48, 48], exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'],",
"exp_prefix = exp_specs['exp_name'] seed = exp_specs['seed'] set_seed(seed) setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs) # Prep the",
"img_batch in enumerate(train_loader): img_batch, num_batch = img_batch[0], img_batch[1] if ptu.gpu_enabled(): img_batch = img_batch.cuda()",
"[1.,0.], 2: [1.,1.]} Num_train = np.array([convert_dict[a.shape[0]] for a in Y_train]) Num_test = np.array([convert_dict[a.shape[0]]",
"for a in Y_test]) X_train = X_train[:,None,...] X_test = X_test[:,None,...] X_train, X_test =",
"from rlkit.launchers.launcher_util import setup_logger, set_seed from rlkit.core.vistools import generate_gif, save_pytorch_tensor_as_img import sys def",
"np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'], replace=False) img_batch, num_batch = X_test[idxs], Num_test[idxs] if ptu.gpu_enabled(): img_batch = img_batch.cuda()",
"-------------------------------------------------------- model = AttentiveVAE( [1, 48, 48], exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'],",
"collections import defaultdict import joblib import os # PyTorch import torch import torch.nn",
"48], exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs'] )",
"Adam(model.parameters(), lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd'])) # ------------------------------------------------------------------------- global_iter = 0 for epoch in range(exp_specs['epochs']): train_loader",
"exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs'] ) if ptu.gpu_enabled(): model.cuda() # Optimizer ---------------------------------------------------------------",
"model.cuda() # Optimizer --------------------------------------------------------------- model_optim = Adam(model.parameters(), lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd'])) # ------------------------------------------------------------------------- global_iter =",
"where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) mse = ((recon_mean - img_batch)**2).mean() print('ELBO:\\t%.4f' %",
"Data from observations import multi_mnist from torch.utils.data import DataLoader, TensorDataset # Logging from",
"from rlkit.core import logger from rlkit.launchers.launcher_util import setup_logger, set_seed from rlkit.core.vistools import generate_gif,",
"# Optimizer --------------------------------------------------------------- model_optim = Adam(model.parameters(), lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd'])) # ------------------------------------------------------------------------- global_iter = 0",
"exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs'] ) if ptu.gpu_enabled(): model.cuda() # Optimizer",
"== 0: with torch.no_grad(): print('\\nValidating Iter %d...' % global_iter) model.eval() idxs = np.random.choice(int(X_test.size(0)),",
"import torch.nn as nn import torch.nn.functional as F from torch.autograd import Variable from",
"# ------------------------------------------------------------------------- global_iter = 0 for epoch in range(exp_specs['epochs']): train_loader = DataLoader(train_ds, batch_size=exp_specs['batch_size'],",
"defaultdict import joblib import os # PyTorch import torch import torch.nn as nn",
"Adam # NumPy import numpy as np from numpy import array from numpy.random",
") mse = ((recon_mean - img_batch)**2).mean() print('ELBO:\\t%.4f' % elbo) print('MSE:\\t%.4f' % mse) print('KL:\\t%.4f'",
"joblib import yaml import os.path as osp from collections import defaultdict import joblib",
"NumPy import numpy as np from numpy import array from numpy.random import choice,",
"rlkit.launchers.launcher_util import setup_logger, set_seed from rlkit.core.vistools import generate_gif, save_pytorch_tensor_as_img import sys def experiment(exp_specs):",
"48, 48], exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs']",
"DataLoader(train_ds, batch_size=exp_specs['batch_size'], shuffle=True, num_workers=4, pin_memory=False, drop_last=True) for iter_num, img_batch in enumerate(train_loader): img_batch, num_batch",
"* elbo loss = loss + 1. * sum([m.mean() for m in masks])",
"as ptu # Data from observations import multi_mnist from torch.utils.data import DataLoader, TensorDataset",
"mask_train, mask_test = torch.FloatTensor(Num_train), torch.FloatTensor(Num_test) train_ds = TensorDataset(X_train, Num_train) val_ds = TensorDataset(X_test, Num_test)",
"seed = exp_specs['seed'] set_seed(seed) setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs) # Prep the data ----------------------------------------------------------- path",
"X_test = X_test[:,None,...] X_train, X_test = torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0 mask_train, mask_test = torch.FloatTensor(Num_train), torch.FloatTensor(Num_test)",
"------------------------------------------------------------------------- global_iter = 0 for epoch in range(exp_specs['epochs']): train_loader = DataLoader(train_ds, batch_size=exp_specs['batch_size'], shuffle=True,",
"{0: [0.,0.], 1: [1.,0.], 2: [1.,1.]} Num_train = np.array([convert_dict[a.shape[0]] for a in Y_train])",
"= exp_specs['seed'] set_seed(seed) setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs) # Prep the data ----------------------------------------------------------- path =",
"gen_models.attentive_vae import AttentiveVAE import rlkit.torch.pytorch_util as ptu # Data from observations import multi_mnist",
"multi_mnist from torch.utils.data import DataLoader, TensorDataset # Logging from rlkit.core import logger from",
"% global_iter) model.eval() idxs = np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'], replace=False) img_batch, num_batch = X_test[idxs], Num_test[idxs]",
"epoch in range(exp_specs['epochs']): train_loader = DataLoader(train_ds, batch_size=exp_specs['batch_size'], shuffle=True, num_workers=4, pin_memory=False, drop_last=True) for iter_num,",
"import array from numpy.random import choice, randint # Model Building from gen_models.attentive_vae import",
"ptu.set_gpu_mode(exp_specs['use_gpu']) # Set up logging ---------------------------------------------------------- exp_id = exp_specs['exp_id'] exp_prefix = exp_specs['exp_name'] seed",
"num_batch) elbo, KL = model.compute_ELBO( what_means + where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov,",
"= DataLoader(train_ds, batch_size=exp_specs['batch_size'], shuffle=True, num_workers=4, pin_memory=False, drop_last=True) for iter_num, img_batch in enumerate(train_loader): img_batch,",
"----------------------------------------------------------- path = 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train, Y_train), (X_test, Y_test) = multi_mnist(path, max_digits=2, canvas_size=48, seed=42,",
"torch.FloatTensor(X_test)/255.0 mask_train, mask_test = torch.FloatTensor(Num_train), torch.FloatTensor(Num_test) train_ds = TensorDataset(X_train, Num_train) val_ds = TensorDataset(X_test,",
"Num_train = np.array([convert_dict[a.shape[0]] for a in Y_train]) Num_test = np.array([convert_dict[a.shape[0]] for a in",
"where_log_covs, masks, recon_mean, recon_log_cov = model(img_batch, num_batch) elbo, KL = model.compute_ELBO( what_means +",
"X_test[:,None,...] X_train, X_test = torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0 mask_train, mask_test = torch.FloatTensor(Num_train), torch.FloatTensor(Num_test) train_ds =",
"0 for epoch in range(exp_specs['epochs']): train_loader = DataLoader(train_ds, batch_size=exp_specs['batch_size'], shuffle=True, num_workers=4, pin_memory=False, drop_last=True)",
"masks]) loss.backward() model_optim.step() if global_iter % exp_specs['freq_val'] == 0: with torch.no_grad(): print('\\nValidating Iter",
"i))) model.train() global_iter += 1 if __name__ == '__main__': # Arguments parser =",
"import Adam # NumPy import numpy as np from numpy import array from",
"KL = model.compute_ELBO( what_means + where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True",
"setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs) # Prep the data ----------------------------------------------------------- path = 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train, Y_train),",
"recon_mean, recon_log_cov = model(img_batch, num_batch) elbo, KL = model.compute_ELBO( what_means + where_means, what_log_covs",
"model.train() global_iter += 1 if __name__ == '__main__': # Arguments parser = argparse.ArgumentParser()",
"Y_train), (X_test, Y_test) = multi_mnist(path, max_digits=2, canvas_size=48, seed=42, use_max=False) convert_dict = {0: [0.,0.],",
"as nn import torch.nn.functional as F from torch.autograd import Variable from torch import",
"shuffle=True, num_workers=4, pin_memory=False, drop_last=True) for iter_num, img_batch in enumerate(train_loader): img_batch, num_batch = img_batch[0],",
"# PyTorch import torch import torch.nn as nn import torch.nn.functional as F from",
"'%d_%d_recon.png'%(global_iter, i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path, '%d_%d_mask_0.png'%(global_iter, i))) # save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path, '%d_%d_mask_1.png'%(global_iter, i))) model.train() global_iter",
"what_means + where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) mse =",
"mse = ((recon_mean - img_batch)**2).mean() print('ELBO:\\t%.4f' % elbo) print('MSE:\\t%.4f' % mse) print('KL:\\t%.4f' %",
"in range(exp_specs['epochs']): train_loader = DataLoader(train_ds, batch_size=exp_specs['batch_size'], shuffle=True, num_workers=4, pin_memory=False, drop_last=True) for iter_num, img_batch",
"Optimizer --------------------------------------------------------------- model_optim = Adam(model.parameters(), lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd'])) # ------------------------------------------------------------------------- global_iter = 0 for",
"%d...' % global_iter) model.eval() idxs = np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'], replace=False) img_batch, num_batch = X_test[idxs],",
"os.path.join(path, '%d_%d_mask_1.png'%(global_iter, i))) model.train() global_iter += 1 if __name__ == '__main__': # Arguments",
"= AttentiveVAE( [1, 48, 48], exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'],",
"in masks]) loss.backward() model_optim.step() if global_iter % exp_specs['freq_val'] == 0: with torch.no_grad(): print('\\nValidating",
"from gen_models.attentive_vae import AttentiveVAE import rlkit.torch.pytorch_util as ptu # Data from observations import",
"# Model Definition -------------------------------------------------------- model = AttentiveVAE( [1, 48, 48], exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'],",
"[1.,1.]} Num_train = np.array([convert_dict[a.shape[0]] for a in Y_train]) Num_test = np.array([convert_dict[a.shape[0]] for a",
"= model.compute_ELBO( what_means + where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True )",
"model_optim = Adam(model.parameters(), lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd'])) # ------------------------------------------------------------------------- global_iter = 0 for epoch in",
"save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path, '%d_%d_mask_0.png'%(global_iter, i))) # save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path, '%d_%d_mask_1.png'%(global_iter, i))) model.train() global_iter += 1",
"print('MSE:\\t%.4f' % mse) print('KL:\\t%.4f' % KL) for i in range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path, '%d_%d_img.png'%(global_iter,",
"--------------------------------------------------------------- model_optim = Adam(model.parameters(), lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd'])) # ------------------------------------------------------------------------- global_iter = 0 for epoch",
"import DataLoader, TensorDataset # Logging from rlkit.core import logger from rlkit.launchers.launcher_util import setup_logger,",
"size=exp_specs['batch_size'], replace=False) img_batch, num_batch = X_test[idxs], Num_test[idxs] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means,",
"set_seed from rlkit.core.vistools import generate_gif, save_pytorch_tensor_as_img import sys def experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu']) # Set",
"+ where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) mse = ((recon_mean - img_batch)**2).mean() print('ELBO:\\t%.4f'",
"np.array([convert_dict[a.shape[0]] for a in Y_test]) X_train = X_train[:,None,...] X_test = X_test[:,None,...] X_train, X_test",
"# Data from observations import multi_mnist from torch.utils.data import DataLoader, TensorDataset # Logging",
"= exp_specs['exp_id'] exp_prefix = exp_specs['exp_name'] seed = exp_specs['seed'] set_seed(seed) setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs) #",
"import Variable from torch import autograd from torch.optim import Adam # NumPy import",
"train_ds = TensorDataset(X_train, Num_train) val_ds = TensorDataset(X_test, Num_test) # Model Definition -------------------------------------------------------- model",
"data ----------------------------------------------------------- path = 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train, Y_train), (X_test, Y_test) = multi_mnist(path, max_digits=2, canvas_size=48,",
"model(img_batch, num_batch) elbo, KL = model.compute_ELBO( what_means + where_means, what_log_covs + where_log_covs, recon_mean,",
"+ 1. * sum([m.mean() for m in masks]) loss.backward() model_optim.step() if global_iter %",
"torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0 mask_train, mask_test = torch.FloatTensor(Num_train), torch.FloatTensor(Num_test) train_ds = TensorDataset(X_train, Num_train) val_ds =",
"img_batch)**2).mean() print('ELBO:\\t%.4f' % elbo) print('MSE:\\t%.4f' % mse) print('KL:\\t%.4f' % KL) for i in",
"--------------------------------------------------------------------- # Python import argparse import joblib import yaml import os.path as osp",
"yaml import os.path as osp from collections import defaultdict import joblib import os",
"= 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train, Y_train), (X_test, Y_test) = multi_mnist(path, max_digits=2, canvas_size=48, seed=42, use_max=False) convert_dict",
"i in range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path, '%d_%d_img.png'%(global_iter, i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path, '%d_%d_recon.png'%(global_iter, i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path,",
"'__main__': # Arguments parser = argparse.ArgumentParser() parser.add_argument('-e', '--experiment', help='experiment specification file') args =",
"recon_log_cov, img_batch, average_over_batch=True ) loss = -1. * elbo loss = loss +",
"canvas_size=48, seed=42, use_max=False) convert_dict = {0: [0.,0.], 1: [1.,0.], 2: [1.,1.]} Num_train =",
"import choice, randint # Model Building from gen_models.attentive_vae import AttentiveVAE import rlkit.torch.pytorch_util as",
"Building from gen_models.attentive_vae import AttentiveVAE import rlkit.torch.pytorch_util as ptu # Data from observations",
"X_test[idxs], Num_test[idxs] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs, masks, recon_mean,",
"import setup_logger, set_seed from rlkit.core.vistools import generate_gif, save_pytorch_tensor_as_img import sys def experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu'])",
"exp_specs['seed'] set_seed(seed) setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs) # Prep the data ----------------------------------------------------------- path = 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv'",
"0: with torch.no_grad(): print('\\nValidating Iter %d...' % global_iter) model.eval() idxs = np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'],",
"i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path, '%d_%d_recon.png'%(global_iter, i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path, '%d_%d_mask_0.png'%(global_iter, i))) # save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path, '%d_%d_mask_1.png'%(global_iter,",
"osp from collections import defaultdict import joblib import os # PyTorch import torch",
"parser.add_argument('-e', '--experiment', help='experiment specification file') args = parser.parse_args() with open(args.experiment, 'r') as spec_file:",
"= TensorDataset(X_train, Num_train) val_ds = TensorDataset(X_test, Num_test) # Model Definition -------------------------------------------------------- model =",
"% exp_specs['freq_val'] == 0: with torch.no_grad(): print('\\nValidating Iter %d...' % global_iter) model.eval() idxs",
"'%d_%d_mask_1.png'%(global_iter, i))) model.train() global_iter += 1 if __name__ == '__main__': # Arguments parser",
"Num_train) val_ds = TensorDataset(X_test, Num_test) # Model Definition -------------------------------------------------------- model = AttentiveVAE( [1,",
"what_log_covs, where_means, where_log_covs, masks, recon_mean, recon_log_cov = model(img_batch, num_batch) elbo, KL = model.compute_ELBO(",
"= multi_mnist(path, max_digits=2, canvas_size=48, seed=42, use_max=False) convert_dict = {0: [0.,0.], 1: [1.,0.], 2:",
"# Logging from rlkit.core import logger from rlkit.launchers.launcher_util import setup_logger, set_seed from rlkit.core.vistools",
"exp_id=exp_id, variant=exp_specs) # Prep the data ----------------------------------------------------------- path = 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train, Y_train), (X_test,",
"global_iter) model.eval() idxs = np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'], replace=False) img_batch, num_batch = X_test[idxs], Num_test[idxs] if",
"path = 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train, Y_train), (X_test, Y_test) = multi_mnist(path, max_digits=2, canvas_size=48, seed=42, use_max=False)",
"joblib import os # PyTorch import torch import torch.nn as nn import torch.nn.functional",
"file') args = parser.parse_args() with open(args.experiment, 'r') as spec_file: spec_string = spec_file.read() exp_specs",
"idxs = np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'], replace=False) img_batch, num_batch = X_test[idxs], Num_test[idxs] if ptu.gpu_enabled(): img_batch",
"% elbo) print('MSE:\\t%.4f' % mse) print('KL:\\t%.4f' % KL) for i in range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(),",
"Arguments parser = argparse.ArgumentParser() parser.add_argument('-e', '--experiment', help='experiment specification file') args = parser.parse_args() with",
"'%d_%d_mask_0.png'%(global_iter, i))) # save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path, '%d_%d_mask_1.png'%(global_iter, i))) model.train() global_iter += 1 if __name__",
"exp_id = exp_specs['exp_id'] exp_prefix = exp_specs['exp_name'] seed = exp_specs['seed'] set_seed(seed) setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs)",
"in Y_train]) Num_test = np.array([convert_dict[a.shape[0]] for a in Y_test]) X_train = X_train[:,None,...] X_test",
"sys def experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu']) # Set up logging ---------------------------------------------------------- exp_id = exp_specs['exp_id'] exp_prefix",
"# Prep the data ----------------------------------------------------------- path = 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train, Y_train), (X_test, Y_test) =",
"torch.FloatTensor(Num_test) train_ds = TensorDataset(X_train, Num_train) val_ds = TensorDataset(X_test, Num_test) # Model Definition --------------------------------------------------------",
"__name__ == '__main__': # Arguments parser = argparse.ArgumentParser() parser.add_argument('-e', '--experiment', help='experiment specification file')",
"np from numpy import array from numpy.random import choice, randint # Model Building",
"choice, randint # Model Building from gen_models.attentive_vae import AttentiveVAE import rlkit.torch.pytorch_util as ptu",
"exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs'] ) if ptu.gpu_enabled(): model.cuda() # Optimizer --------------------------------------------------------------- model_optim =",
"+ where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) loss = -1. * elbo loss",
"TensorDataset # Logging from rlkit.core import logger from rlkit.launchers.launcher_util import setup_logger, set_seed from",
"1 if __name__ == '__main__': # Arguments parser = argparse.ArgumentParser() parser.add_argument('-e', '--experiment', help='experiment",
"X_train, X_test = torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0 mask_train, mask_test = torch.FloatTensor(Num_train), torch.FloatTensor(Num_test) train_ds = TensorDataset(X_train,",
"model.eval() idxs = np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'], replace=False) img_batch, num_batch = X_test[idxs], Num_test[idxs] if ptu.gpu_enabled():",
"= Adam(model.parameters(), lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd'])) # ------------------------------------------------------------------------- global_iter = 0 for epoch in range(exp_specs['epochs']):",
"exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs'] ) if",
"print('\\nValidating Iter %d...' % global_iter) model.eval() idxs = np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'], replace=False) img_batch, num_batch",
"model = AttentiveVAE( [1, 48, 48], exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'],",
"PyTorch import torch import torch.nn as nn import torch.nn.functional as F from torch.autograd",
"torch.no_grad(): print('\\nValidating Iter %d...' % global_iter) model.eval() idxs = np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'], replace=False) img_batch,",
"% mse) print('KL:\\t%.4f' % KL) for i in range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path, '%d_%d_img.png'%(global_iter, i)))",
"Num_test = np.array([convert_dict[a.shape[0]] for a in Y_test]) X_train = X_train[:,None,...] X_test = X_test[:,None,...]",
"iter_num, img_batch in enumerate(train_loader): img_batch, num_batch = img_batch[0], img_batch[1] if ptu.gpu_enabled(): img_batch =",
"import autograd from torch.optim import Adam # NumPy import numpy as np from",
"(X_test, Y_test) = multi_mnist(path, max_digits=2, canvas_size=48, seed=42, use_max=False) convert_dict = {0: [0.,0.], 1:",
"AttentiveVAE import rlkit.torch.pytorch_util as ptu # Data from observations import multi_mnist from torch.utils.data",
"Model Building from gen_models.attentive_vae import AttentiveVAE import rlkit.torch.pytorch_util as ptu # Data from",
"+ where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) mse = ((recon_mean",
"torch.nn as nn import torch.nn.functional as F from torch.autograd import Variable from torch",
"where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) loss = -1. *",
"torch import autograd from torch.optim import Adam # NumPy import numpy as np",
"import joblib import os # PyTorch import torch import torch.nn as nn import",
"'%d_%d_img.png'%(global_iter, i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path, '%d_%d_recon.png'%(global_iter, i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path, '%d_%d_mask_0.png'%(global_iter, i))) # save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path,",
"loss + 1. * sum([m.mean() for m in masks]) loss.backward() model_optim.step() if global_iter",
"exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs'] ) if ptu.gpu_enabled():",
"drop_last=True) for iter_num, img_batch in enumerate(train_loader): img_batch, num_batch = img_batch[0], img_batch[1] if ptu.gpu_enabled():",
"save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path, '%d_%d_recon.png'%(global_iter, i))) save_pytorch_tensor_as_img(masks[0][i].data.cpu(), os.path.join(path, '%d_%d_mask_0.png'%(global_iter, i))) # save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path, '%d_%d_mask_1.png'%(global_iter, i)))",
"img_batch = img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs, masks, recon_mean, recon_log_cov = model(img_batch, num_batch)",
"num_batch = X_test[idxs], Num_test[idxs] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs,",
"((recon_mean - img_batch)**2).mean() print('ELBO:\\t%.4f' % elbo) print('MSE:\\t%.4f' % mse) print('KL:\\t%.4f' % KL) for",
"m in masks]) loss.backward() model_optim.step() if global_iter % exp_specs['freq_val'] == 0: with torch.no_grad():",
"what_means, what_log_covs, where_means, where_log_covs, masks, recon_mean, recon_log_cov = model(img_batch, num_batch) elbo, KL =",
"randint # Model Building from gen_models.attentive_vae import AttentiveVAE import rlkit.torch.pytorch_util as ptu #",
"parser = argparse.ArgumentParser() parser.add_argument('-e', '--experiment', help='experiment specification file') args = parser.parse_args() with open(args.experiment,",
"exp_specs['freq_val'] == 0: with torch.no_grad(): print('\\nValidating Iter %d...' % global_iter) model.eval() idxs =",
"for m in masks]) loss.backward() model_optim.step() if global_iter % exp_specs['freq_val'] == 0: with",
"if global_iter % exp_specs['freq_val'] == 0: with torch.no_grad(): print('\\nValidating Iter %d...' % global_iter)",
"torch.optim import Adam # NumPy import numpy as np from numpy import array",
"generate_gif, save_pytorch_tensor_as_img import sys def experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu']) # Set up logging ---------------------------------------------------------- exp_id",
"exp_specs['vae_specs']['z_obj_recon_upconv_specs'], exp_specs['vae_specs']['recon_upconv_part_specs'] ) if ptu.gpu_enabled(): model.cuda() # Optimizer --------------------------------------------------------------- model_optim = Adam(model.parameters(), lr=float(exp_specs['model_lr']),",
"ptu.gpu_enabled(): model.cuda() # Optimizer --------------------------------------------------------------- model_optim = Adam(model.parameters(), lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd'])) # ------------------------------------------------------------------------- global_iter",
"= X_train[:,None,...] X_test = X_test[:,None,...] X_train, X_test = torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0 mask_train, mask_test =",
"rlkit.core.vistools import generate_gif, save_pytorch_tensor_as_img import sys def experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu']) # Set up logging",
"if __name__ == '__main__': # Arguments parser = argparse.ArgumentParser() parser.add_argument('-e', '--experiment', help='experiment specification",
"in enumerate(train_loader): img_batch, num_batch = img_batch[0], img_batch[1] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means,",
"i))) # save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path, '%d_%d_mask_1.png'%(global_iter, i))) model.train() global_iter += 1 if __name__ ==",
"import defaultdict import joblib import os # PyTorch import torch import torch.nn as",
"pin_memory=False, drop_last=True) for iter_num, img_batch in enumerate(train_loader): img_batch, num_batch = img_batch[0], img_batch[1] if",
"# NumPy import numpy as np from numpy import array from numpy.random import",
"print('KL:\\t%.4f' % KL) for i in range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path, '%d_%d_img.png'%(global_iter, i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(), os.path.join(path,",
"= loss + 1. * sum([m.mean() for m in masks]) loss.backward() model_optim.step() if",
"= model(img_batch, num_batch) elbo, KL = model.compute_ELBO( what_means + where_means, what_log_covs + where_log_covs,",
"(X_train, Y_train), (X_test, Y_test) = multi_mnist(path, max_digits=2, canvas_size=48, seed=42, use_max=False) convert_dict = {0:",
"elbo loss = loss + 1. * sum([m.mean() for m in masks]) loss.backward()",
"= -1. * elbo loss = loss + 1. * sum([m.mean() for m",
"numpy.random import choice, randint # Model Building from gen_models.attentive_vae import AttentiveVAE import rlkit.torch.pytorch_util",
"img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs, masks, recon_mean, recon_log_cov = model(img_batch, num_batch) elbo, KL",
"for epoch in range(exp_specs['epochs']): train_loader = DataLoader(train_ds, batch_size=exp_specs['batch_size'], shuffle=True, num_workers=4, pin_memory=False, drop_last=True) for",
"what_means + where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) loss =",
"import numpy as np from numpy import array from numpy.random import choice, randint",
"X_test = torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0 mask_train, mask_test = torch.FloatTensor(Num_train), torch.FloatTensor(Num_test) train_ds = TensorDataset(X_train, Num_train)",
"argparse import joblib import yaml import os.path as osp from collections import defaultdict",
"import torch.nn.functional as F from torch.autograd import Variable from torch import autograd from",
"recon_mean, recon_log_cov, img_batch, average_over_batch=True ) mse = ((recon_mean - img_batch)**2).mean() print('ELBO:\\t%.4f' % elbo)",
"import torch import torch.nn as nn import torch.nn.functional as F from torch.autograd import",
"average_over_batch=True ) mse = ((recon_mean - img_batch)**2).mean() print('ELBO:\\t%.4f' % elbo) print('MSE:\\t%.4f' % mse)",
"batch_size=exp_specs['batch_size'], shuffle=True, num_workers=4, pin_memory=False, drop_last=True) for iter_num, img_batch in enumerate(train_loader): img_batch, num_batch =",
"loss = loss + 1. * sum([m.mean() for m in masks]) loss.backward() model_optim.step()",
"= parser.parse_args() with open(args.experiment, 'r') as spec_file: spec_string = spec_file.read() exp_specs = yaml.load(spec_string)",
"1: [1.,0.], 2: [1.,1.]} Num_train = np.array([convert_dict[a.shape[0]] for a in Y_train]) Num_test =",
"in Y_test]) X_train = X_train[:,None,...] X_test = X_test[:,None,...] X_train, X_test = torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0",
"= np.array([convert_dict[a.shape[0]] for a in Y_test]) X_train = X_train[:,None,...] X_test = X_test[:,None,...] X_train,",
"global_iter % exp_specs['freq_val'] == 0: with torch.no_grad(): print('\\nValidating Iter %d...' % global_iter) model.eval()",
"where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) loss = -1. * elbo loss =",
"'--experiment', help='experiment specification file') args = parser.parse_args() with open(args.experiment, 'r') as spec_file: spec_string",
"= np.random.choice(int(X_test.size(0)), size=exp_specs['batch_size'], replace=False) img_batch, num_batch = X_test[idxs], Num_test[idxs] if ptu.gpu_enabled(): img_batch =",
"logger from rlkit.launchers.launcher_util import setup_logger, set_seed from rlkit.core.vistools import generate_gif, save_pytorch_tensor_as_img import sys",
") loss = -1. * elbo loss = loss + 1. * sum([m.mean()",
"import argparse import joblib import yaml import os.path as osp from collections import",
"a in Y_train]) Num_test = np.array([convert_dict[a.shape[0]] for a in Y_test]) X_train = X_train[:,None,...]",
"img_batch[0], img_batch[1] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs, masks, recon_mean,",
"as osp from collections import defaultdict import joblib import os # PyTorch import",
"2: [1.,1.]} Num_train = np.array([convert_dict[a.shape[0]] for a in Y_train]) Num_test = np.array([convert_dict[a.shape[0]] for",
"help='experiment specification file') args = parser.parse_args() with open(args.experiment, 'r') as spec_file: spec_string =",
"model.compute_ELBO( what_means + where_means, what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) loss",
"= np.array([convert_dict[a.shape[0]] for a in Y_train]) Num_test = np.array([convert_dict[a.shape[0]] for a in Y_test])",
"numpy as np from numpy import array from numpy.random import choice, randint #",
"from numpy.random import choice, randint # Model Building from gen_models.attentive_vae import AttentiveVAE import",
"TensorDataset(X_train, Num_train) val_ds = TensorDataset(X_test, Num_test) # Model Definition -------------------------------------------------------- model = AttentiveVAE(",
"nn import torch.nn.functional as F from torch.autograd import Variable from torch import autograd",
"loss = -1. * elbo loss = loss + 1. * sum([m.mean() for",
"save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path, '%d_%d_mask_1.png'%(global_iter, i))) model.train() global_iter += 1 if __name__ == '__main__': #",
"import sys def experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu']) # Set up logging ---------------------------------------------------------- exp_id = exp_specs['exp_id']",
"import os # PyTorch import torch import torch.nn as nn import torch.nn.functional as",
"from torch import autograd from torch.optim import Adam # NumPy import numpy as",
"torch.FloatTensor(Num_train), torch.FloatTensor(Num_test) train_ds = TensorDataset(X_train, Num_train) val_ds = TensorDataset(X_test, Num_test) # Model Definition",
"Logging from rlkit.core import logger from rlkit.launchers.launcher_util import setup_logger, set_seed from rlkit.core.vistools import",
"[1, 48, 48], exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'], exp_specs['vae_specs']['z_obj_recon_upconv_specs'],",
"os.path.join(path, '%d_%d_mask_0.png'%(global_iter, i))) # save_pytorch_tensor_as_img(masks[1][i].data.cpu(), os.path.join(path, '%d_%d_mask_1.png'%(global_iter, i))) model.train() global_iter += 1 if",
"Definition -------------------------------------------------------- model = AttentiveVAE( [1, 48, 48], exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'],",
"img_batch, average_over_batch=True ) loss = -1. * elbo loss = loss + 1.",
"from torch.utils.data import DataLoader, TensorDataset # Logging from rlkit.core import logger from rlkit.launchers.launcher_util",
"rlkit.core import logger from rlkit.launchers.launcher_util import setup_logger, set_seed from rlkit.core.vistools import generate_gif, save_pytorch_tensor_as_img",
"= X_test[:,None,...] X_train, X_test = torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0 mask_train, mask_test = torch.FloatTensor(Num_train), torch.FloatTensor(Num_test) train_ds",
"range(exp_specs['epochs']): train_loader = DataLoader(train_ds, batch_size=exp_specs['batch_size'], shuffle=True, num_workers=4, pin_memory=False, drop_last=True) for iter_num, img_batch in",
"loss.backward() model_optim.step() if global_iter % exp_specs['freq_val'] == 0: with torch.no_grad(): print('\\nValidating Iter %d...'",
"[0.,0.], 1: [1.,0.], 2: [1.,1.]} Num_train = np.array([convert_dict[a.shape[0]] for a in Y_train]) Num_test",
"Imports --------------------------------------------------------------------- # Python import argparse import joblib import yaml import os.path as",
"import multi_mnist from torch.utils.data import DataLoader, TensorDataset # Logging from rlkit.core import logger",
"AttentiveVAE( [1, 48, 48], exp_specs['vae_specs']['z_dim'], exp_specs['vae_specs']['x_encoder_specs'], exp_specs['vae_specs']['z_seg_conv_specs'], exp_specs['vae_specs']['z_seg_fc_specs'], exp_specs['vae_specs']['z_obj_conv_specs'], exp_specs['vae_specs']['z_obj_fc_specs'], exp_specs['vae_specs']['z_seg_recon_fc_specs'], exp_specs['vae_specs']['z_seg_recon_upconv_specs'], exp_specs['vae_specs']['z_obj_recon_fc_specs'],",
"seed=42, use_max=False) convert_dict = {0: [0.,0.], 1: [1.,0.], 2: [1.,1.]} Num_train = np.array([convert_dict[a.shape[0]]",
"torch.nn.functional as F from torch.autograd import Variable from torch import autograd from torch.optim",
"== '__main__': # Arguments parser = argparse.ArgumentParser() parser.add_argument('-e', '--experiment', help='experiment specification file') args",
"mask_test = torch.FloatTensor(Num_train), torch.FloatTensor(Num_test) train_ds = TensorDataset(X_train, Num_train) val_ds = TensorDataset(X_test, Num_test) #",
"for a in Y_train]) Num_test = np.array([convert_dict[a.shape[0]] for a in Y_test]) X_train =",
"experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu']) # Set up logging ---------------------------------------------------------- exp_id = exp_specs['exp_id'] exp_prefix = exp_specs['exp_name']",
"Y_test]) X_train = X_train[:,None,...] X_test = X_test[:,None,...] X_train, X_test = torch.FloatTensor(X_train)/255.0, torch.FloatTensor(X_test)/255.0 mask_train,",
"the data ----------------------------------------------------------- path = 'junk_vis/debug_att_vae_shallower_48_64_dim_0p1_kl_stronger_seg_conv' (X_train, Y_train), (X_test, Y_test) = multi_mnist(path, max_digits=2,",
"what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) mse = ((recon_mean - img_batch)**2).mean()",
"np.array([convert_dict[a.shape[0]] for a in Y_train]) Num_test = np.array([convert_dict[a.shape[0]] for a in Y_test]) X_train",
"= torch.FloatTensor(Num_train), torch.FloatTensor(Num_test) train_ds = TensorDataset(X_train, Num_train) val_ds = TensorDataset(X_test, Num_test) # Model",
"convert_dict = {0: [0.,0.], 1: [1.,0.], 2: [1.,1.]} Num_train = np.array([convert_dict[a.shape[0]] for a",
"from rlkit.core.vistools import generate_gif, save_pytorch_tensor_as_img import sys def experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu']) # Set up",
"from torch.autograd import Variable from torch import autograd from torch.optim import Adam #",
"import generate_gif, save_pytorch_tensor_as_img import sys def experiment(exp_specs): ptu.set_gpu_mode(exp_specs['use_gpu']) # Set up logging ----------------------------------------------------------",
"= exp_specs['exp_name'] seed = exp_specs['seed'] set_seed(seed) setup_logger(exp_prefix=exp_prefix, exp_id=exp_id, variant=exp_specs) # Prep the data",
") if ptu.gpu_enabled(): model.cuda() # Optimizer --------------------------------------------------------------- model_optim = Adam(model.parameters(), lr=float(exp_specs['model_lr']), weight_decay=float(exp_specs['model_wd'])) #",
"F from torch.autograd import Variable from torch import autograd from torch.optim import Adam",
"= 0 for epoch in range(exp_specs['epochs']): train_loader = DataLoader(train_ds, batch_size=exp_specs['batch_size'], shuffle=True, num_workers=4, pin_memory=False,",
"what_log_covs + where_log_covs, recon_mean, recon_log_cov, img_batch, average_over_batch=True ) loss = -1. * elbo",
"argparse.ArgumentParser() parser.add_argument('-e', '--experiment', help='experiment specification file') args = parser.parse_args() with open(args.experiment, 'r') as",
"specification file') args = parser.parse_args() with open(args.experiment, 'r') as spec_file: spec_string = spec_file.read()",
"# Arguments parser = argparse.ArgumentParser() parser.add_argument('-e', '--experiment', help='experiment specification file') args = parser.parse_args()",
"mse) print('KL:\\t%.4f' % KL) for i in range(1): save_pytorch_tensor_as_img(img_batch[i].data.cpu(), os.path.join(path, '%d_%d_img.png'%(global_iter, i))) save_pytorch_tensor_as_img(recon_mean[i].data.cpu(),",
"import joblib import yaml import os.path as osp from collections import defaultdict import",
"= TensorDataset(X_test, Num_test) # Model Definition -------------------------------------------------------- model = AttentiveVAE( [1, 48, 48],",
"= {0: [0.,0.], 1: [1.,0.], 2: [1.,1.]} Num_train = np.array([convert_dict[a.shape[0]] for a in",
"use_max=False) convert_dict = {0: [0.,0.], 1: [1.,0.], 2: [1.,1.]} Num_train = np.array([convert_dict[a.shape[0]] for",
"Num_test[idxs] if ptu.gpu_enabled(): img_batch = img_batch.cuda() what_means, what_log_covs, where_means, where_log_covs, masks, recon_mean, recon_log_cov"
"data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) return data return wrapped_function def",
"<filename>minder_utils/util/decorators/file_func.py from functools import wraps from minder_utils.util.util import save_mkdir, save_file, load_file from minder_utils.util.util",
"the data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) return data return wrapped_function",
"__init__(self, save_path, save_name=None, verbose=True, refresh=False): self.save_path = reformat_path(save_path) self.file_name = save_name self.verbose =",
"except FileNotFoundError: save_mkdir(self.save_path) self.print_func(func, 'processing the data') data = func(*args, **kwargs) save_file(data, self.save_path,",
"= load_file(self.save_path, self.file_name) self.print_func(func, 'loading processed data') except FileNotFoundError: save_mkdir(self.save_path) self.print_func(func, 'processing the",
"save_mkdir, save_file, load_file from minder_utils.util.util import reformat_path class load_save: def __init__(self, save_path, save_name=None,",
"**kwargs): if self.refresh: self.print_func(func, 'start to refresh the data') data = func(*args, **kwargs)",
"try: data = load_file(self.save_path, self.file_name) self.print_func(func, 'loading processed data') except FileNotFoundError: save_mkdir(self.save_path) self.print_func(func,",
"save_file, load_file from minder_utils.util.util import reformat_path class load_save: def __init__(self, save_path, save_name=None, verbose=True,",
"self.refresh = refresh def __call__(self, func): self.file_name = func.__name__ if self.file_name is None",
"minder_utils.util.util import save_mkdir, save_file, load_file from minder_utils.util.util import reformat_path class load_save: def __init__(self,",
"processed data') except FileNotFoundError: save_mkdir(self.save_path) self.print_func(func, 'processing the data') data = func(*args, **kwargs)",
"data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) else: try: data = load_file(self.save_path,",
"save_name=None, verbose=True, refresh=False): self.save_path = reformat_path(save_path) self.file_name = save_name self.verbose = verbose self.refresh",
"= reformat_path(save_path) self.file_name = save_name self.verbose = verbose self.refresh = refresh def __call__(self,",
"self.print_func(func, 'processing the data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) return data",
"data') except FileNotFoundError: save_mkdir(self.save_path) self.print_func(func, 'processing the data') data = func(*args, **kwargs) save_file(data,",
"func(*args, **kwargs) save_file(data, self.save_path, self.file_name) return data return wrapped_function def print_func(self, func, message):",
"self.save_path, self.file_name) else: try: data = load_file(self.save_path, self.file_name) self.print_func(func, 'loading processed data') except",
"self.print_func(func, 'start to refresh the data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name)",
"verbose=True, refresh=False): self.save_path = reformat_path(save_path) self.file_name = save_name self.verbose = verbose self.refresh =",
"to refresh the data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) else: try:",
"save_file(data, self.save_path, self.file_name) return data return wrapped_function def print_func(self, func, message): if self.verbose:",
"import wraps from minder_utils.util.util import save_mkdir, save_file, load_file from minder_utils.util.util import reformat_path class",
"data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) return data return wrapped_function def print_func(self,",
"save_name self.verbose = verbose self.refresh = refresh def __call__(self, func): self.file_name = func.__name__",
"wrapped_function(*args, **kwargs): if self.refresh: self.print_func(func, 'start to refresh the data') data = func(*args,",
"reformat_path class load_save: def __init__(self, save_path, save_name=None, verbose=True, refresh=False): self.save_path = reformat_path(save_path) self.file_name",
"the data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) else: try: data =",
"self.file_name is None else self.file_name @wraps(func) def wrapped_function(*args, **kwargs): if self.refresh: self.print_func(func, 'start",
"minder_utils.util.util import reformat_path class load_save: def __init__(self, save_path, save_name=None, verbose=True, refresh=False): self.save_path =",
"= refresh def __call__(self, func): self.file_name = func.__name__ if self.file_name is None else",
"verbose self.refresh = refresh def __call__(self, func): self.file_name = func.__name__ if self.file_name is",
"else self.file_name @wraps(func) def wrapped_function(*args, **kwargs): if self.refresh: self.print_func(func, 'start to refresh the",
"self.file_name) else: try: data = load_file(self.save_path, self.file_name) self.print_func(func, 'loading processed data') except FileNotFoundError:",
"**kwargs) save_file(data, self.save_path, self.file_name) return data return wrapped_function def print_func(self, func, message): if",
"= func(*args, **kwargs) save_file(data, self.save_path, self.file_name) else: try: data = load_file(self.save_path, self.file_name) self.print_func(func,",
"refresh the data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) else: try: data",
"save_file(data, self.save_path, self.file_name) else: try: data = load_file(self.save_path, self.file_name) self.print_func(func, 'loading processed data')",
"def __call__(self, func): self.file_name = func.__name__ if self.file_name is None else self.file_name @wraps(func)",
"'loading processed data') except FileNotFoundError: save_mkdir(self.save_path) self.print_func(func, 'processing the data') data = func(*args,",
"func.__name__ if self.file_name is None else self.file_name @wraps(func) def wrapped_function(*args, **kwargs): if self.refresh:",
"self.file_name) self.print_func(func, 'loading processed data') except FileNotFoundError: save_mkdir(self.save_path) self.print_func(func, 'processing the data') data",
"self.save_path = reformat_path(save_path) self.file_name = save_name self.verbose = verbose self.refresh = refresh def",
"self.save_path, self.file_name) return data return wrapped_function def print_func(self, func, message): if self.verbose: print(str(func.__name__).ljust(20,",
"self.file_name = func.__name__ if self.file_name is None else self.file_name @wraps(func) def wrapped_function(*args, **kwargs):",
"class load_save: def __init__(self, save_path, save_name=None, verbose=True, refresh=False): self.save_path = reformat_path(save_path) self.file_name =",
"is None else self.file_name @wraps(func) def wrapped_function(*args, **kwargs): if self.refresh: self.print_func(func, 'start to",
"import save_mkdir, save_file, load_file from minder_utils.util.util import reformat_path class load_save: def __init__(self, save_path,",
"def wrapped_function(*args, **kwargs): if self.refresh: self.print_func(func, 'start to refresh the data') data =",
"func(*args, **kwargs) save_file(data, self.save_path, self.file_name) else: try: data = load_file(self.save_path, self.file_name) self.print_func(func, 'loading",
"= save_name self.verbose = verbose self.refresh = refresh def __call__(self, func): self.file_name =",
"None else self.file_name @wraps(func) def wrapped_function(*args, **kwargs): if self.refresh: self.print_func(func, 'start to refresh",
"FileNotFoundError: save_mkdir(self.save_path) self.print_func(func, 'processing the data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name)",
"'start to refresh the data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) else:",
"@wraps(func) def wrapped_function(*args, **kwargs): if self.refresh: self.print_func(func, 'start to refresh the data') data",
"from minder_utils.util.util import reformat_path class load_save: def __init__(self, save_path, save_name=None, verbose=True, refresh=False): self.save_path",
"reformat_path(save_path) self.file_name = save_name self.verbose = verbose self.refresh = refresh def __call__(self, func):",
"from functools import wraps from minder_utils.util.util import save_mkdir, save_file, load_file from minder_utils.util.util import",
"= func.__name__ if self.file_name is None else self.file_name @wraps(func) def wrapped_function(*args, **kwargs): if",
"def __init__(self, save_path, save_name=None, verbose=True, refresh=False): self.save_path = reformat_path(save_path) self.file_name = save_name self.verbose",
"func): self.file_name = func.__name__ if self.file_name is None else self.file_name @wraps(func) def wrapped_function(*args,",
"load_file(self.save_path, self.file_name) self.print_func(func, 'loading processed data') except FileNotFoundError: save_mkdir(self.save_path) self.print_func(func, 'processing the data')",
"data return wrapped_function def print_func(self, func, message): if self.verbose: print(str(func.__name__).ljust(20, ' '), message)",
"if self.file_name is None else self.file_name @wraps(func) def wrapped_function(*args, **kwargs): if self.refresh: self.print_func(func,",
"self.print_func(func, 'loading processed data') except FileNotFoundError: save_mkdir(self.save_path) self.print_func(func, 'processing the data') data =",
"self.file_name @wraps(func) def wrapped_function(*args, **kwargs): if self.refresh: self.print_func(func, 'start to refresh the data')",
"save_path, save_name=None, verbose=True, refresh=False): self.save_path = reformat_path(save_path) self.file_name = save_name self.verbose = verbose",
"refresh=False): self.save_path = reformat_path(save_path) self.file_name = save_name self.verbose = verbose self.refresh = refresh",
"else: try: data = load_file(self.save_path, self.file_name) self.print_func(func, 'loading processed data') except FileNotFoundError: save_mkdir(self.save_path)",
"refresh def __call__(self, func): self.file_name = func.__name__ if self.file_name is None else self.file_name",
"= verbose self.refresh = refresh def __call__(self, func): self.file_name = func.__name__ if self.file_name",
"self.verbose = verbose self.refresh = refresh def __call__(self, func): self.file_name = func.__name__ if",
"self.file_name) return data return wrapped_function def print_func(self, func, message): if self.verbose: print(str(func.__name__).ljust(20, '",
"data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) else: try: data = load_file(self.save_path, self.file_name)",
"'processing the data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) return data return",
"wraps from minder_utils.util.util import save_mkdir, save_file, load_file from minder_utils.util.util import reformat_path class load_save:",
"__call__(self, func): self.file_name = func.__name__ if self.file_name is None else self.file_name @wraps(func) def",
"**kwargs) save_file(data, self.save_path, self.file_name) else: try: data = load_file(self.save_path, self.file_name) self.print_func(func, 'loading processed",
"from minder_utils.util.util import save_mkdir, save_file, load_file from minder_utils.util.util import reformat_path class load_save: def",
"functools import wraps from minder_utils.util.util import save_mkdir, save_file, load_file from minder_utils.util.util import reformat_path",
"self.file_name = save_name self.verbose = verbose self.refresh = refresh def __call__(self, func): self.file_name",
"self.refresh: self.print_func(func, 'start to refresh the data') data = func(*args, **kwargs) save_file(data, self.save_path,",
"load_save: def __init__(self, save_path, save_name=None, verbose=True, refresh=False): self.save_path = reformat_path(save_path) self.file_name = save_name",
"= func(*args, **kwargs) save_file(data, self.save_path, self.file_name) return data return wrapped_function def print_func(self, func,",
"save_mkdir(self.save_path) self.print_func(func, 'processing the data') data = func(*args, **kwargs) save_file(data, self.save_path, self.file_name) return",
"return data return wrapped_function def print_func(self, func, message): if self.verbose: print(str(func.__name__).ljust(20, ' '),",
"if self.refresh: self.print_func(func, 'start to refresh the data') data = func(*args, **kwargs) save_file(data,",
"data = load_file(self.save_path, self.file_name) self.print_func(func, 'loading processed data') except FileNotFoundError: save_mkdir(self.save_path) self.print_func(func, 'processing",
"load_file from minder_utils.util.util import reformat_path class load_save: def __init__(self, save_path, save_name=None, verbose=True, refresh=False):",
"import reformat_path class load_save: def __init__(self, save_path, save_name=None, verbose=True, refresh=False): self.save_path = reformat_path(save_path)"
"''' with tf.variable_scope(\"discriminator\",reuse=reuse) as vs: d_bn1 = batch_norm(name='d_bn1') d_bn2 = batch_norm(name='d_bn2') d_bn3 =",
"of 32 in paper n_projs=config.n_stab_proj#64 instead of 32 in paper print(\"WARNING:STAB_PROJ active, using",
"discriminator_on_z(image, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 =",
"= lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8,",
"= batch_norm(name='d_bn2') d_bn3 = batch_norm(name='d_bn3') if not config.stab_proj: h0 = lrelu(conv2d(image, config.df_dim, name='d_h0_conv'))#16,32,32,64",
"tf.variable_scope(\"discriminator\",reuse=reuse) as vs: d_bn1 = batch_norm(name='d_bn1') d_bn2 = batch_norm(name='d_bn2') d_bn3 = batch_norm(name='d_bn3') if",
"s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2) s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2,",
"self_h0 = tf.reshape( z_, [-1, s_h16, s_w16, config.gf_dim * 8]) h0 = tf.nn.relu(g_bn0(self_h0))",
"conv = tf.nn.conv2d(image, w_proj, strides=[1, 2, 2, 1], padding='SAME') b_proj = tf.get_variable('b_proj', [n_projs],#does",
"deconv2d( h3, [batch_size, s_h, s_w, config.c_dim], name='g_h4', with_w=True) out=tf.nn.tanh(h4) variables = tf.contrib.framework.get_variables(vs) return",
"vs: g_bn0 = batch_norm(name='g_bn0') g_bn1 = batch_norm(name='g_bn1') g_bn2 = batch_norm(name='g_bn2') g_bn3 = batch_norm(name='g_bn3')",
"= batch_norm(name='g_bn1') g_bn2 = batch_norm(name='g_bn2') g_bn3 = batch_norm(name='g_bn3') s_h, s_w = config.gf_dim, config.gf_dim#64,64",
"2), conv_out_size_same(s_w4, 2) s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2) # project `z`",
"w_proj = tf.get_variable('w_proj', [5, 5, image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv = tf.nn.conv2d(image, w_proj, strides=[1, 2,",
"#I think this is equivalent to just not letting disc optimize first layer",
"lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits =",
"name='g_h2', with_w=True) h2 = tf.nn.relu(g_bn2(h2)) h3, h3_w, h3_b = deconv2d( h2, [batch_size, s_h2,",
"h0=tf.nn.bias_add(conv,b_proj) h1_ = lrelu(d_bn1(conv2d(h0, config.df_dim*2, name='d_h1_conv')))#16,16,16,128 h1 = add_minibatch_features(h1_, config.df_dim) h2 = lrelu(d_bn2(conv2d(h1,",
"import math from causal_dcgan.ops import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from causal_dcgan.ops import conv2d,deconv2d def conv_out_size_same(size, stride):",
"batch_size like this: batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs') with tf.variable_scope(\"generator\",reuse=reuse) as vs: g_bn0 = batch_norm(name='g_bn0') g_bn1",
"[batch_size, s_h2, s_w2, config.gf_dim*1], name='g_h3', with_w=True) h3 = tf.nn.relu(g_bn3(h3)) h4, h4_w, h4_b =",
"vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image,",
"to a 64x64 images with values in [-1,1] uses batch normalization internally '''",
"h4 = linear(h3_flat, 1, 'd_h3_lin') prob=tf.nn.sigmoid(h4) variables = tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return prob, h4, h1_,",
"return D_labels, D_labels_logits,variables def discriminator_on_z(image, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as vs: dl_bn1",
"h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs)",
"in paper n_projs=config.n_stab_proj#64 instead of 32 in paper print(\"WARNING:STAB_PROJ active, using \",n_projs,\" projections\")",
"[-1,dim3]) D_labels_logits = linear(h3_flat, config.z_dim, 'dzl_h3_Label') D_labels = tf.nn.tanh(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return",
"h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, config.z_dim, 'dzl_h3_Label')",
": first layer activation used to estimate z from : variables list '''",
"around batch_size like this: batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs') with tf.variable_scope(\"generator\",reuse=reuse) as vs: g_bn0 = batch_norm(name='g_bn0')",
"padding='SAME') b_proj = tf.get_variable('b_proj', [n_projs],#does nothing initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj) h1_ = lrelu(d_bn1(conv2d(h0, config.df_dim*2, name='d_h1_conv')))#16,16,16,128",
"s_w, config.c_dim], name='g_h4', with_w=True) out=tf.nn.tanh(h4) variables = tf.contrib.framework.get_variables(vs) return out, variables def DiscriminatorCNN(image,",
"tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0",
"= tf.contrib.framework.get_variables(vs) return out, variables def DiscriminatorCNN(image, config, reuse=None): ''' Discriminator for GAN",
"d_bn3 = batch_norm(name='d_bn3') if not config.stab_proj: h0 = lrelu(conv2d(image, config.df_dim, name='d_h0_conv'))#16,32,32,64 else:#method to",
"= batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0,",
"def discriminator_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1')",
"config.df_dim*2, name='dl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:])",
"h2 = tf.nn.relu(g_bn2(h2)) h3, h3_w, h3_b = deconv2d( h2, [batch_size, s_h2, s_w2, config.gf_dim*1],",
"batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dgl_h0_conv'))#16,32,32,64 h1",
"is equivalent to just not letting disc optimize first layer #and also removing",
"s_w8, config.gf_dim*4], name='g_h1', with_w=True) h1 = tf.nn.relu(g_bn1(h1)) h2, h2_w, h2_b = deconv2d( h1,",
"[batch_size, s_h4, s_w4, config.gf_dim*2], name='g_h2', with_w=True) h2 = tf.nn.relu(g_bn2(h2)) h3, h3_w, h3_b =",
"stddev=0.02, #paper used 8x8 kernel, but I'm using 5x5 because it is more",
"d_w=2, stddev=0.02, #paper used 8x8 kernel, but I'm using 5x5 because it is",
"h1_ = lrelu(d_bn1(conv2d(h0, config.df_dim*2, name='d_h1_conv')))#16,16,16,128 h1 = add_minibatch_features(h1_, config.df_dim) h2 = lrelu(d_bn2(conv2d(h1, config.df_dim*4,",
"name='dzl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, config.z_dim,",
"variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits,variables def discriminator_on_z(image, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_z_labeler\",reuse=reuse)",
"because it is more similar to my achitecture #n_projs=config.df_dim#64 instead of 32 in",
"batch_norm(name='d_bn3') if not config.stab_proj: h0 = lrelu(conv2d(image, config.df_dim, name='d_h0_conv'))#16,32,32,64 else:#method to restrict disc",
"dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, config.z_dim, 'dzl_h3_Label') D_labels = tf.nn.tanh(D_labels_logits) variables =",
"d_bn1 = batch_norm(name='d_bn1') d_bn2 = batch_norm(name='d_bn2') d_bn3 = batch_norm(name='d_bn3') if not config.stab_proj: h0",
"h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dgl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs)",
"= tf.get_variable('w_proj', [5, 5, image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv = tf.nn.conv2d(image, w_proj, strides=[1, 2, 2,",
"import numpy as np slim = tf.contrib.slim import math from causal_dcgan.ops import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features",
"image config : see causal_dcgan/config.py reuse : pass True if not calling for",
"= batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dl_h0_conv'))#16,32,32,64",
"optimize first layer #and also removing nonlinearity #k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, #paper",
"conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2) s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2) # project",
"tf.get_variable('w_proj', [5, 5, image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv = tf.nn.conv2d(image, w_proj, strides=[1, 2, 2, 1],",
"as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 =",
"discriminator_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2",
"s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2) s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2)",
"h4, h4_w, h4_b = deconv2d( h3, [batch_size, s_h, s_w, config.c_dim], name='g_h4', with_w=True) out=tf.nn.tanh(h4)",
"with values in [-1,1] uses batch normalization internally ''' #trying to get around",
"s_h8, s_w8, config.gf_dim*4], name='g_h1', with_w=True) h1 = tf.nn.relu(g_bn1(h1)) h2, h2_w, h2_b = deconv2d(",
"dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) h4 = linear(h3_flat, 1, 'd_h3_lin') prob=tf.nn.sigmoid(h4) variables = tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return",
"with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3')",
"= tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits, variables def discriminator_gen_labeler(image, output_dim, config,",
"paper print(\"WARNING:STAB_PROJ active, using \",n_projs,\" projections\") w_proj = tf.get_variable('w_proj', [5, 5, image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False)",
"#dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) h4 = linear(h3_flat, 1, 'd_h3_lin') prob=tf.nn.sigmoid(h4) variables = tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES)",
"D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits,variables def discriminator_on_z(image, config, reuse=None):",
"= tf.contrib.slim import math from causal_dcgan.ops import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from causal_dcgan.ops import conv2d,deconv2d def",
"config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat,",
"config.df_dim, name='dgl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248 h3",
"I'm using 5x5 because it is more similar to my achitecture #n_projs=config.df_dim#64 instead",
"h1_w, h1_b = deconv2d( h0, [batch_size, s_h8, s_w8, config.gf_dim*4], name='g_h1', with_w=True) h1 =",
"this is equivalent to just not letting disc optimize first layer #and also",
"lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dl_h3_Label') D_labels =",
"config.gf_dim, config.gf_dim#64,64 s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2) s_h4, s_w4 = conv_out_size_same(s_h2,",
"but I'm using 5x5 because it is more similar to my achitecture #n_projs=config.df_dim#64",
"h0 = lrelu(conv2d(image, config.df_dim, name='dgl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1,",
"#batch_size=tf.placeholder_with_default(64,[],'bs') with tf.variable_scope(\"generator\",reuse=reuse) as vs: g_bn0 = batch_norm(name='g_bn0') g_bn1 = batch_norm(name='g_bn1') g_bn2 =",
"= tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return prob, h4, h1_, variables def discriminator_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0]",
"2, 2, 1], padding='SAME') b_proj = tf.get_variable('b_proj', [n_projs],#does nothing initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj) h1_ =",
"dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dzl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128",
"z, config, reuse=None): ''' maps z to a 64x64 images with values in",
"name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables",
"a 64x64 images with values in [-1,1] uses batch normalization internally ''' #trying",
"vs: d_bn1 = batch_norm(name='d_bn1') d_bn2 = batch_norm(name='d_bn2') d_bn3 = batch_norm(name='d_bn3') if not config.stab_proj:",
"disc from winning #I think this is equivalent to just not letting disc",
"return D_labels, D_labels_logits, variables def discriminator_gen_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as",
"from : variables list ''' with tf.variable_scope(\"discriminator\",reuse=reuse) as vs: d_bn1 = batch_norm(name='d_bn1') d_bn2",
"batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dl_h1_conv')))#16,16,16,128 h2 =",
"to my achitecture #n_projs=config.df_dim#64 instead of 32 in paper n_projs=config.n_stab_proj#64 instead of 32",
"= batch_norm(name='g_bn3') s_h, s_w = config.gf_dim, config.gf_dim#64,64 s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w,",
"h3 = tf.nn.relu(g_bn3(h3)) h4, h4_w, h4_b = deconv2d( h3, [batch_size, s_h, s_w, config.c_dim],",
"used to estimate z from : variables list ''' with tf.variable_scope(\"discriminator\",reuse=reuse) as vs:",
"= lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits",
"def DiscriminatorCNN(image, config, reuse=None): ''' Discriminator for GAN model. image : batch_size x",
"h1, [batch_size, s_h4, s_w4, config.gf_dim*2], name='g_h2', with_w=True) h2 = tf.nn.relu(g_bn2(h2)) h3, h3_w, h3_b",
"conv2d,deconv2d def conv_out_size_same(size, stride): return int(math.ceil(float(size) / float(stride))) def GeneratorCNN( z, config, reuse=None):",
"output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 =",
"= conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2) # project `z` and reshape z_, self_h0_w, self_h0_b",
"= tf.reshape( z_, [-1, s_h16, s_w16, config.gf_dim * 8]) h0 = tf.nn.relu(g_bn0(self_h0)) h1,",
"h2, [batch_size, s_h2, s_w2, config.gf_dim*1], name='g_h3', with_w=True) h3 = tf.nn.relu(g_bn3(h3)) h4, h4_w, h4_b",
"math from causal_dcgan.ops import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from causal_dcgan.ops import conv2d,deconv2d def conv_out_size_same(size, stride): return",
"= linear( z, config.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True) self_h0 = tf.reshape( z_, [-1, s_h16, s_w16,",
"h4, h1_, variables def discriminator_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_labeler\",reuse=reuse) as vs:",
"z_, [-1, s_h16, s_w16, config.gf_dim * 8]) h0 = tf.nn.relu(g_bn0(self_h0)) h1, h1_w, h1_b",
"h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dgl_h3_Label')",
"with tf.variable_scope(\"discriminator\",reuse=reuse) as vs: d_bn1 = batch_norm(name='d_bn1') d_bn2 = batch_norm(name='d_bn2') d_bn3 = batch_norm(name='d_bn3')",
"config.df_dim*4, name='d_h2_conv')))#16,16,16,248 h3 = lrelu(d_bn3(conv2d(h2, config.df_dim*8, name='d_h3_conv'))) #print('h3shape: ',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3,",
"with_w=True) out=tf.nn.tanh(h4) variables = tf.contrib.framework.get_variables(vs) return out, variables def DiscriminatorCNN(image, config, reuse=None): '''",
"linear( z, config.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True) self_h0 = tf.reshape( z_, [-1, s_h16, s_w16, config.gf_dim",
"= batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dzl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0,",
"2) s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2) s_h8, s_w8 = conv_out_size_same(s_h4, 2),",
"config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:])",
"to just not letting disc optimize first layer #and also removing nonlinearity #k_h=5,",
"5, image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv = tf.nn.conv2d(image, w_proj, strides=[1, 2, 2, 1], padding='SAME') b_proj",
"/ float(stride))) def GeneratorCNN( z, config, reuse=None): ''' maps z to a 64x64",
"config.df_dim, name='dl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dl_h2_conv')))#16,16,16,248 h3",
"h4_w, h4_b = deconv2d( h3, [batch_size, s_h, s_w, config.c_dim], name='g_h4', with_w=True) out=tf.nn.tanh(h4) variables",
"[n_projs],#does nothing initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj) h1_ = lrelu(d_bn1(conv2d(h0, config.df_dim*2, name='d_h1_conv')))#16,16,16,128 h1 = add_minibatch_features(h1_, config.df_dim)",
"[5, 5, image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv = tf.nn.conv2d(image, w_proj, strides=[1, 2, 2, 1], padding='SAME')",
"batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dzl_h0_conv'))#16,32,32,64 h1",
": pass True if not calling for first time returns: probabilities(real) : logits(real)",
"tf.contrib.slim import math from causal_dcgan.ops import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from causal_dcgan.ops import conv2d,deconv2d def conv_out_size_same(size,",
"tf.reshape( z_, [-1, s_h16, s_w16, config.gf_dim * 8]) h0 = tf.nn.relu(g_bn0(self_h0)) h1, h1_w,",
"[-1, s_h16, s_w16, config.gf_dim * 8]) h0 = tf.nn.relu(g_bn0(self_h0)) h1, h1_w, h1_b =",
"more similar to my achitecture #n_projs=config.df_dim#64 instead of 32 in paper n_projs=config.n_stab_proj#64 instead",
"= lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8,",
"as tf import numpy as np slim = tf.contrib.slim import math from causal_dcgan.ops",
"from causal_dcgan.ops import conv2d,deconv2d def conv_out_size_same(size, stride): return int(math.ceil(float(size) / float(stride))) def GeneratorCNN(",
": see causal_dcgan/config.py reuse : pass True if not calling for first time",
"tf.variable_scope(\"disc_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0",
"lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from causal_dcgan.ops import conv2d,deconv2d def conv_out_size_same(size, stride): return int(math.ceil(float(size) / float(stride))) def",
"s_h4, s_w4, config.gf_dim*2], name='g_h2', with_w=True) h2 = tf.nn.relu(g_bn2(h2)) h3, h3_w, h3_b = deconv2d(",
"initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv = tf.nn.conv2d(image, w_proj, strides=[1, 2, 2, 1], padding='SAME') b_proj = tf.get_variable('b_proj',",
"config.gf_dim*4], name='g_h1', with_w=True) h1 = tf.nn.relu(g_bn1(h1)) h2, h2_w, h2_b = deconv2d( h1, [batch_size,",
"h3_b = deconv2d( h2, [batch_size, s_h2, s_w2, config.gf_dim*1], name='g_h3', with_w=True) h3 = tf.nn.relu(g_bn3(h3))",
"s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2) s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8,",
"linear(h3_flat, output_dim, 'dl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits, variables",
"[batch_size, s_h, s_w, config.c_dim], name='g_h4', with_w=True) out=tf.nn.tanh(h4) variables = tf.contrib.framework.get_variables(vs) return out, variables",
"= batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dzl_h0_conv'))#16,32,32,64",
"g_bn1 = batch_norm(name='g_bn1') g_bn2 = batch_norm(name='g_bn2') g_bn3 = batch_norm(name='g_bn3') s_h, s_w = config.gf_dim,",
"with_w=True) h3 = tf.nn.relu(g_bn3(h3)) h4, h4_w, h4_b = deconv2d( h3, [batch_size, s_h, s_w,",
"g_bn2 = batch_norm(name='g_bn2') g_bn3 = batch_norm(name='g_bn3') s_h, s_w = config.gf_dim, config.gf_dim#64,64 s_h2, s_w2",
"= lrelu(conv2d(image, config.df_dim, name='dgl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4,",
"name='dzl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248 h3 =",
"h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, config.z_dim, 'dzl_h3_Label') D_labels = tf.nn.tanh(D_labels_logits) variables = tf.contrib.framework.get_variables(vs)",
"maps z to a 64x64 images with values in [-1,1] uses batch normalization",
"image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv = tf.nn.conv2d(image, w_proj, strides=[1, 2, 2, 1], padding='SAME') b_proj =",
"name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, config.z_dim, 'dzl_h3_Label') D_labels = tf.nn.tanh(D_labels_logits) variables",
"config.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True) self_h0 = tf.reshape( z_, [-1, s_h16, s_w16, config.gf_dim * 8])",
"letting disc optimize first layer #and also removing nonlinearity #k_h=5, k_w=5, d_h=2, d_w=2,",
"normalization internally ''' #trying to get around batch_size like this: batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs') with",
"batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 =",
"s_h, s_w, config.c_dim], name='g_h4', with_w=True) out=tf.nn.tanh(h4) variables = tf.contrib.framework.get_variables(vs) return out, variables def",
"similar to my achitecture #n_projs=config.df_dim#64 instead of 32 in paper n_projs=config.n_stab_proj#64 instead of",
"name='dzl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3,",
"numpy as np slim = tf.contrib.slim import math from causal_dcgan.ops import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from",
"z, config.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True) self_h0 = tf.reshape( z_, [-1, s_h16, s_w16, config.gf_dim *",
"2) s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2) # project `z` and reshape",
"batch_norm(name='d_bn2') d_bn3 = batch_norm(name='d_bn3') if not config.stab_proj: h0 = lrelu(conv2d(image, config.df_dim, name='d_h0_conv'))#16,32,32,64 else:#method",
"tensorflow as tf import numpy as np slim = tf.contrib.slim import math from",
"restrict disc from winning #I think this is equivalent to just not letting",
"from causal_dcgan.ops import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from causal_dcgan.ops import conv2d,deconv2d def conv_out_size_same(size, stride): return int(math.ceil(float(size)",
"reuse : pass True if not calling for first time returns: probabilities(real) :",
"int(math.ceil(float(size) / float(stride))) def GeneratorCNN( z, config, reuse=None): ''' maps z to a",
"conv_out_size_same(s_w8, 2) # project `z` and reshape z_, self_h0_w, self_h0_b = linear( z,",
"tf.contrib.framework.get_variables(vs) return out, variables def DiscriminatorCNN(image, config, reuse=None): ''' Discriminator for GAN model.",
"out, variables def DiscriminatorCNN(image, config, reuse=None): ''' Discriminator for GAN model. image :",
"lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dgl_h3_conv')))",
"config.df_dim*8, name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, config.z_dim, 'dzl_h3_Label') D_labels = tf.nn.tanh(D_labels_logits)",
"h1, h1_w, h1_b = deconv2d( h0, [batch_size, s_h8, s_w8, config.gf_dim*4], name='g_h1', with_w=True) h1",
"kernel, but I'm using 5x5 because it is more similar to my achitecture",
"= tf.nn.relu(g_bn1(h1)) h2, h2_w, h2_b = deconv2d( h1, [batch_size, s_h4, s_w4, config.gf_dim*2], name='g_h2',",
"prob=tf.nn.sigmoid(h4) variables = tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return prob, h4, h1_, variables def discriminator_labeler(image, output_dim, config,",
"2), conv_out_size_same(s_w2, 2) s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2) s_h16, s_w16 =",
"h3_w, h3_b = deconv2d( h2, [batch_size, s_h2, s_w2, config.gf_dim*1], name='g_h3', with_w=True) h3 =",
"config.gf_dim*1], name='g_h3', with_w=True) h3 = tf.nn.relu(g_bn3(h3)) h4, h4_w, h4_b = deconv2d( h3, [batch_size,",
"just not letting disc optimize first layer #and also removing nonlinearity #k_h=5, k_w=5,",
"def discriminator_on_z(image, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2",
"config.df_dim, name='dzl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248 h3",
"tf.nn.conv2d(image, w_proj, strides=[1, 2, 2, 1], padding='SAME') b_proj = tf.get_variable('b_proj', [n_projs],#does nothing initializer=tf.constant_initializer(0.0),trainable=False)",
"config.df_dim, name='d_h0_conv'))#16,32,32,64 else:#method to restrict disc from winning #I think this is equivalent",
"batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2,",
"= tf.get_variable('b_proj', [n_projs],#does nothing initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj) h1_ = lrelu(d_bn1(conv2d(h0, config.df_dim*2, name='d_h1_conv')))#16,16,16,128 h1 =",
"output_dim, 'dl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits, variables def",
"= deconv2d( h2, [batch_size, s_h2, s_w2, config.gf_dim*1], name='g_h3', with_w=True) h3 = tf.nn.relu(g_bn3(h3)) h4,",
"name='d_h0_conv'))#16,32,32,64 else:#method to restrict disc from winning #I think this is equivalent to",
"list ''' with tf.variable_scope(\"discriminator\",reuse=reuse) as vs: d_bn1 = batch_norm(name='d_bn1') d_bn2 = batch_norm(name='d_bn2') d_bn3",
"# project `z` and reshape z_, self_h0_w, self_h0_b = linear( z, config.gf_dim*8*s_h16*s_w16, 'g_h0_lin',",
"h3_flat=tf.reshape(h3, [-1,dim3]) h4 = linear(h3_flat, 1, 'd_h3_lin') prob=tf.nn.sigmoid(h4) variables = tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return prob,",
"initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj) h1_ = lrelu(d_bn1(conv2d(h0, config.df_dim*2, name='d_h1_conv')))#16,16,16,128 h1 = add_minibatch_features(h1_, config.df_dim) h2 =",
"= tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits,variables def discriminator_on_z(image, config, reuse=None): batch_size=tf.shape(image)[0]",
"#trying to get around batch_size like this: batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs') with tf.variable_scope(\"generator\",reuse=reuse) as vs:",
"g_bn0 = batch_norm(name='g_bn0') g_bn1 = batch_norm(name='g_bn1') g_bn2 = batch_norm(name='g_bn2') g_bn3 = batch_norm(name='g_bn3') s_h,",
"tf.nn.relu(g_bn0(self_h0)) h1, h1_w, h1_b = deconv2d( h0, [batch_size, s_h8, s_w8, config.gf_dim*4], name='g_h1', with_w=True)",
"uses batch normalization internally ''' #trying to get around batch_size like this: batch_size=tf.shape(z)[0]",
"conv_out_size_same(s_w4, 2) s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2) # project `z` and",
"= deconv2d( h1, [batch_size, s_h4, s_w4, config.gf_dim*2], name='g_h2', with_w=True) h2 = tf.nn.relu(g_bn2(h2)) h3,",
"#paper used 8x8 kernel, but I'm using 5x5 because it is more similar",
"= add_minibatch_features(h1_, config.df_dim) h2 = lrelu(d_bn2(conv2d(h1, config.df_dim*4, name='d_h2_conv')))#16,16,16,248 h3 = lrelu(d_bn3(conv2d(h2, config.df_dim*8, name='d_h3_conv')))",
"return int(math.ceil(float(size) / float(stride))) def GeneratorCNN( z, config, reuse=None): ''' maps z to",
"config.c_dim], name='g_h4', with_w=True) out=tf.nn.tanh(h4) variables = tf.contrib.framework.get_variables(vs) return out, variables def DiscriminatorCNN(image, config,",
"name='dl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dl_h2_conv')))#16,16,16,248 h3 =",
"variables = tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return prob, h4, h1_, variables def discriminator_labeler(image, output_dim, config, reuse=None):",
"n_projs=config.n_stab_proj#64 instead of 32 in paper print(\"WARNING:STAB_PROJ active, using \",n_projs,\" projections\") w_proj =",
"calling for first time returns: probabilities(real) : logits(real) : first layer activation used",
"#print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) h4 = linear(h3_flat, 1, 'd_h3_lin') prob=tf.nn.sigmoid(h4) variables =",
"lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits =",
"h1 = tf.nn.relu(g_bn1(h1)) h2, h2_w, h2_b = deconv2d( h1, [batch_size, s_h4, s_w4, config.gf_dim*2],",
"config, reuse=None): ''' maps z to a 64x64 images with values in [-1,1]",
"config.stab_proj: h0 = lrelu(conv2d(image, config.df_dim, name='d_h0_conv'))#16,32,32,64 else:#method to restrict disc from winning #I",
"batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs') with tf.variable_scope(\"generator\",reuse=reuse) as vs: g_bn0 = batch_norm(name='g_bn0') g_bn1 = batch_norm(name='g_bn1') g_bn2",
"h2_b = deconv2d( h1, [batch_size, s_h4, s_w4, config.gf_dim*2], name='g_h2', with_w=True) h2 = tf.nn.relu(g_bn2(h2))",
"* 8]) h0 = tf.nn.relu(g_bn0(self_h0)) h1, h1_w, h1_b = deconv2d( h0, [batch_size, s_h8,",
"h2, h2_w, h2_b = deconv2d( h1, [batch_size, s_h4, s_w4, config.gf_dim*2], name='g_h2', with_w=True) h2",
"s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2) s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4,",
"name='dgl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248 h3 =",
"s_h16, s_w16, config.gf_dim * 8]) h0 = tf.nn.relu(g_bn0(self_h0)) h1, h1_w, h1_b = deconv2d(",
"d_bn2 = batch_norm(name='d_bn2') d_bn3 = batch_norm(name='d_bn3') if not config.stab_proj: h0 = lrelu(conv2d(image, config.df_dim,",
"with tf.variable_scope(\"generator\",reuse=reuse) as vs: g_bn0 = batch_norm(name='g_bn0') g_bn1 = batch_norm(name='g_bn1') g_bn2 = batch_norm(name='g_bn2')",
"h0 = lrelu(conv2d(image, config.df_dim, name='d_h0_conv'))#16,32,32,64 else:#method to restrict disc from winning #I think",
": logits(real) : first layer activation used to estimate z from : variables",
"= lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dgl_h3_Label') D_labels",
"= deconv2d( h0, [batch_size, s_h8, s_w8, config.gf_dim*4], name='g_h1', with_w=True) h1 = tf.nn.relu(g_bn1(h1)) h2,",
"z to a 64x64 images with values in [-1,1] uses batch normalization internally",
"= tf.nn.relu(g_bn3(h3)) h4, h4_w, h4_b = deconv2d( h3, [batch_size, s_h, s_w, config.c_dim], name='g_h4',",
"h0, [batch_size, s_h8, s_w8, config.gf_dim*4], name='g_h1', with_w=True) h1 = tf.nn.relu(g_bn1(h1)) h2, h2_w, h2_b",
"float(stride))) def GeneratorCNN( z, config, reuse=None): ''' maps z to a 64x64 images",
"GeneratorCNN( z, config, reuse=None): ''' maps z to a 64x64 images with values",
"name='d_h3_conv'))) #print('h3shape: ',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) h4 = linear(h3_flat, 1, 'd_h3_lin')",
"conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2) s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2) s_h8, s_w8",
"def discriminator_gen_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1')",
"think this is equivalent to just not letting disc optimize first layer #and",
"to get around batch_size like this: batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs') with tf.variable_scope(\"generator\",reuse=reuse) as vs: g_bn0",
"lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dgl_h3_Label') D_labels =",
"using 5x5 because it is more similar to my achitecture #n_projs=config.df_dim#64 instead of",
"model. image : batch_size x 64x64x3 image config : see causal_dcgan/config.py reuse :",
"True if not calling for first time returns: probabilities(real) : logits(real) : first",
"s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2) # project `z` and reshape z_, self_h0_w,",
"not calling for first time returns: probabilities(real) : logits(real) : first layer activation",
"D_labels, D_labels_logits, variables def discriminator_gen_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as vs:",
"dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dl_h0_conv'))#16,32,32,64 h1 =",
"batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dgl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2,",
"= batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dgl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128 h2",
"also removing nonlinearity #k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, #paper used 8x8 kernel, but",
"to restrict disc from winning #I think this is equivalent to just not",
"for first time returns: probabilities(real) : logits(real) : first layer activation used to",
"config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2')",
"tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits,variables def discriminator_on_z(image, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as vs:",
"= batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dl_h1_conv')))#16,16,16,128 h2",
"deconv2d( h2, [batch_size, s_h2, s_w2, config.gf_dim*1], name='g_h3', with_w=True) h3 = tf.nn.relu(g_bn3(h3)) h4, h4_w,",
"image : batch_size x 64x64x3 image config : see causal_dcgan/config.py reuse : pass",
"#print('h3shape: ',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) h4 = linear(h3_flat, 1, 'd_h3_lin') prob=tf.nn.sigmoid(h4)",
"batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dzl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2,",
"config.df_dim*2, name='d_h1_conv')))#16,16,16,128 h1 = add_minibatch_features(h1_, config.df_dim) h2 = lrelu(d_bn2(conv2d(h1, config.df_dim*4, name='d_h2_conv')))#16,16,16,248 h3 =",
"dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dgl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables =",
"',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) h4 = linear(h3_flat, 1, 'd_h3_lin') prob=tf.nn.sigmoid(h4) variables",
"slim = tf.contrib.slim import math from causal_dcgan.ops import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from causal_dcgan.ops import conv2d,deconv2d",
"= conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2) s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2) s_h8,",
"disc optimize first layer #and also removing nonlinearity #k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02,",
"dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dzl_h0_conv'))#16,32,32,64 h1 =",
"output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 =",
"[-1,dim3]) h4 = linear(h3_flat, 1, 'd_h3_lin') prob=tf.nn.sigmoid(h4) variables = tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return prob, h4,",
"lrelu(conv2d(image, config.df_dim, name='dzl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248",
"my achitecture #n_projs=config.df_dim#64 instead of 32 in paper n_projs=config.n_stab_proj#64 instead of 32 in",
"internally ''' #trying to get around batch_size like this: batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs') with tf.variable_scope(\"generator\",reuse=reuse)",
"z_, self_h0_w, self_h0_b = linear( z, config.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True) self_h0 = tf.reshape( z_,",
": variables list ''' with tf.variable_scope(\"discriminator\",reuse=reuse) as vs: d_bn1 = batch_norm(name='d_bn1') d_bn2 =",
"name='dl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim,",
"tf import numpy as np slim = tf.contrib.slim import math from causal_dcgan.ops import",
"deconv2d( h0, [batch_size, s_h8, s_w8, config.gf_dim*4], name='g_h1', with_w=True) h1 = tf.nn.relu(g_bn1(h1)) h2, h2_w,",
"batch normalization internally ''' #trying to get around batch_size like this: batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs')",
"= batch_norm(name='d_bn1') d_bn2 = batch_norm(name='d_bn2') d_bn3 = batch_norm(name='d_bn3') if not config.stab_proj: h0 =",
"config.gf_dim*2], name='g_h2', with_w=True) h2 = tf.nn.relu(g_bn2(h2)) h3, h3_w, h3_b = deconv2d( h2, [batch_size,",
"s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2) # project `z` and reshape z_,",
"h3, [batch_size, s_h, s_w, config.c_dim], name='g_h4', with_w=True) out=tf.nn.tanh(h4) variables = tf.contrib.framework.get_variables(vs) return out,",
"lrelu(d_bn2(conv2d(h1, config.df_dim*4, name='d_h2_conv')))#16,16,16,248 h3 = lrelu(d_bn3(conv2d(h2, config.df_dim*8, name='d_h3_conv'))) #print('h3shape: ',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:])",
"[-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return",
"= linear(h3_flat, output_dim, 'dl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits,",
"reuse=None): ''' maps z to a 64x64 images with values in [-1,1] uses",
"= conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2) s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2) s_h16,",
"tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return prob, h4, h1_, variables def discriminator_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with",
"with_w=True) h2 = tf.nn.relu(g_bn2(h2)) h3, h3_w, h3_b = deconv2d( h2, [batch_size, s_h2, s_w2,",
"lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits =",
"conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2) # project `z` and reshape z_, self_h0_w, self_h0_b =",
"D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits, variables def discriminator_gen_labeler(image, output_dim,",
"reshape z_, self_h0_w, self_h0_b = linear( z, config.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True) self_h0 = tf.reshape(",
"lrelu(conv2d(image, config.df_dim, name='d_h0_conv'))#16,32,32,64 else:#method to restrict disc from winning #I think this is",
"it is more similar to my achitecture #n_projs=config.df_dim#64 instead of 32 in paper",
"from winning #I think this is equivalent to just not letting disc optimize",
"first time returns: probabilities(real) : logits(real) : first layer activation used to estimate",
"name='g_h3', with_w=True) h3 = tf.nn.relu(g_bn3(h3)) h4, h4_w, h4_b = deconv2d( h3, [batch_size, s_h,",
"like this: batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs') with tf.variable_scope(\"generator\",reuse=reuse) as vs: g_bn0 = batch_norm(name='g_bn0') g_bn1 =",
"nonlinearity #k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, #paper used 8x8 kernel, but I'm using",
"D_labels_logits,variables def discriminator_on_z(image, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1')",
"name='g_h4', with_w=True) out=tf.nn.tanh(h4) variables = tf.contrib.framework.get_variables(vs) return out, variables def DiscriminatorCNN(image, config, reuse=None):",
"variables = tf.contrib.framework.get_variables(vs) return out, variables def DiscriminatorCNN(image, config, reuse=None): ''' Discriminator for",
"linear(h3_flat, 1, 'd_h3_lin') prob=tf.nn.sigmoid(h4) variables = tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return prob, h4, h1_, variables def",
"GAN model. image : batch_size x 64x64x3 image config : see causal_dcgan/config.py reuse",
"D_labels, D_labels_logits,variables def discriminator_on_z(image, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as vs: dl_bn1 =",
"deconv2d( h1, [batch_size, s_h4, s_w4, config.gf_dim*2], name='g_h2', with_w=True) h2 = tf.nn.relu(g_bn2(h2)) h3, h3_w,",
"reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3",
"#and also removing nonlinearity #k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, #paper used 8x8 kernel,",
"config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2')",
"variables def discriminator_gen_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as vs: dl_bn1 =",
"config.df_dim*4, name='dl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat,",
"D_labels_logits = linear(h3_flat, output_dim, 'dgl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels,",
"as np slim = tf.contrib.slim import math from causal_dcgan.ops import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from causal_dcgan.ops",
"dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim,",
"causal_dcgan/config.py reuse : pass True if not calling for first time returns: probabilities(real)",
"import tensorflow as tf import numpy as np slim = tf.contrib.slim import math",
"2, 1], padding='SAME') b_proj = tf.get_variable('b_proj', [n_projs],#does nothing initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj) h1_ = lrelu(d_bn1(conv2d(h0,",
"output_dim, 'dgl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits,variables def discriminator_on_z(image,",
"= tf.nn.relu(g_bn0(self_h0)) h1, h1_w, h1_b = deconv2d( h0, [batch_size, s_h8, s_w8, config.gf_dim*4], name='g_h1',",
"d_h=2, d_w=2, stddev=0.02, #paper used 8x8 kernel, but I'm using 5x5 because it",
"1, 'd_h3_lin') prob=tf.nn.sigmoid(h4) variables = tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return prob, h4, h1_, variables def discriminator_labeler(image,",
"= tf.nn.relu(g_bn2(h2)) h3, h3_w, h3_b = deconv2d( h2, [batch_size, s_h2, s_w2, config.gf_dim*1], name='g_h3',",
"layer activation used to estimate z from : variables list ''' with tf.variable_scope(\"discriminator\",reuse=reuse)",
"= lrelu(d_bn1(conv2d(h0, config.df_dim*2, name='d_h1_conv')))#16,16,16,128 h1 = add_minibatch_features(h1_, config.df_dim) h2 = lrelu(d_bn2(conv2d(h1, config.df_dim*4, name='d_h2_conv')))#16,16,16,248",
": batch_size x 64x64x3 image config : see causal_dcgan/config.py reuse : pass True",
"8x8 kernel, but I'm using 5x5 because it is more similar to my",
"h1_b = deconv2d( h0, [batch_size, s_h8, s_w8, config.gf_dim*4], name='g_h1', with_w=True) h1 = tf.nn.relu(g_bn1(h1))",
"used 8x8 kernel, but I'm using 5x5 because it is more similar to",
"= lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8,",
"batch_norm(name='g_bn2') g_bn3 = batch_norm(name='g_bn3') s_h, s_w = config.gf_dim, config.gf_dim#64,64 s_h2, s_w2 = conv_out_size_same(s_h,",
"= lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dl_h3_Label') D_labels",
"see causal_dcgan/config.py reuse : pass True if not calling for first time returns:",
"layer #and also removing nonlinearity #k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, #paper used 8x8",
"self_h0_w, self_h0_b = linear( z, config.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True) self_h0 = tf.reshape( z_, [-1,",
"import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from causal_dcgan.ops import conv2d,deconv2d def conv_out_size_same(size, stride): return int(math.ceil(float(size) / float(stride)))",
"h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dl_h3_Label')",
"else:#method to restrict disc from winning #I think this is equivalent to just",
"probabilities(real) : logits(real) : first layer activation used to estimate z from :",
"activation used to estimate z from : variables list ''' with tf.variable_scope(\"discriminator\",reuse=reuse) as",
"Discriminator for GAN model. image : batch_size x 64x64x3 image config : see",
"config.df_dim*8, name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dgl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits)",
"if not config.stab_proj: h0 = lrelu(conv2d(image, config.df_dim, name='d_h0_conv'))#16,32,32,64 else:#method to restrict disc from",
"h4_b = deconv2d( h3, [batch_size, s_h, s_w, config.c_dim], name='g_h4', with_w=True) out=tf.nn.tanh(h4) variables =",
"values in [-1,1] uses batch normalization internally ''' #trying to get around batch_size",
"using \",n_projs,\" projections\") w_proj = tf.get_variable('w_proj', [5, 5, image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv = tf.nn.conv2d(image,",
"= lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, config.z_dim, 'dzl_h3_Label') D_labels",
"winning #I think this is equivalent to just not letting disc optimize first",
"tf.variable_scope(\"generator\",reuse=reuse) as vs: g_bn0 = batch_norm(name='g_bn0') g_bn1 = batch_norm(name='g_bn1') g_bn2 = batch_norm(name='g_bn2') g_bn3",
"lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dl_h3_conv')))",
"= tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits,variables def discriminator_on_z(image, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as",
"= lrelu(conv2d(image, config.df_dim, name='dl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4,",
"config, reuse=None): ''' Discriminator for GAN model. image : batch_size x 64x64x3 image",
"pass True if not calling for first time returns: probabilities(real) : logits(real) :",
"lrelu(d_bn1(conv2d(h0, config.df_dim*2, name='d_h1_conv')))#16,16,16,128 h1 = add_minibatch_features(h1_, config.df_dim) h2 = lrelu(d_bn2(conv2d(h1, config.df_dim*4, name='d_h2_conv')))#16,16,16,248 h3",
"g_bn3 = batch_norm(name='g_bn3') s_h, s_w = config.gf_dim, config.gf_dim#64,64 s_h2, s_w2 = conv_out_size_same(s_h, 2),",
"in [-1,1] uses batch normalization internally ''' #trying to get around batch_size like",
"config.df_dim*8, name='d_h3_conv'))) #print('h3shape: ',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) h4 = linear(h3_flat, 1,",
"variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits, variables def discriminator_gen_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0]",
"batch_norm(name='g_bn1') g_bn2 = batch_norm(name='g_bn2') g_bn3 = batch_norm(name='g_bn3') s_h, s_w = config.gf_dim, config.gf_dim#64,64 s_h2,",
"lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dzl_h3_conv')))",
"h3, h3_w, h3_b = deconv2d( h2, [batch_size, s_h2, s_w2, config.gf_dim*1], name='g_h3', with_w=True) h3",
"to estimate z from : variables list ''' with tf.variable_scope(\"discriminator\",reuse=reuse) as vs: d_bn1",
"''' Discriminator for GAN model. image : batch_size x 64x64x3 image config :",
"discriminator_gen_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2",
"= lrelu(conv2d(image, config.df_dim, name='d_h0_conv'))#16,32,32,64 else:#method to restrict disc from winning #I think this",
"tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits, variables def discriminator_gen_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse)",
"h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3])",
"config.df_dim*8, name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits)",
"lrelu(d_bn3(conv2d(h2, config.df_dim*8, name='d_h3_conv'))) #print('h3shape: ',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) h4 = linear(h3_flat,",
"reuse=None): ''' Discriminator for GAN model. image : batch_size x 64x64x3 image config",
"lrelu(conv2d(image, config.df_dim, name='dgl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248",
"s_w2, config.gf_dim*1], name='g_h3', with_w=True) h3 = tf.nn.relu(g_bn3(h3)) h4, h4_w, h4_b = deconv2d( h3,",
"self_h0_b = linear( z, config.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True) self_h0 = tf.reshape( z_, [-1, s_h16,",
"= lrelu(conv2d(image, config.df_dim, name='dzl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4,",
"1], padding='SAME') b_proj = tf.get_variable('b_proj', [n_projs],#does nothing initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj) h1_ = lrelu(d_bn1(conv2d(h0, config.df_dim*2,",
"def GeneratorCNN( z, config, reuse=None): ''' maps z to a 64x64 images with",
"'d_h3_lin') prob=tf.nn.sigmoid(h4) variables = tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return prob, h4, h1_, variables def discriminator_labeler(image, output_dim,",
"= deconv2d( h3, [batch_size, s_h, s_w, config.c_dim], name='g_h4', with_w=True) out=tf.nn.tanh(h4) variables = tf.contrib.framework.get_variables(vs)",
"dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dgl_h0_conv'))#16,32,32,64 h1 =",
"time returns: probabilities(real) : logits(real) : first layer activation used to estimate z",
"h2_w, h2_b = deconv2d( h1, [batch_size, s_h4, s_w4, config.gf_dim*2], name='g_h2', with_w=True) h2 =",
"lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, config.z_dim, 'dzl_h3_Label') D_labels =",
"s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2) s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2)",
"first layer #and also removing nonlinearity #k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, #paper used",
"#n_projs=config.df_dim#64 instead of 32 in paper n_projs=config.n_stab_proj#64 instead of 32 in paper print(\"WARNING:STAB_PROJ",
"''' #trying to get around batch_size like this: batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs') with tf.variable_scope(\"generator\",reuse=reuse) as",
"causal_dcgan.ops import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from causal_dcgan.ops import conv2d,deconv2d def conv_out_size_same(size, stride): return int(math.ceil(float(size) /",
"prob, h4, h1_, variables def discriminator_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_labeler\",reuse=reuse) as",
"with tf.variable_scope(\"disc_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3')",
"s_w = config.gf_dim, config.gf_dim#64,64 s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2) s_h4, s_w4",
"tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0",
"batch_norm(name='g_bn3') s_h, s_w = config.gf_dim, config.gf_dim#64,64 s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2)",
"conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2) s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2) s_h16, s_w16",
"stride): return int(math.ceil(float(size) / float(stride))) def GeneratorCNN( z, config, reuse=None): ''' maps z",
"paper n_projs=config.n_stab_proj#64 instead of 32 in paper print(\"WARNING:STAB_PROJ active, using \",n_projs,\" projections\") w_proj",
"config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:])",
"tf.nn.relu(g_bn1(h1)) h2, h2_w, h2_b = deconv2d( h1, [batch_size, s_h4, s_w4, config.gf_dim*2], name='g_h2', with_w=True)",
"reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3",
"'dgl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits,variables def discriminator_on_z(image, config,",
"= linear(h3_flat, 1, 'd_h3_lin') prob=tf.nn.sigmoid(h4) variables = tf.contrib.framework.get_variables(vs,collection=tf.GraphKeys.TRAINABLE_VARIABLES) return prob, h4, h1_, variables",
"w_proj, strides=[1, 2, 2, 1], padding='SAME') b_proj = tf.get_variable('b_proj', [n_projs],#does nothing initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj)",
"D_labels_logits, variables def discriminator_gen_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as vs: dl_bn1",
"batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dgl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128 h2 =",
"h3 = lrelu(d_bn3(conv2d(h2, config.df_dim*8, name='d_h3_conv'))) #print('h3shape: ',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) h4",
"[batch_size, s_h8, s_w8, config.gf_dim*4], name='g_h1', with_w=True) h1 = tf.nn.relu(g_bn1(h1)) h2, h2_w, h2_b =",
"s_w16, config.gf_dim * 8]) h0 = tf.nn.relu(g_bn0(self_h0)) h1, h1_w, h1_b = deconv2d( h0,",
"instead of 32 in paper n_projs=config.n_stab_proj#64 instead of 32 in paper print(\"WARNING:STAB_PROJ active,",
"h0 = tf.nn.relu(g_bn0(self_h0)) h1, h1_w, h1_b = deconv2d( h0, [batch_size, s_h8, s_w8, config.gf_dim*4],",
"config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat,",
"get around batch_size like this: batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs') with tf.variable_scope(\"generator\",reuse=reuse) as vs: g_bn0 =",
"h1 = add_minibatch_features(h1_, config.df_dim) h2 = lrelu(d_bn2(conv2d(h1, config.df_dim*4, name='d_h2_conv')))#16,16,16,248 h3 = lrelu(d_bn3(conv2d(h2, config.df_dim*8,",
"[-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dgl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return",
"tf.nn.relu(g_bn3(h3)) h4, h4_w, h4_b = deconv2d( h3, [batch_size, s_h, s_w, config.c_dim], name='g_h4', with_w=True)",
"as vs: d_bn1 = batch_norm(name='d_bn1') d_bn2 = batch_norm(name='d_bn2') d_bn3 = batch_norm(name='d_bn3') if not",
"print(\"WARNING:STAB_PROJ active, using \",n_projs,\" projections\") w_proj = tf.get_variable('w_proj', [5, 5, image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv",
"s_w4, config.gf_dim*2], name='g_h2', with_w=True) h2 = tf.nn.relu(g_bn2(h2)) h3, h3_w, h3_b = deconv2d( h2,",
"of 32 in paper print(\"WARNING:STAB_PROJ active, using \",n_projs,\" projections\") w_proj = tf.get_variable('w_proj', [5,",
"projections\") w_proj = tf.get_variable('w_proj', [5, 5, image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv = tf.nn.conv2d(image, w_proj, strides=[1,",
"name='d_h1_conv')))#16,16,16,128 h1 = add_minibatch_features(h1_, config.df_dim) h2 = lrelu(d_bn2(conv2d(h1, config.df_dim*4, name='d_h2_conv')))#16,16,16,248 h3 = lrelu(d_bn3(conv2d(h2,",
"in paper print(\"WARNING:STAB_PROJ active, using \",n_projs,\" projections\") w_proj = tf.get_variable('w_proj', [5, 5, image.get_shape()[-1],n_projs],",
"images with values in [-1,1] uses batch normalization internally ''' #trying to get",
"for GAN model. image : batch_size x 64x64x3 image config : see causal_dcgan/config.py",
"nothing initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj) h1_ = lrelu(d_bn1(conv2d(h0, config.df_dim*2, name='d_h1_conv')))#16,16,16,128 h1 = add_minibatch_features(h1_, config.df_dim) h2",
"h0 = lrelu(conv2d(image, config.df_dim, name='dl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1,",
"= batch_norm(name='d_bn3') if not config.stab_proj: h0 = lrelu(conv2d(image, config.df_dim, name='d_h0_conv'))#16,32,32,64 else:#method to restrict",
"= batch_norm(name='g_bn0') g_bn1 = batch_norm(name='g_bn1') g_bn2 = batch_norm(name='g_bn2') g_bn3 = batch_norm(name='g_bn3') s_h, s_w",
"\",n_projs,\" projections\") w_proj = tf.get_variable('w_proj', [5, 5, image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv = tf.nn.conv2d(image, w_proj,",
"tf.nn.relu(g_bn2(h2)) h3, h3_w, h3_b = deconv2d( h2, [batch_size, s_h2, s_w2, config.gf_dim*1], name='g_h3', with_w=True)",
"config.gf_dim#64,64 s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2) s_h4, s_w4 = conv_out_size_same(s_h2, 2),",
"32 in paper print(\"WARNING:STAB_PROJ active, using \",n_projs,\" projections\") w_proj = tf.get_variable('w_proj', [5, 5,",
"first layer activation used to estimate z from : variables list ''' with",
"b_proj = tf.get_variable('b_proj', [n_projs],#does nothing initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj) h1_ = lrelu(d_bn1(conv2d(h0, config.df_dim*2, name='d_h1_conv')))#16,16,16,128 h1",
"config.df_dim) h2 = lrelu(d_bn2(conv2d(h1, config.df_dim*4, name='d_h2_conv')))#16,16,16,248 h3 = lrelu(d_bn3(conv2d(h2, config.df_dim*8, name='d_h3_conv'))) #print('h3shape: ',h3.get_shape().as_list())",
"h2 = lrelu(d_bn2(conv2d(h1, config.df_dim*4, name='d_h2_conv')))#16,16,16,248 h3 = lrelu(d_bn3(conv2d(h2, config.df_dim*8, name='d_h3_conv'))) #print('h3shape: ',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8)",
"active, using \",n_projs,\" projections\") w_proj = tf.get_variable('w_proj', [5, 5, image.get_shape()[-1],n_projs], initializer=tf.truncated_normal_initializer(stddev=0.02),trainable=False) conv =",
"32 in paper n_projs=config.n_stab_proj#64 instead of 32 in paper print(\"WARNING:STAB_PROJ active, using \",n_projs,\"",
"variables def discriminator_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_labeler\",reuse=reuse) as vs: dl_bn1 =",
"return prob, h4, h1_, variables def discriminator_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_labeler\",reuse=reuse)",
"2) s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2) s_h16, s_w16 = conv_out_size_same(s_h8, 2),",
"conv_out_size_same(s_w, 2) s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2) s_h8, s_w8 = conv_out_size_same(s_h4,",
"variables list ''' with tf.variable_scope(\"discriminator\",reuse=reuse) as vs: d_bn1 = batch_norm(name='d_bn1') d_bn2 = batch_norm(name='d_bn2')",
"return out, variables def DiscriminatorCNN(image, config, reuse=None): ''' Discriminator for GAN model. image",
"variables def DiscriminatorCNN(image, config, reuse=None): ''' Discriminator for GAN model. image : batch_size",
"D_labels_logits = linear(h3_flat, config.z_dim, 'dzl_h3_Label') D_labels = tf.nn.tanh(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels,variables",
"= lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dzl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits",
"64x64x3 image config : see causal_dcgan/config.py reuse : pass True if not calling",
"name='g_h1', with_w=True) h1 = tf.nn.relu(g_bn1(h1)) h2, h2_w, h2_b = deconv2d( h1, [batch_size, s_h4,",
"add_minibatch_features(h1_, config.df_dim) h2 = lrelu(d_bn2(conv2d(h1, config.df_dim*4, name='d_h2_conv')))#16,16,16,248 h3 = lrelu(d_bn3(conv2d(h2, config.df_dim*8, name='d_h3_conv'))) #print('h3shape:",
"this: batch_size=tf.shape(z)[0] #batch_size=tf.placeholder_with_default(64,[],'bs') with tf.variable_scope(\"generator\",reuse=reuse) as vs: g_bn0 = batch_norm(name='g_bn0') g_bn1 = batch_norm(name='g_bn1')",
"h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3])",
"k_w=5, d_h=2, d_w=2, stddev=0.02, #paper used 8x8 kernel, but I'm using 5x5 because",
"= batch_norm(name='g_bn2') g_bn3 = batch_norm(name='g_bn3') s_h, s_w = config.gf_dim, config.gf_dim#64,64 s_h2, s_w2 =",
"equivalent to just not letting disc optimize first layer #and also removing nonlinearity",
"def conv_out_size_same(size, stride): return int(math.ceil(float(size) / float(stride))) def GeneratorCNN( z, config, reuse=None): '''",
"config : see causal_dcgan/config.py reuse : pass True if not calling for first",
"conv_out_size_same(size, stride): return int(math.ceil(float(size) / float(stride))) def GeneratorCNN( z, config, reuse=None): ''' maps",
"instead of 32 in paper print(\"WARNING:STAB_PROJ active, using \",n_projs,\" projections\") w_proj = tf.get_variable('w_proj',",
"lrelu(conv2d(image, config.df_dim, name='dl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dl_h2_conv')))#16,16,16,248",
"batch_size x 64x64x3 image config : see causal_dcgan/config.py reuse : pass True if",
"[-1,1] uses batch normalization internally ''' #trying to get around batch_size like this:",
"not config.stab_proj: h0 = lrelu(conv2d(image, config.df_dim, name='d_h0_conv'))#16,32,32,64 else:#method to restrict disc from winning",
"'dl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits, variables def discriminator_gen_labeler(image,",
"s_h, s_w = config.gf_dim, config.gf_dim#64,64 s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2) s_h4,",
"64x64 images with values in [-1,1] uses batch normalization internally ''' #trying to",
"z from : variables list ''' with tf.variable_scope(\"discriminator\",reuse=reuse) as vs: d_bn1 = batch_norm(name='d_bn1')",
"import conv2d,deconv2d def conv_out_size_same(size, stride): return int(math.ceil(float(size) / float(stride))) def GeneratorCNN( z, config,",
"= tf.nn.conv2d(image, w_proj, strides=[1, 2, 2, 1], padding='SAME') b_proj = tf.get_variable('b_proj', [n_projs],#does nothing",
"8]) h0 = tf.nn.relu(g_bn0(self_h0)) h1, h1_w, h1_b = deconv2d( h0, [batch_size, s_h8, s_w8,",
"tf.get_variable('b_proj', [n_projs],#does nothing initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj) h1_ = lrelu(d_bn1(conv2d(h0, config.df_dim*2, name='d_h1_conv')))#16,16,16,128 h1 = add_minibatch_features(h1_,",
"batch_norm(name='g_bn0') g_bn1 = batch_norm(name='g_bn1') g_bn2 = batch_norm(name='g_bn2') g_bn3 = batch_norm(name='g_bn3') s_h, s_w =",
"returns: probabilities(real) : logits(real) : first layer activation used to estimate z from",
"removing nonlinearity #k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, #paper used 8x8 kernel, but I'm",
"<gh_stars>0 import tensorflow as tf import numpy as np slim = tf.contrib.slim import",
"reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3",
"2) # project `z` and reshape z_, self_h0_w, self_h0_b = linear( z, config.gf_dim*8*s_h16*s_w16,",
"h1_, variables def discriminator_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_labeler\",reuse=reuse) as vs: dl_bn1",
"batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dzl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128 h2 =",
"name='dgl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim,",
"causal_dcgan.ops import conv2d,deconv2d def conv_out_size_same(size, stride): return int(math.ceil(float(size) / float(stride))) def GeneratorCNN( z,",
"dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dl_h1_conv')))#16,16,16,128",
"= conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2) s_h16, s_w16 = conv_out_size_same(s_h8, 2), conv_out_size_same(s_w8, 2) #",
"= batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dgl_h0_conv'))#16,32,32,64",
"batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_gen_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 =",
"= batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dgl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0,",
"= lrelu(d_bn3(conv2d(h2, config.df_dim*8, name='d_h3_conv'))) #print('h3shape: ',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) h4 =",
"conv_out_size_same(s_w2, 2) s_h8, s_w8 = conv_out_size_same(s_h4, 2), conv_out_size_same(s_w4, 2) s_h16, s_w16 = conv_out_size_same(s_h8,",
"2), conv_out_size_same(s_w8, 2) # project `z` and reshape z_, self_h0_w, self_h0_b = linear(",
"is more similar to my achitecture #n_projs=config.df_dim#64 instead of 32 in paper n_projs=config.n_stab_proj#64",
"s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2) s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2)",
"not letting disc optimize first layer #and also removing nonlinearity #k_h=5, k_w=5, d_h=2,",
"tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits,variables def discriminator_on_z(image, config, reuse=None): batch_size=tf.shape(image)[0] with",
"if not calling for first time returns: probabilities(real) : logits(real) : first layer",
"h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3])",
"config.gf_dim * 8]) h0 = tf.nn.relu(g_bn0(self_h0)) h1, h1_w, h1_b = deconv2d( h0, [batch_size,",
"as vs: g_bn0 = batch_norm(name='g_bn0') g_bn1 = batch_norm(name='g_bn1') g_bn2 = batch_norm(name='g_bn2') g_bn3 =",
"name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dgl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables",
"dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits = linear(h3_flat, output_dim, 'dl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables =",
"s_h2, s_w2, config.gf_dim*1], name='g_h3', with_w=True) h3 = tf.nn.relu(g_bn3(h3)) h4, h4_w, h4_b = deconv2d(",
"h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2,",
"achitecture #n_projs=config.df_dim#64 instead of 32 in paper n_projs=config.n_stab_proj#64 instead of 32 in paper",
"= lrelu(d_bn2(conv2d(h1, config.df_dim*4, name='d_h2_conv')))#16,16,16,248 h3 = lrelu(d_bn3(conv2d(h2, config.df_dim*8, name='d_h3_conv'))) #print('h3shape: ',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:])",
"= config.gf_dim, config.gf_dim#64,64 s_h2, s_w2 = conv_out_size_same(s_h, 2), conv_out_size_same(s_w, 2) s_h4, s_w4 =",
"tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits, variables def discriminator_gen_labeler(image, output_dim, config, reuse=None):",
"= tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits, variables def discriminator_gen_labeler(image, output_dim, config, reuse=None): batch_size=tf.shape(image)[0] with",
"= lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3]) D_labels_logits",
"= linear(h3_flat, output_dim, 'dgl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits,variables",
"x 64x64x3 image config : see causal_dcgan/config.py reuse : pass True if not",
"DiscriminatorCNN(image, config, reuse=None): ''' Discriminator for GAN model. image : batch_size x 64x64x3",
"batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dl_h0_conv'))#16,32,32,64 h1",
"name='dl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3,",
"name='d_h2_conv')))#16,16,16,248 h3 = lrelu(d_bn3(conv2d(h2, config.df_dim*8, name='d_h3_conv'))) #print('h3shape: ',h3.get_shape().as_list()) #print('8df_dim:',config.df_dim*8) #dim3=tf.reduce_prod(tf.shape(h3)[1:]) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3, [-1,dim3])",
"''' maps z to a 64x64 images with values in [-1,1] uses batch",
"and reshape z_, self_h0_w, self_h0_b = linear( z, config.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True) self_h0 =",
"strides=[1, 2, 2, 1], padding='SAME') b_proj = tf.get_variable('b_proj', [n_projs],#does nothing initializer=tf.constant_initializer(0.0),trainable=False) h0=tf.nn.bias_add(conv,b_proj) h1_",
"D_labels_logits = linear(h3_flat, output_dim, 'dl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels,",
"with_w=True) h1 = tf.nn.relu(g_bn1(h1)) h2, h2_w, h2_b = deconv2d( h1, [batch_size, s_h4, s_w4,",
"np slim = tf.contrib.slim import math from causal_dcgan.ops import lrelu,linear,conv_cond_concat,batch_norm,add_minibatch_features from causal_dcgan.ops import",
"linear(h3_flat, output_dim, 'dgl_h3_Label') D_labels = tf.nn.sigmoid(D_labels_logits) variables = tf.contrib.framework.get_variables(vs) return D_labels, D_labels_logits,variables def",
"2), conv_out_size_same(s_w, 2) s_h4, s_w4 = conv_out_size_same(s_h2, 2), conv_out_size_same(s_w2, 2) s_h8, s_w8 =",
"with tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 = batch_norm(name='dl_bn3')",
"batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2') dl_bn3 =",
"project `z` and reshape z_, self_h0_w, self_h0_b = linear( z, config.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True)",
"with_w=True) self_h0 = tf.reshape( z_, [-1, s_h16, s_w16, config.gf_dim * 8]) h0 =",
"h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dzl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2,",
"batch_norm(name='d_bn1') d_bn2 = batch_norm(name='d_bn2') d_bn3 = batch_norm(name='d_bn3') if not config.stab_proj: h0 = lrelu(conv2d(image,",
"estimate z from : variables list ''' with tf.variable_scope(\"discriminator\",reuse=reuse) as vs: d_bn1 =",
"name='dgl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dgl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2, config.df_dim*8, name='dgl_h3_conv'))) dim3=np.prod(h3.get_shape().as_list()[1:]) h3_flat=tf.reshape(h3,",
"config, reuse=None): batch_size=tf.shape(image)[0] with tf.variable_scope(\"disc_z_labeler\",reuse=reuse) as vs: dl_bn1 = batch_norm(name='dl_bn1') dl_bn2 = batch_norm(name='dl_bn2')",
"'g_h0_lin', with_w=True) self_h0 = tf.reshape( z_, [-1, s_h16, s_w16, config.gf_dim * 8]) h0",
"5x5 because it is more similar to my achitecture #n_projs=config.df_dim#64 instead of 32",
"= batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dzl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128 h2",
"out=tf.nn.tanh(h4) variables = tf.contrib.framework.get_variables(vs) return out, variables def DiscriminatorCNN(image, config, reuse=None): ''' Discriminator",
"`z` and reshape z_, self_h0_w, self_h0_b = linear( z, config.gf_dim*8*s_h16*s_w16, 'g_h0_lin', with_w=True) self_h0",
"logits(real) : first layer activation used to estimate z from : variables list",
"h0 = lrelu(conv2d(image, config.df_dim, name='dzl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dzl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1,",
"h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dl_h1_conv')))#16,16,16,128 h2 = lrelu(dl_bn2(conv2d(h1, config.df_dim*4, name='dl_h2_conv')))#16,16,16,248 h3 = lrelu(dl_bn3(conv2d(h2,",
"dl_bn3 = batch_norm(name='dl_bn3') h0 = lrelu(conv2d(image, config.df_dim, name='dgl_h0_conv'))#16,32,32,64 h1 = lrelu(dl_bn1(conv2d(h0, config.df_dim*2, name='dgl_h1_conv')))#16,16,16,128",
"#k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, #paper used 8x8 kernel, but I'm using 5x5"
] |
[
"\"name\": utils.required_input(\" Enter name for this configuration: \"), \"host\": utils.required_input(\" Enter IBMi host:",
"'{}': \".format(host)), 'pw': utils.required_pass(\" Enter password: \") } def new_library(args): export_library(args + [append_configuration(new_configuration())[\"name\"]])",
"flag. --creds <user> <password>\") exit(1) creds = {'user': args[creds_idx+1], 'pw': args[creds_idx+2]} else: print(\"Credentials",
"return cfg return None def append_configuration(new_data): if not path.exists(CONFIG_PATH): utils.log(\"{} not found. Creating",
"\"--creds\" in args: creds_idx = args.index(\"--creds\") if creds_idx+2 > len(args): print(\"Not enough arguments",
"= get_credentials(config[\"name\"], config[\"host\"]) ibmi = IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"]) try: ibmi.login(creds) lib_data = ibmi.get_library_data(lib) ibmi.write_file(lib_data,",
"\"IBMi-lib-repo HELP:\", \" [-e <library name>] [--creds <user> <password>] --> Re-export an existing",
"this name.\") exit(1) utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH) + [new_data]) return new_data def new_configuration(): utils.log(\"Creating new",
"ibmi.generate_repo(lib_data) except Exception as e: utils.log(\"Exception occurred. Please yell at the programmer ;",
"if len(args) == 0: print(\"Not enough arguments passed.\") return False for cmd in",
"utils.required_input(\" Enter user for host '{}': \".format(host)), 'pw': utils.required_pass(\" Enter password: \") }",
"as e: utils.log(\"Exception occurred. Please yell at the programmer ; {}\".format(e)) traceback.print_exc() def",
"passed.\") return False for cmd in get_commands(): if args[0] == cmd[0]: cmd[2](args) return",
"'user': utils.required_input(\" Enter user for host '{}': \".format(host)), 'pw': utils.required_pass(\" Enter password: \")",
"return False def main(): utils.log(\"Program started.\") if not process_args(sys.argv[1:]): exit(1) if __name__ ==",
"args[creds_idx+2]} else: print(\"Credentials not provided. --creds <user> <password>\\nPrompting for credentials...\") creds = get_credentials(config[\"name\"],",
"not path.exists(CONFIG_PATH): utils.log(\"{} not found. Creating new config file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH, []) elif get_configuration(new_data[\"name\"]):",
"\"library\": utils.required_input(\" Enter library to export: \"), \"output\": utils.required_input(\" Enter output directory path:",
"utils.log(\"{} not found. Creating new config file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH, []) elif get_configuration(new_data[\"name\"]): utils.log(\"ERROR: Configuration",
"Enter user for host '{}': \".format(host)), 'pw': utils.required_pass(\" Enter password: \") } def",
"= args.index(\"--creds\") if creds_idx+2 > len(args): print(\"Not enough arguments for credentials flag. --creds",
"credentials for configuration '{}'...\".format(config_name)) return { 'user': utils.required_input(\" Enter user for host '{}':",
"if not path.exists(CONFIG_PATH): utils.log(\"{} not found. Creating new config file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH, []) elif",
"this configuration: \"), \"host\": utils.required_input(\" Enter IBMi host: \"), \"library\": utils.required_input(\" Enter library",
"Enter library to export: \"), \"output\": utils.required_input(\" Enter output directory path: \"), \"formatting\":",
"credentials flag. --creds <user> <password>\") exit(1) creds = {'user': args[creds_idx+1], 'pw': args[creds_idx+2]} else:",
"lib_data = ibmi.get_library_data(lib) ibmi.write_file(lib_data, '{}/lib_data'.format(lib), ext='json') ibmi.generate_repo(lib_data) except Exception as e: utils.log(\"Exception occurred.",
"<user> <password>] --> Re-export an existing library\", \" [-h] --> Display help information\",",
"in get_commands(): if args[0] == cmd[0]: cmd[2](args) return True print(\"Invalid argument: '{}' Not",
"name.\") exit(1) utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH) + [new_data]) return new_data def new_configuration(): utils.log(\"Creating new configuration...\")",
"utils.log(\"Exception occurred. Please yell at the programmer ; {}\".format(e)) traceback.print_exc() def print_help(args): print(\"\\n\".join([",
"ext='json') ibmi.generate_repo(lib_data) except Exception as e: utils.log(\"Exception occurred. Please yell at the programmer",
"'{}'...\".format(config_name)) return { 'user': utils.required_input(\" Enter user for host '{}': \".format(host)), 'pw': utils.required_pass(\"",
"import path import utils as utils from IBMi import IBMi CONFIG_PATH=\"./config.json\" def get_configuration(name):",
"<user> <password>\") exit(1) creds = {'user': args[creds_idx+1], 'pw': args[creds_idx+2]} else: print(\"Credentials not provided.",
"utils as utils from IBMi import IBMi CONFIG_PATH=\"./config.json\" def get_configuration(name): for cfg in",
"utils.read_file_json(CONFIG_PATH): if cfg[\"name\"] == name: return cfg return None def append_configuration(new_data): if not",
"+ [new_data]) return new_data def new_configuration(): utils.log(\"Creating new configuration...\") return { \"name\": utils.required_input(\"",
"[(\"-e\", 1, export_library), (\"-h\", 0, print_help), (\"-n\", 0, new_library)] def process_args(args): if len(args)",
"user for host '{}': \".format(host)), 'pw': utils.required_pass(\" Enter password: \") } def new_library(args):",
"configuration '{}'...\".format(config_name)) return { 'user': utils.required_input(\" Enter user for host '{}': \".format(host)), 'pw':",
"= IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"]) try: ibmi.login(creds) lib_data = ibmi.get_library_data(lib) ibmi.write_file(lib_data, '{}/lib_data'.format(lib), ext='json') ibmi.generate_repo(lib_data) except",
"<user> <password>] --> Setup a new library\" ])) def get_commands(): return [(\"-e\", 1,",
"[--creds <user> <password>] --> Re-export an existing library\", \" [-h] --> Display help",
"print(\"\\n\".join([ \"IBMi-lib-repo HELP:\", \" [-e <library name>] [--creds <user> <password>] --> Re-export an",
"} def get_credentials(config_name, host): utils.log(\"Fetching credentials for configuration '{}'...\".format(config_name)) return { 'user': utils.required_input(\"",
"0, new_library)] def process_args(args): if len(args) == 0: print(\"Not enough arguments passed.\") return",
"import IBMi CONFIG_PATH=\"./config.json\" def get_configuration(name): for cfg in utils.read_file_json(CONFIG_PATH): if cfg[\"name\"] == name:",
"new config file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH, []) elif get_configuration(new_data[\"name\"]): utils.log(\"ERROR: Configuration already exists by this",
"def get_credentials(config_name, host): utils.log(\"Fetching credentials for configuration '{}'...\".format(config_name)) return { 'user': utils.required_input(\" Enter",
"in args: creds_idx = args.index(\"--creds\") if creds_idx+2 > len(args): print(\"Not enough arguments for",
"[-e <library name>] [--creds <user> <password>] --> Re-export an existing library\", \" [-h]",
"CONFIG_PATH=\"./config.json\" def get_configuration(name): for cfg in utils.read_file_json(CONFIG_PATH): if cfg[\"name\"] == name: return cfg",
"if cfg[\"name\"] == name: return cfg return None def append_configuration(new_data): if not path.exists(CONFIG_PATH):",
"\") } def new_library(args): export_library(args + [append_configuration(new_configuration())[\"name\"]]) def export_library(args): config = get_configuration(args[1]) lib",
"try: ibmi.login(creds) lib_data = ibmi.get_library_data(lib) ibmi.write_file(lib_data, '{}/lib_data'.format(lib), ext='json') ibmi.generate_repo(lib_data) except Exception as e:",
"information\", \" [-n] [--creds <user> <password>] --> Setup a new library\" ])) def",
"cmd[2](args) return True print(\"Invalid argument: '{}' Not found.\".format(args[0])) return False def main(): utils.log(\"Program",
"return { \"name\": utils.required_input(\" Enter name for this configuration: \"), \"host\": utils.required_input(\" Enter",
"Enter name for this configuration: \"), \"host\": utils.required_input(\" Enter IBMi host: \"), \"library\":",
"> len(args): print(\"Not enough arguments for credentials flag. --creds <user> <password>\") exit(1) creds",
"is_req=True), } def get_credentials(config_name, host): utils.log(\"Fetching credentials for configuration '{}'...\".format(config_name)) return { 'user':",
"--> Setup a new library\" ])) def get_commands(): return [(\"-e\", 1, export_library), (\"-h\",",
"from os import path import utils as utils from IBMi import IBMi CONFIG_PATH=\"./config.json\"",
"{'user': args[creds_idx+1], 'pw': args[creds_idx+2]} else: print(\"Credentials not provided. --creds <user> <password>\\nPrompting for credentials...\")",
"1, export_library), (\"-h\", 0, print_help), (\"-n\", 0, new_library)] def process_args(args): if len(args) ==",
"print_help), (\"-n\", 0, new_library)] def process_args(args): if len(args) == 0: print(\"Not enough arguments",
"None def append_configuration(new_data): if not path.exists(CONFIG_PATH): utils.log(\"{} not found. Creating new config file...\".format(CONFIG_PATH))",
"Please yell at the programmer ; {}\".format(e)) traceback.print_exc() def print_help(args): print(\"\\n\".join([ \"IBMi-lib-repo HELP:\",",
"exit(1) utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH) + [new_data]) return new_data def new_configuration(): utils.log(\"Creating new configuration...\") return",
"utils.log(\"ERROR: Configuration already exists by this name.\") exit(1) utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH) + [new_data]) return",
"config = get_configuration(args[1]) lib = config[\"library\"] if \"--creds\" in args: creds_idx = args.index(\"--creds\")",
"exit(1) creds = {'user': args[creds_idx+1], 'pw': args[creds_idx+2]} else: print(\"Credentials not provided. --creds <user>",
"= ibmi.get_library_data(lib) ibmi.write_file(lib_data, '{}/lib_data'.format(lib), ext='json') ibmi.generate_repo(lib_data) except Exception as e: utils.log(\"Exception occurred. Please",
"as utils from IBMi import IBMi CONFIG_PATH=\"./config.json\" def get_configuration(name): for cfg in utils.read_file_json(CONFIG_PATH):",
"\"output\": utils.required_input(\" Enter output directory path: \"), \"formatting\": utils.bool_input(\" Inject additional formatting into",
"args: creds_idx = args.index(\"--creds\") if creds_idx+2 > len(args): print(\"Not enough arguments for credentials",
"yell at the programmer ; {}\".format(e)) traceback.print_exc() def print_help(args): print(\"\\n\".join([ \"IBMi-lib-repo HELP:\", \"",
"[new_data]) return new_data def new_configuration(): utils.log(\"Creating new configuration...\") return { \"name\": utils.required_input(\" Enter",
"--> Display help information\", \" [-n] [--creds <user> <password>] --> Setup a new",
"def get_commands(): return [(\"-e\", 1, export_library), (\"-h\", 0, print_help), (\"-n\", 0, new_library)] def",
"for cfg in utils.read_file_json(CONFIG_PATH): if cfg[\"name\"] == name: return cfg return None def",
"print(\"Not enough arguments passed.\") return False for cmd in get_commands(): if args[0] ==",
"True print(\"Invalid argument: '{}' Not found.\".format(args[0])) return False def main(): utils.log(\"Program started.\") if",
"utils.required_input(\" Enter IBMi host: \"), \"library\": utils.required_input(\" Enter library to export: \"), \"output\":",
"return [(\"-e\", 1, export_library), (\"-h\", 0, print_help), (\"-n\", 0, new_library)] def process_args(args): if",
"if args[0] == cmd[0]: cmd[2](args) return True print(\"Invalid argument: '{}' Not found.\".format(args[0])) return",
"\"), \"formatting\": utils.bool_input(\" Inject additional formatting into source?\", is_req=True), } def get_credentials(config_name, host):",
"new_library(args): export_library(args + [append_configuration(new_configuration())[\"name\"]]) def export_library(args): config = get_configuration(args[1]) lib = config[\"library\"] if",
"os import path import utils as utils from IBMi import IBMi CONFIG_PATH=\"./config.json\" def",
"Inject additional formatting into source?\", is_req=True), } def get_credentials(config_name, host): utils.log(\"Fetching credentials for",
"ibmi.connect(config[\"host\"]) try: ibmi.login(creds) lib_data = ibmi.get_library_data(lib) ibmi.write_file(lib_data, '{}/lib_data'.format(lib), ext='json') ibmi.generate_repo(lib_data) except Exception as",
"name>] [--creds <user> <password>] --> Re-export an existing library\", \" [-h] --> Display",
"{}\".format(e)) traceback.print_exc() def print_help(args): print(\"\\n\".join([ \"IBMi-lib-repo HELP:\", \" [-e <library name>] [--creds <user>",
"path import utils as utils from IBMi import IBMi CONFIG_PATH=\"./config.json\" def get_configuration(name): for",
"library\", \" [-h] --> Display help information\", \" [-n] [--creds <user> <password>] -->",
"== cmd[0]: cmd[2](args) return True print(\"Invalid argument: '{}' Not found.\".format(args[0])) return False def",
"if creds_idx+2 > len(args): print(\"Not enough arguments for credentials flag. --creds <user> <password>\")",
"<user> <password>\\nPrompting for credentials...\") creds = get_credentials(config[\"name\"], config[\"host\"]) ibmi = IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"]) try:",
"get_configuration(args[1]) lib = config[\"library\"] if \"--creds\" in args: creds_idx = args.index(\"--creds\") if creds_idx+2",
"get_credentials(config[\"name\"], config[\"host\"]) ibmi = IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"]) try: ibmi.login(creds) lib_data = ibmi.get_library_data(lib) ibmi.write_file(lib_data, '{}/lib_data'.format(lib),",
"IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"]) try: ibmi.login(creds) lib_data = ibmi.get_library_data(lib) ibmi.write_file(lib_data, '{}/lib_data'.format(lib), ext='json') ibmi.generate_repo(lib_data) except Exception",
"name: return cfg return None def append_configuration(new_data): if not path.exists(CONFIG_PATH): utils.log(\"{} not found.",
"print(\"Not enough arguments for credentials flag. --creds <user> <password>\") exit(1) creds = {'user':",
"host '{}': \".format(host)), 'pw': utils.required_pass(\" Enter password: \") } def new_library(args): export_library(args +",
"path: \"), \"formatting\": utils.bool_input(\" Inject additional formatting into source?\", is_req=True), } def get_credentials(config_name,",
"utils.bool_input(\" Inject additional formatting into source?\", is_req=True), } def get_credentials(config_name, host): utils.log(\"Fetching credentials",
"the programmer ; {}\".format(e)) traceback.print_exc() def print_help(args): print(\"\\n\".join([ \"IBMi-lib-repo HELP:\", \" [-e <library",
"print_help(args): print(\"\\n\".join([ \"IBMi-lib-repo HELP:\", \" [-e <library name>] [--creds <user> <password>] --> Re-export",
"print(\"Invalid argument: '{}' Not found.\".format(args[0])) return False def main(): utils.log(\"Program started.\") if not",
"== name: return cfg return None def append_configuration(new_data): if not path.exists(CONFIG_PATH): utils.log(\"{} not",
"credentials...\") creds = get_credentials(config[\"name\"], config[\"host\"]) ibmi = IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"]) try: ibmi.login(creds) lib_data =",
"return new_data def new_configuration(): utils.log(\"Creating new configuration...\") return { \"name\": utils.required_input(\" Enter name",
"programmer ; {}\".format(e)) traceback.print_exc() def print_help(args): print(\"\\n\".join([ \"IBMi-lib-repo HELP:\", \" [-e <library name>]",
"{ \"name\": utils.required_input(\" Enter name for this configuration: \"), \"host\": utils.required_input(\" Enter IBMi",
"Re-export an existing library\", \" [-h] --> Display help information\", \" [-n] [--creds",
"\" [-n] [--creds <user> <password>] --> Setup a new library\" ])) def get_commands():",
"--creds <user> <password>\") exit(1) creds = {'user': args[creds_idx+1], 'pw': args[creds_idx+2]} else: print(\"Credentials not",
"cfg in utils.read_file_json(CONFIG_PATH): if cfg[\"name\"] == name: return cfg return None def append_configuration(new_data):",
"help information\", \" [-n] [--creds <user> <password>] --> Setup a new library\" ]))",
"0, print_help), (\"-n\", 0, new_library)] def process_args(args): if len(args) == 0: print(\"Not enough",
"argument: '{}' Not found.\".format(args[0])) return False def main(): utils.log(\"Program started.\") if not process_args(sys.argv[1:]):",
"by this name.\") exit(1) utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH) + [new_data]) return new_data def new_configuration(): utils.log(\"Creating",
"Creating new config file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH, []) elif get_configuration(new_data[\"name\"]): utils.log(\"ERROR: Configuration already exists by",
"at the programmer ; {}\".format(e)) traceback.print_exc() def print_help(args): print(\"\\n\".join([ \"IBMi-lib-repo HELP:\", \" [-e",
"Configuration already exists by this name.\") exit(1) utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH) + [new_data]) return new_data",
"for credentials flag. --creds <user> <password>\") exit(1) creds = {'user': args[creds_idx+1], 'pw': args[creds_idx+2]}",
"e: utils.log(\"Exception occurred. Please yell at the programmer ; {}\".format(e)) traceback.print_exc() def print_help(args):",
"elif get_configuration(new_data[\"name\"]): utils.log(\"ERROR: Configuration already exists by this name.\") exit(1) utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH) +",
"Display help information\", \" [-n] [--creds <user> <password>] --> Setup a new library\"",
"IBMi host: \"), \"library\": utils.required_input(\" Enter library to export: \"), \"output\": utils.required_input(\" Enter",
"for cmd in get_commands(): if args[0] == cmd[0]: cmd[2](args) return True print(\"Invalid argument:",
"already exists by this name.\") exit(1) utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH) + [new_data]) return new_data def",
"<password>] --> Re-export an existing library\", \" [-h] --> Display help information\", \"",
"enough arguments passed.\") return False for cmd in get_commands(): if args[0] == cmd[0]:",
"not found. Creating new config file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH, []) elif get_configuration(new_data[\"name\"]): utils.log(\"ERROR: Configuration already",
"HELP:\", \" [-e <library name>] [--creds <user> <password>] --> Re-export an existing library\",",
"file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH, []) elif get_configuration(new_data[\"name\"]): utils.log(\"ERROR: Configuration already exists by this name.\") exit(1)",
"existing library\", \" [-h] --> Display help information\", \" [-n] [--creds <user> <password>]",
"+ [append_configuration(new_configuration())[\"name\"]]) def export_library(args): config = get_configuration(args[1]) lib = config[\"library\"] if \"--creds\" in",
"for credentials...\") creds = get_credentials(config[\"name\"], config[\"host\"]) ibmi = IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"]) try: ibmi.login(creds) lib_data",
"os, traceback from os import path import utils as utils from IBMi import",
"cmd[0]: cmd[2](args) return True print(\"Invalid argument: '{}' Not found.\".format(args[0])) return False def main():",
"0: print(\"Not enough arguments passed.\") return False for cmd in get_commands(): if args[0]",
"a new library\" ])) def get_commands(): return [(\"-e\", 1, export_library), (\"-h\", 0, print_help),",
"import utils as utils from IBMi import IBMi CONFIG_PATH=\"./config.json\" def get_configuration(name): for cfg",
"args.index(\"--creds\") if creds_idx+2 > len(args): print(\"Not enough arguments for credentials flag. --creds <user>",
"False def main(): utils.log(\"Program started.\") if not process_args(sys.argv[1:]): exit(1) if __name__ == \"__main__\":",
"host): utils.log(\"Fetching credentials for configuration '{}'...\".format(config_name)) return { 'user': utils.required_input(\" Enter user for",
"= config[\"library\"] if \"--creds\" in args: creds_idx = args.index(\"--creds\") if creds_idx+2 > len(args):",
"utils.required_input(\" Enter output directory path: \"), \"formatting\": utils.bool_input(\" Inject additional formatting into source?\",",
"except Exception as e: utils.log(\"Exception occurred. Please yell at the programmer ; {}\".format(e))",
"config file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH, []) elif get_configuration(new_data[\"name\"]): utils.log(\"ERROR: Configuration already exists by this name.\")",
"cfg[\"name\"] == name: return cfg return None def append_configuration(new_data): if not path.exists(CONFIG_PATH): utils.log(\"{}",
"creds = get_credentials(config[\"name\"], config[\"host\"]) ibmi = IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"]) try: ibmi.login(creds) lib_data = ibmi.get_library_data(lib)",
"\"formatting\": utils.bool_input(\" Inject additional formatting into source?\", is_req=True), } def get_credentials(config_name, host): utils.log(\"Fetching",
"len(args): print(\"Not enough arguments for credentials flag. --creds <user> <password>\") exit(1) creds =",
"len(args) == 0: print(\"Not enough arguments passed.\") return False for cmd in get_commands():",
"occurred. Please yell at the programmer ; {}\".format(e)) traceback.print_exc() def print_help(args): print(\"\\n\".join([ \"IBMi-lib-repo",
"== 0: print(\"Not enough arguments passed.\") return False for cmd in get_commands(): if",
"config[\"library\"] if \"--creds\" in args: creds_idx = args.index(\"--creds\") if creds_idx+2 > len(args): print(\"Not",
"cfg return None def append_configuration(new_data): if not path.exists(CONFIG_PATH): utils.log(\"{} not found. Creating new",
"export_library), (\"-h\", 0, print_help), (\"-n\", 0, new_library)] def process_args(args): if len(args) == 0:",
"utils from IBMi import IBMi CONFIG_PATH=\"./config.json\" def get_configuration(name): for cfg in utils.read_file_json(CONFIG_PATH): if",
"for host '{}': \".format(host)), 'pw': utils.required_pass(\" Enter password: \") } def new_library(args): export_library(args",
"= get_configuration(args[1]) lib = config[\"library\"] if \"--creds\" in args: creds_idx = args.index(\"--creds\") if",
"if \"--creds\" in args: creds_idx = args.index(\"--creds\") if creds_idx+2 > len(args): print(\"Not enough",
"not provided. --creds <user> <password>\\nPrompting for credentials...\") creds = get_credentials(config[\"name\"], config[\"host\"]) ibmi =",
"password: \") } def new_library(args): export_library(args + [append_configuration(new_configuration())[\"name\"]]) def export_library(args): config = get_configuration(args[1])",
"Enter IBMi host: \"), \"library\": utils.required_input(\" Enter library to export: \"), \"output\": utils.required_input(\"",
"export_library(args + [append_configuration(new_configuration())[\"name\"]]) def export_library(args): config = get_configuration(args[1]) lib = config[\"library\"] if \"--creds\"",
"--creds <user> <password>\\nPrompting for credentials...\") creds = get_credentials(config[\"name\"], config[\"host\"]) ibmi = IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"])",
"\"), \"output\": utils.required_input(\" Enter output directory path: \"), \"formatting\": utils.bool_input(\" Inject additional formatting",
"into source?\", is_req=True), } def get_credentials(config_name, host): utils.log(\"Fetching credentials for configuration '{}'...\".format(config_name)) return",
"'{}' Not found.\".format(args[0])) return False def main(): utils.log(\"Program started.\") if not process_args(sys.argv[1:]): exit(1)",
"new_data def new_configuration(): utils.log(\"Creating new configuration...\") return { \"name\": utils.required_input(\" Enter name for",
"arguments for credentials flag. --creds <user> <password>\") exit(1) creds = {'user': args[creds_idx+1], 'pw':",
"def export_library(args): config = get_configuration(args[1]) lib = config[\"library\"] if \"--creds\" in args: creds_idx",
"cmd in get_commands(): if args[0] == cmd[0]: cmd[2](args) return True print(\"Invalid argument: '{}'",
"return True print(\"Invalid argument: '{}' Not found.\".format(args[0])) return False def main(): utils.log(\"Program started.\")",
"def print_help(args): print(\"\\n\".join([ \"IBMi-lib-repo HELP:\", \" [-e <library name>] [--creds <user> <password>] -->",
"\" [-e <library name>] [--creds <user> <password>] --> Re-export an existing library\", \"",
"return False for cmd in get_commands(): if args[0] == cmd[0]: cmd[2](args) return True",
"IBMi import IBMi CONFIG_PATH=\"./config.json\" def get_configuration(name): for cfg in utils.read_file_json(CONFIG_PATH): if cfg[\"name\"] ==",
"utils.write_file_json(CONFIG_PATH, []) elif get_configuration(new_data[\"name\"]): utils.log(\"ERROR: Configuration already exists by this name.\") exit(1) utils.write_file_json(CONFIG_PATH,",
"export: \"), \"output\": utils.required_input(\" Enter output directory path: \"), \"formatting\": utils.bool_input(\" Inject additional",
"configuration...\") return { \"name\": utils.required_input(\" Enter name for this configuration: \"), \"host\": utils.required_input(\"",
"[append_configuration(new_configuration())[\"name\"]]) def export_library(args): config = get_configuration(args[1]) lib = config[\"library\"] if \"--creds\" in args:",
"\" [-h] --> Display help information\", \" [-n] [--creds <user> <password>] --> Setup",
"'pw': args[creds_idx+2]} else: print(\"Credentials not provided. --creds <user> <password>\\nPrompting for credentials...\") creds =",
"an existing library\", \" [-h] --> Display help information\", \" [-n] [--creds <user>",
"ibmi.write_file(lib_data, '{}/lib_data'.format(lib), ext='json') ibmi.generate_repo(lib_data) except Exception as e: utils.log(\"Exception occurred. Please yell at",
"= {'user': args[creds_idx+1], 'pw': args[creds_idx+2]} else: print(\"Credentials not provided. --creds <user> <password>\\nPrompting for",
"def main(): utils.log(\"Program started.\") if not process_args(sys.argv[1:]): exit(1) if __name__ == \"__main__\": main()",
"traceback from os import path import utils as utils from IBMi import IBMi",
"\"), \"library\": utils.required_input(\" Enter library to export: \"), \"output\": utils.required_input(\" Enter output directory",
"creds_idx+2 > len(args): print(\"Not enough arguments for credentials flag. --creds <user> <password>\") exit(1)",
"traceback.print_exc() def print_help(args): print(\"\\n\".join([ \"IBMi-lib-repo HELP:\", \" [-e <library name>] [--creds <user> <password>]",
"sys, os, traceback from os import path import utils as utils from IBMi",
"for this configuration: \"), \"host\": utils.required_input(\" Enter IBMi host: \"), \"library\": utils.required_input(\" Enter",
"creds_idx = args.index(\"--creds\") if creds_idx+2 > len(args): print(\"Not enough arguments for credentials flag.",
"utils.required_input(\" Enter name for this configuration: \"), \"host\": utils.required_input(\" Enter IBMi host: \"),",
"def process_args(args): if len(args) == 0: print(\"Not enough arguments passed.\") return False for",
"ibmi.login(creds) lib_data = ibmi.get_library_data(lib) ibmi.write_file(lib_data, '{}/lib_data'.format(lib), ext='json') ibmi.generate_repo(lib_data) except Exception as e: utils.log(\"Exception",
"exists by this name.\") exit(1) utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH) + [new_data]) return new_data def new_configuration():",
"Enter password: \") } def new_library(args): export_library(args + [append_configuration(new_configuration())[\"name\"]]) def export_library(args): config =",
"configuration: \"), \"host\": utils.required_input(\" Enter IBMi host: \"), \"library\": utils.required_input(\" Enter library to",
"lib = config[\"library\"] if \"--creds\" in args: creds_idx = args.index(\"--creds\") if creds_idx+2 >",
"])) def get_commands(): return [(\"-e\", 1, export_library), (\"-h\", 0, print_help), (\"-n\", 0, new_library)]",
"IBMi CONFIG_PATH=\"./config.json\" def get_configuration(name): for cfg in utils.read_file_json(CONFIG_PATH): if cfg[\"name\"] == name: return",
"directory path: \"), \"formatting\": utils.bool_input(\" Inject additional formatting into source?\", is_req=True), } def",
"get_configuration(name): for cfg in utils.read_file_json(CONFIG_PATH): if cfg[\"name\"] == name: return cfg return None",
"utils.required_input(\" Enter library to export: \"), \"output\": utils.required_input(\" Enter output directory path: \"),",
"new_configuration(): utils.log(\"Creating new configuration...\") return { \"name\": utils.required_input(\" Enter name for this configuration:",
"else: print(\"Credentials not provided. --creds <user> <password>\\nPrompting for credentials...\") creds = get_credentials(config[\"name\"], config[\"host\"])",
"[]) elif get_configuration(new_data[\"name\"]): utils.log(\"ERROR: Configuration already exists by this name.\") exit(1) utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH)",
"get_commands(): if args[0] == cmd[0]: cmd[2](args) return True print(\"Invalid argument: '{}' Not found.\".format(args[0]))",
"import sys, os, traceback from os import path import utils as utils from",
"from IBMi import IBMi CONFIG_PATH=\"./config.json\" def get_configuration(name): for cfg in utils.read_file_json(CONFIG_PATH): if cfg[\"name\"]",
"def new_configuration(): utils.log(\"Creating new configuration...\") return { \"name\": utils.required_input(\" Enter name for this",
"config[\"host\"]) ibmi = IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"]) try: ibmi.login(creds) lib_data = ibmi.get_library_data(lib) ibmi.write_file(lib_data, '{}/lib_data'.format(lib), ext='json')",
"Enter output directory path: \"), \"formatting\": utils.bool_input(\" Inject additional formatting into source?\", is_req=True),",
"--> Re-export an existing library\", \" [-h] --> Display help information\", \" [-n]",
"get_commands(): return [(\"-e\", 1, export_library), (\"-h\", 0, print_help), (\"-n\", 0, new_library)] def process_args(args):",
"return None def append_configuration(new_data): if not path.exists(CONFIG_PATH): utils.log(\"{} not found. Creating new config",
"return { 'user': utils.required_input(\" Enter user for host '{}': \".format(host)), 'pw': utils.required_pass(\" Enter",
"ibmi = IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"]) try: ibmi.login(creds) lib_data = ibmi.get_library_data(lib) ibmi.write_file(lib_data, '{}/lib_data'.format(lib), ext='json') ibmi.generate_repo(lib_data)",
"Exception as e: utils.log(\"Exception occurred. Please yell at the programmer ; {}\".format(e)) traceback.print_exc()",
"(\"-h\", 0, print_help), (\"-n\", 0, new_library)] def process_args(args): if len(args) == 0: print(\"Not",
"\"host\": utils.required_input(\" Enter IBMi host: \"), \"library\": utils.required_input(\" Enter library to export: \"),",
"new configuration...\") return { \"name\": utils.required_input(\" Enter name for this configuration: \"), \"host\":",
"path.exists(CONFIG_PATH): utils.log(\"{} not found. Creating new config file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH, []) elif get_configuration(new_data[\"name\"]): utils.log(\"ERROR:",
"utils.required_pass(\" Enter password: \") } def new_library(args): export_library(args + [append_configuration(new_configuration())[\"name\"]]) def export_library(args): config",
"library\" ])) def get_commands(): return [(\"-e\", 1, export_library), (\"-h\", 0, print_help), (\"-n\", 0,",
"\"), \"host\": utils.required_input(\" Enter IBMi host: \"), \"library\": utils.required_input(\" Enter library to export:",
"new library\" ])) def get_commands(): return [(\"-e\", 1, export_library), (\"-h\", 0, print_help), (\"-n\",",
"arguments passed.\") return False for cmd in get_commands(): if args[0] == cmd[0]: cmd[2](args)",
"to export: \"), \"output\": utils.required_input(\" Enter output directory path: \"), \"formatting\": utils.bool_input(\" Inject",
"{ 'user': utils.required_input(\" Enter user for host '{}': \".format(host)), 'pw': utils.required_pass(\" Enter password:",
"[-n] [--creds <user> <password>] --> Setup a new library\" ])) def get_commands(): return",
"source?\", is_req=True), } def get_credentials(config_name, host): utils.log(\"Fetching credentials for configuration '{}'...\".format(config_name)) return {",
"get_credentials(config_name, host): utils.log(\"Fetching credentials for configuration '{}'...\".format(config_name)) return { 'user': utils.required_input(\" Enter user",
"; {}\".format(e)) traceback.print_exc() def print_help(args): print(\"\\n\".join([ \"IBMi-lib-repo HELP:\", \" [-e <library name>] [--creds",
"formatting into source?\", is_req=True), } def get_credentials(config_name, host): utils.log(\"Fetching credentials for configuration '{}'...\".format(config_name))",
"append_configuration(new_data): if not path.exists(CONFIG_PATH): utils.log(\"{} not found. Creating new config file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH, [])",
"library to export: \"), \"output\": utils.required_input(\" Enter output directory path: \"), \"formatting\": utils.bool_input(\"",
"in utils.read_file_json(CONFIG_PATH): if cfg[\"name\"] == name: return cfg return None def append_configuration(new_data): if",
"provided. --creds <user> <password>\\nPrompting for credentials...\") creds = get_credentials(config[\"name\"], config[\"host\"]) ibmi = IBMi(out_path=config[\"output\"])",
"utils.read_file_json(CONFIG_PATH) + [new_data]) return new_data def new_configuration(): utils.log(\"Creating new configuration...\") return { \"name\":",
"name for this configuration: \"), \"host\": utils.required_input(\" Enter IBMi host: \"), \"library\": utils.required_input(\"",
"'pw': utils.required_pass(\" Enter password: \") } def new_library(args): export_library(args + [append_configuration(new_configuration())[\"name\"]]) def export_library(args):",
"def new_library(args): export_library(args + [append_configuration(new_configuration())[\"name\"]]) def export_library(args): config = get_configuration(args[1]) lib = config[\"library\"]",
"<password>] --> Setup a new library\" ])) def get_commands(): return [(\"-e\", 1, export_library),",
"utils.log(\"Fetching credentials for configuration '{}'...\".format(config_name)) return { 'user': utils.required_input(\" Enter user for host",
"args[0] == cmd[0]: cmd[2](args) return True print(\"Invalid argument: '{}' Not found.\".format(args[0])) return False",
"(\"-n\", 0, new_library)] def process_args(args): if len(args) == 0: print(\"Not enough arguments passed.\")",
"utils.log(\"Creating new configuration...\") return { \"name\": utils.required_input(\" Enter name for this configuration: \"),",
"enough arguments for credentials flag. --creds <user> <password>\") exit(1) creds = {'user': args[creds_idx+1],",
"new_library)] def process_args(args): if len(args) == 0: print(\"Not enough arguments passed.\") return False",
"Setup a new library\" ])) def get_commands(): return [(\"-e\", 1, export_library), (\"-h\", 0,",
"found.\".format(args[0])) return False def main(): utils.log(\"Program started.\") if not process_args(sys.argv[1:]): exit(1) if __name__",
"found. Creating new config file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH, []) elif get_configuration(new_data[\"name\"]): utils.log(\"ERROR: Configuration already exists",
"[-h] --> Display help information\", \" [-n] [--creds <user> <password>] --> Setup a",
"output directory path: \"), \"formatting\": utils.bool_input(\" Inject additional formatting into source?\", is_req=True), }",
"<library name>] [--creds <user> <password>] --> Re-export an existing library\", \" [-h] -->",
"export_library(args): config = get_configuration(args[1]) lib = config[\"library\"] if \"--creds\" in args: creds_idx =",
"args[creds_idx+1], 'pw': args[creds_idx+2]} else: print(\"Credentials not provided. --creds <user> <password>\\nPrompting for credentials...\") creds",
"additional formatting into source?\", is_req=True), } def get_credentials(config_name, host): utils.log(\"Fetching credentials for configuration",
"utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH) + [new_data]) return new_data def new_configuration(): utils.log(\"Creating new configuration...\") return {",
"creds = {'user': args[creds_idx+1], 'pw': args[creds_idx+2]} else: print(\"Credentials not provided. --creds <user> <password>\\nPrompting",
"'{}/lib_data'.format(lib), ext='json') ibmi.generate_repo(lib_data) except Exception as e: utils.log(\"Exception occurred. Please yell at the",
"process_args(args): if len(args) == 0: print(\"Not enough arguments passed.\") return False for cmd",
"<password>\") exit(1) creds = {'user': args[creds_idx+1], 'pw': args[creds_idx+2]} else: print(\"Credentials not provided. --creds",
"False for cmd in get_commands(): if args[0] == cmd[0]: cmd[2](args) return True print(\"Invalid",
"Not found.\".format(args[0])) return False def main(): utils.log(\"Program started.\") if not process_args(sys.argv[1:]): exit(1) if",
"def get_configuration(name): for cfg in utils.read_file_json(CONFIG_PATH): if cfg[\"name\"] == name: return cfg return",
"def append_configuration(new_data): if not path.exists(CONFIG_PATH): utils.log(\"{} not found. Creating new config file...\".format(CONFIG_PATH)) utils.write_file_json(CONFIG_PATH,",
"} def new_library(args): export_library(args + [append_configuration(new_configuration())[\"name\"]]) def export_library(args): config = get_configuration(args[1]) lib =",
"for configuration '{}'...\".format(config_name)) return { 'user': utils.required_input(\" Enter user for host '{}': \".format(host)),",
"\".format(host)), 'pw': utils.required_pass(\" Enter password: \") } def new_library(args): export_library(args + [append_configuration(new_configuration())[\"name\"]]) def",
"ibmi.get_library_data(lib) ibmi.write_file(lib_data, '{}/lib_data'.format(lib), ext='json') ibmi.generate_repo(lib_data) except Exception as e: utils.log(\"Exception occurred. Please yell",
"get_configuration(new_data[\"name\"]): utils.log(\"ERROR: Configuration already exists by this name.\") exit(1) utils.write_file_json(CONFIG_PATH, utils.read_file_json(CONFIG_PATH) + [new_data])",
"print(\"Credentials not provided. --creds <user> <password>\\nPrompting for credentials...\") creds = get_credentials(config[\"name\"], config[\"host\"]) ibmi",
"<password>\\nPrompting for credentials...\") creds = get_credentials(config[\"name\"], config[\"host\"]) ibmi = IBMi(out_path=config[\"output\"]) ibmi.connect(config[\"host\"]) try: ibmi.login(creds)",
"host: \"), \"library\": utils.required_input(\" Enter library to export: \"), \"output\": utils.required_input(\" Enter output",
"[--creds <user> <password>] --> Setup a new library\" ])) def get_commands(): return [(\"-e\","
] |
[
"\"\": # Only add the value if it is non-empty context[\"value\"] = self.format_value(value)",
"Django 1.9 if not hasattr(forms.Widget, \"format_value\"): def format_value(self, value): return self._format_value(value) class Input(Widget):",
"HTML, as a Unicode string. The 'value' given is not guaranteed to be",
"get_context(self, name, value, attrs=None): context = { \"widget\": self, \"type\": self.input_type, \"name\": name,",
"template_name super(Input, self).__init__(*args, **kwargs) # This attribute is used to inject a surrounding",
"\"input_type\") else False # Backported from Django 1.9 if not hasattr(forms.Widget, \"format_value\"): def",
"or {}) context = flatten_contexts(self.context_instance, context) return loader.render_to_string(template_name, context) class TextInput(Input): template_name =",
"allow stricter comparisons # for widget attrs. See #25. if self.is_hidden: context[\"hidden\"] =",
"if datalist is not None: self.datalist = datalist template_name = kwargs.pop(\"template_name\", None) if",
"'value' given is not guaranteed to be valid input, so subclass implementations should",
"= True if value is None: value = \"\" if value != \"\":",
"new_context.update(flatten_context(context)) for attr in REQUIRED_CONTEXT_ATTRIBTUES: if hasattr(context, attr): setattr(new_context, attr, getattr(context, attr)) return",
"widget attrs. See #25. if self.is_hidden: context[\"hidden\"] = True if value is None:",
"show up only as 'key' # Casting to a string so that it",
"the # floppyforms templatetags, when rendered inside a complete form. self.context_instance = None",
"flat.update(d) return flat else: return context def flatten_contexts(*contexts): \"\"\"Takes a list of context",
"context = flatten_contexts(self.context_instance, context) return loader.render_to_string(template_name, context) class TextInput(Input): template_name = \"widgets/text.html\" input_type",
"\"type\": self.input_type, \"name\": name, \"hidden\": self.is_hidden, \"required\": self.is_required, \"True\": True, } # True",
"= kwargs.pop(\"template_name\", None) if template_name is not None: self.template_name = template_name super(Input, self).__init__(*args,",
"context is not None: new_context.update(flatten_context(context)) for attr in REQUIRED_CONTEXT_ATTRIBTUES: if hasattr(context, attr): setattr(new_context,",
"given is not guaranteed to be valid input, so subclass implementations should program",
"to True # See #25. if not isinstance(attr, bool): context[\"attrs\"][key] = str(attr) if",
"None) if template_name is None: template_name = self.template_name context = self.get_context(name, value, attrs=attrs",
"from the unmaintained https://github.com/jazzband/django-floppyforms to provide a datalist widget. It has not been",
"self.template_name context = self.get_context(name, value, attrs=attrs or {}) context = flatten_contexts(self.context_instance, context) return",
"= formats.localize_input(value) return force_text(value) def get_context(self, name, value, attrs=None): context = { \"widget\":",
"class Input(Widget): template_name = \"widgets/input.html\" input_type = None datalist = None def __init__(self,",
"if kwargs.get(\"attrs\", None) is not None: self.input_type = kwargs[\"attrs\"].pop(\"type\", self.input_type) super(TextInput, self).__init__(*args, **kwargs)",
"for key, attr in context[\"attrs\"].items(): if attr == 1: # 1 == True",
"template_name = self.template_name context = self.get_context(name, value, attrs=attrs or {}) context = flatten_contexts(self.context_instance,",
"for attr in REQUIRED_CONTEXT_ATTRIBTUES: if hasattr(context, attr): setattr(new_context, attr, getattr(context, attr)) return new_context",
"None datalist = None def __init__(self, *args, **kwargs): datalist = kwargs.pop(\"datalist\", None) if",
"def build_attrs(self, extra_attrs=None, **kwargs): \"\"\" Backported from Django 1.10 Helper function for building",
"flatten_contexts(*contexts): \"\"\"Takes a list of context instances and returns a new dict that",
"\"\"\" attrs = dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs) return attrs # Backported from",
"context[\"datalist\"] = self.datalist return context def render(self, name, value, attrs=None, **kwargs): template_name =",
"REQUIRED_CONTEXT_ATTRIBTUES: if hasattr(context, attr): setattr(new_context, attr, getattr(context, attr)) return new_context class Widget(forms.Widget): is_required",
"**kwargs) # This attribute is used to inject a surrounding context in the",
"NotImplementedError(\"subclasses of Widget must provide a render() method\") def build_attrs(self, extra_attrs=None, **kwargs): \"\"\"",
"function for building an attribute dictionary. \"\"\" attrs = dict(self.attrs, **kwargs) if extra_attrs:",
"hasattr(self, \"input_type\") else False # Backported from Django 1.9 if not hasattr(forms.Widget, \"format_value\"):",
"new_context class Widget(forms.Widget): is_required = False def render(self, name, value, attrs=None, renderer=None): \"\"\"",
"if extra_attrs: attrs.update(extra_attrs) return attrs # Backported from Django 1.7 @property def is_hidden(self):",
"\"\"\" raise NotImplementedError(\"subclasses of Widget must provide a render() method\") def build_attrs(self, extra_attrs=None,",
"return loader.render_to_string(template_name, context) class TextInput(Input): template_name = \"widgets/text.html\" input_type = \"text\" def __init__(self,",
"#25. if not isinstance(attr, bool): context[\"attrs\"][key] = str(attr) if self.datalist is not None:",
"Django 1.7 @property def is_hidden(self): return self.input_type == \"hidden\" if hasattr(self, \"input_type\") else",
"attr in REQUIRED_CONTEXT_ATTRIBTUES: if hasattr(context, attr): setattr(new_context, attr, getattr(context, attr)) return new_context class",
"import Context, loader from django.utils import formats from django.utils.encoding import force_text class DictContext(dict):",
"def format_value(self, value): return self._format_value(value) class Input(Widget): template_name = \"widgets/input.html\" input_type = None",
"# Only add the value if it is non-empty context[\"value\"] = self.format_value(value) context.update(self.get_context_data())",
"self.build_attrs(attrs) for key, attr in context[\"attrs\"].items(): if attr == 1: # 1 ==",
"is extracted from the unmaintained https://github.com/jazzband/django-floppyforms to provide a datalist widget. It has",
"attrs.update(extra_attrs) return attrs # Backported from Django 1.7 @property def is_hidden(self): return self.input_type",
"name, value, attrs=None, renderer=None): \"\"\" Returns this Widget rendered as HTML, as a",
"setattr(new_context, attr, getattr(context, attr)) return new_context class Widget(forms.Widget): is_required = False def render(self,",
"them.\"\"\" new_context = DictContext() for context in contexts: if context is not None:",
"1: # 1 == True so 'key=\"1\"' will show up only as 'key'",
"This is extracted from the unmaintained https://github.com/jazzband/django-floppyforms to provide a datalist widget. It",
"template_name = \"widgets/text.html\" input_type = \"text\" def __init__(self, *args, **kwargs): if kwargs.get(\"attrs\", None)",
"= dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs) return attrs # Backported from Django 1.7",
"\"widgets/text.html\" input_type = \"text\" def __init__(self, *args, **kwargs): if kwargs.get(\"attrs\", None) is not",
"input, so subclass implementations should program defensively. \"\"\" raise NotImplementedError(\"subclasses of Widget must",
"1.9 if not hasattr(forms.Widget, \"format_value\"): def format_value(self, value): return self._format_value(value) class Input(Widget): template_name",
"the value if it is non-empty context[\"value\"] = self.format_value(value) context.update(self.get_context_data()) context[\"attrs\"] = self.build_attrs(attrs)",
"= None datalist = None def __init__(self, *args, **kwargs): datalist = kwargs.pop(\"datalist\", None)",
"# This attribute is used to inject a surrounding context in the #",
"Context): flat = {} for d in context.dicts: flat.update(d) return flat else: return",
"self.is_hidden, \"required\": self.is_required, \"True\": True, } # True is injected in the context",
"import forms from django.template import Context, loader from django.utils import formats from django.utils.encoding",
"context[\"value\"] = self.format_value(value) context.update(self.get_context_data()) context[\"attrs\"] = self.build_attrs(attrs) for key, attr in context[\"attrs\"].items(): if",
"self.get_context(name, value, attrs=attrs or {}) context = flatten_contexts(self.context_instance, context) return loader.render_to_string(template_name, context) class",
"str(attr) if self.datalist is not None: context[\"datalist\"] = self.datalist return context def render(self,",
"not None: context[\"datalist\"] = self.datalist return context def render(self, name, value, attrs=None, **kwargs):",
"rendered as HTML, as a Unicode string. The 'value' given is not guaranteed",
"**kwargs): datalist = kwargs.pop(\"datalist\", None) if datalist is not None: self.datalist = datalist",
"if value != \"\": # Only add the value if it is non-empty",
"up only as 'key' # Casting to a string so that it doesn't",
"# Backported from Django 1.7 @property def is_hidden(self): return self.input_type == \"hidden\" if",
"= {} for d in context.dicts: flat.update(d) return flat else: return context def",
"None def __init__(self, *args, **kwargs): datalist = kwargs.pop(\"datalist\", None) if datalist is not",
"new_context = DictContext() for context in contexts: if context is not None: new_context.update(flatten_context(context))",
"\"required\": self.is_required, \"True\": True, } # True is injected in the context to",
"name, value, attrs=None): context = { \"widget\": self, \"type\": self.input_type, \"name\": name, \"hidden\":",
"a new dict that combines all of them.\"\"\" new_context = DictContext() for context",
"= template_name super(Input, self).__init__(*args, **kwargs) # This attribute is used to inject a",
"None: self.template_name = template_name super(Input, self).__init__(*args, **kwargs) # This attribute is used to",
"context def render(self, name, value, attrs=None, **kwargs): template_name = kwargs.pop(\"template_name\", None) if template_name",
"if self.is_hidden: context[\"hidden\"] = True if value is None: value = \"\" if",
"= DictContext() for context in contexts: if context is not None: new_context.update(flatten_context(context)) for",
"context.update(self.get_context_data()) context[\"attrs\"] = self.build_attrs(attrs) for key, attr in context[\"attrs\"].items(): if attr == 1:",
"class TextInput(Input): template_name = \"widgets/text.html\" input_type = \"text\" def __init__(self, *args, **kwargs): if",
"template_name is None: template_name = self.template_name context = self.get_context(name, value, attrs=attrs or {})",
"value, attrs=None): context = { \"widget\": self, \"type\": self.input_type, \"name\": name, \"hidden\": self.is_hidden,",
"input_type = None datalist = None def __init__(self, *args, **kwargs): datalist = kwargs.pop(\"datalist\",",
"a datalist widget. It has not been cleaned up yet. \"\"\" from django",
"= self.build_attrs(attrs) for key, attr in context[\"attrs\"].items(): if attr == 1: # 1",
"a Unicode string. The 'value' given is not guaranteed to be valid input,",
"self.is_hidden: context[\"hidden\"] = True if value is None: value = \"\" if value",
"as a Unicode string. The 'value' given is not guaranteed to be valid",
"None) if datalist is not None: self.datalist = datalist template_name = kwargs.pop(\"template_name\", None)",
"returns a new dict that combines all of them.\"\"\" new_context = DictContext() for",
"Widget must provide a render() method\") def build_attrs(self, extra_attrs=None, **kwargs): \"\"\" Backported from",
"kwargs.pop(\"template_name\", None) if template_name is not None: self.template_name = template_name super(Input, self).__init__(*args, **kwargs)",
"'key' # Casting to a string so that it doesn't equal to True",
"value): if self.is_localized: value = formats.localize_input(value) return force_text(value) def get_context(self, name, value, attrs=None):",
"instances and returns a new dict that combines all of them.\"\"\" new_context =",
"is not None: context[\"datalist\"] = self.datalist return context def render(self, name, value, attrs=None,",
"from Django 1.10 Helper function for building an attribute dictionary. \"\"\" attrs =",
"is_hidden(self): return self.input_type == \"hidden\" if hasattr(self, \"input_type\") else False # Backported from",
"value if it is non-empty context[\"value\"] = self.format_value(value) context.update(self.get_context_data()) context[\"attrs\"] = self.build_attrs(attrs) for",
"value, attrs=attrs or {}) context = flatten_contexts(self.context_instance, context) return loader.render_to_string(template_name, context) class TextInput(Input):",
"value, attrs=None, **kwargs): template_name = kwargs.pop(\"template_name\", None) if template_name is None: template_name =",
"attr)) return new_context class Widget(forms.Widget): is_required = False def render(self, name, value, attrs=None,",
"that combines all of them.\"\"\" new_context = DictContext() for context in contexts: if",
"value != \"\": # Only add the value if it is non-empty context[\"value\"]",
"def render(self, name, value, attrs=None, **kwargs): template_name = kwargs.pop(\"template_name\", None) if template_name is",
"# Backported from Django 1.9 if not hasattr(forms.Widget, \"format_value\"): def format_value(self, value): return",
"None def get_context_data(self): return {} def format_value(self, value): if self.is_localized: value = formats.localize_input(value)",
"= self.template_name context = self.get_context(name, value, attrs=attrs or {}) context = flatten_contexts(self.context_instance, context)",
"= False def render(self, name, value, attrs=None, renderer=None): \"\"\" Returns this Widget rendered",
"is used to inject a surrounding context in the # floppyforms templatetags, when",
"{} def format_value(self, value): if self.is_localized: value = formats.localize_input(value) return force_text(value) def get_context(self,",
"*args, **kwargs): if kwargs.get(\"attrs\", None) is not None: self.input_type = kwargs[\"attrs\"].pop(\"type\", self.input_type) super(TextInput,",
"templatetags, when rendered inside a complete form. self.context_instance = None def get_context_data(self): return",
"render() method\") def build_attrs(self, extra_attrs=None, **kwargs): \"\"\" Backported from Django 1.10 Helper function",
"in context[\"attrs\"].items(): if attr == 1: # 1 == True so 'key=\"1\"' will",
"\"\"\" This is extracted from the unmaintained https://github.com/jazzband/django-floppyforms to provide a datalist widget.",
"if value is None: value = \"\" if value != \"\": # Only",
"if attr == 1: # 1 == True so 'key=\"1\"' will show up",
"if context is not None: new_context.update(flatten_context(context)) for attr in REQUIRED_CONTEXT_ATTRIBTUES: if hasattr(context, attr):",
"from Django 1.7 @property def is_hidden(self): return self.input_type == \"hidden\" if hasattr(self, \"input_type\")",
"attribute is used to inject a surrounding context in the # floppyforms templatetags,",
"used to inject a surrounding context in the # floppyforms templatetags, when rendered",
"attribute dictionary. \"\"\" attrs = dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs) return attrs #",
"# 1 == True so 'key=\"1\"' will show up only as 'key' #",
"= str(attr) if self.datalist is not None: context[\"datalist\"] = self.datalist return context def",
"= self.get_context(name, value, attrs=attrs or {}) context = flatten_contexts(self.context_instance, context) return loader.render_to_string(template_name, context)",
"it is non-empty context[\"value\"] = self.format_value(value) context.update(self.get_context_data()) context[\"attrs\"] = self.build_attrs(attrs) for key, attr",
"import formats from django.utils.encoding import force_text class DictContext(dict): pass REQUIRED_CONTEXT_ATTRIBTUES = ( \"_form_config\",",
"attr, getattr(context, attr)) return new_context class Widget(forms.Widget): is_required = False def render(self, name,",
"\"hidden\": self.is_hidden, \"required\": self.is_required, \"True\": True, } # True is injected in the",
"that it doesn't equal to True # See #25. if not isinstance(attr, bool):",
"flat else: return context def flatten_contexts(*contexts): \"\"\"Takes a list of context instances and",
"return new_context class Widget(forms.Widget): is_required = False def render(self, name, value, attrs=None, renderer=None):",
"subclass implementations should program defensively. \"\"\" raise NotImplementedError(\"subclasses of Widget must provide a",
"inject a surrounding context in the # floppyforms templatetags, when rendered inside a",
"from django import forms from django.template import Context, loader from django.utils import formats",
"} # True is injected in the context to allow stricter comparisons #",
"attrs. See #25. if self.is_hidden: context[\"hidden\"] = True if value is None: value",
"the context to allow stricter comparisons # for widget attrs. See #25. if",
"equal to True # See #25. if not isinstance(attr, bool): context[\"attrs\"][key] = str(attr)",
"attrs=None): context = { \"widget\": self, \"type\": self.input_type, \"name\": name, \"hidden\": self.is_hidden, \"required\":",
"if it is non-empty context[\"value\"] = self.format_value(value) context.update(self.get_context_data()) context[\"attrs\"] = self.build_attrs(attrs) for key,",
"== 1: # 1 == True so 'key=\"1\"' will show up only as",
"\"format_value\"): def format_value(self, value): return self._format_value(value) class Input(Widget): template_name = \"widgets/input.html\" input_type =",
"= \"text\" def __init__(self, *args, **kwargs): if kwargs.get(\"attrs\", None) is not None: self.input_type",
"def __init__(self, *args, **kwargs): if kwargs.get(\"attrs\", None) is not None: self.input_type = kwargs[\"attrs\"].pop(\"type\",",
"1.10 Helper function for building an attribute dictionary. \"\"\" attrs = dict(self.attrs, **kwargs)",
"See #25. if self.is_hidden: context[\"hidden\"] = True if value is None: value =",
"is not guaranteed to be valid input, so subclass implementations should program defensively.",
"is None: template_name = self.template_name context = self.get_context(name, value, attrs=attrs or {}) context",
"method\") def build_attrs(self, extra_attrs=None, **kwargs): \"\"\" Backported from Django 1.10 Helper function for",
"context[\"attrs\"].items(): if attr == 1: # 1 == True so 'key=\"1\"' will show",
"it doesn't equal to True # See #25. if not isinstance(attr, bool): context[\"attrs\"][key]",
"stricter comparisons # for widget attrs. See #25. if self.is_hidden: context[\"hidden\"] = True",
"**kwargs) if extra_attrs: attrs.update(extra_attrs) return attrs # Backported from Django 1.7 @property def",
"self.input_type == \"hidden\" if hasattr(self, \"input_type\") else False # Backported from Django 1.9",
"must provide a render() method\") def build_attrs(self, extra_attrs=None, **kwargs): \"\"\" Backported from Django",
"template_name = kwargs.pop(\"template_name\", None) if template_name is None: template_name = self.template_name context =",
"None: value = \"\" if value != \"\": # Only add the value",
"is_required = False def render(self, name, value, attrs=None, renderer=None): \"\"\" Returns this Widget",
"# for widget attrs. See #25. if self.is_hidden: context[\"hidden\"] = True if value",
"'key=\"1\"' will show up only as 'key' # Casting to a string so",
"Returns this Widget rendered as HTML, as a Unicode string. The 'value' given",
"context def flatten_contexts(*contexts): \"\"\"Takes a list of context instances and returns a new",
"return context def flatten_contexts(*contexts): \"\"\"Takes a list of context instances and returns a",
"\"\"\" from django import forms from django.template import Context, loader from django.utils import",
"1 == True so 'key=\"1\"' will show up only as 'key' # Casting",
"in the # floppyforms templatetags, when rendered inside a complete form. self.context_instance =",
"if template_name is None: template_name = self.template_name context = self.get_context(name, value, attrs=attrs or",
"True if value is None: value = \"\" if value != \"\": #",
"from django.utils.encoding import force_text class DictContext(dict): pass REQUIRED_CONTEXT_ATTRIBTUES = ( \"_form_config\", \"_form_render\", )",
"force_text class DictContext(dict): pass REQUIRED_CONTEXT_ATTRIBTUES = ( \"_form_config\", \"_form_render\", ) def flatten_context(context): if",
"return force_text(value) def get_context(self, name, value, attrs=None): context = { \"widget\": self, \"type\":",
"Backported from Django 1.7 @property def is_hidden(self): return self.input_type == \"hidden\" if hasattr(self,",
"# Casting to a string so that it doesn't equal to True #",
"bool): context[\"attrs\"][key] = str(attr) if self.datalist is not None: context[\"datalist\"] = self.datalist return",
"in REQUIRED_CONTEXT_ATTRIBTUES: if hasattr(context, attr): setattr(new_context, attr, getattr(context, attr)) return new_context class Widget(forms.Widget):",
"not None: new_context.update(flatten_context(context)) for attr in REQUIRED_CONTEXT_ATTRIBTUES: if hasattr(context, attr): setattr(new_context, attr, getattr(context,",
"= datalist template_name = kwargs.pop(\"template_name\", None) if template_name is not None: self.template_name =",
"django import forms from django.template import Context, loader from django.utils import formats from",
"string so that it doesn't equal to True # See #25. if not",
"class Widget(forms.Widget): is_required = False def render(self, name, value, attrs=None, renderer=None): \"\"\" Returns",
"import force_text class DictContext(dict): pass REQUIRED_CONTEXT_ATTRIBTUES = ( \"_form_config\", \"_form_render\", ) def flatten_context(context):",
"program defensively. \"\"\" raise NotImplementedError(\"subclasses of Widget must provide a render() method\") def",
"datalist template_name = kwargs.pop(\"template_name\", None) if template_name is not None: self.template_name = template_name",
"value, attrs=None, renderer=None): \"\"\" Returns this Widget rendered as HTML, as a Unicode",
"context.dicts: flat.update(d) return flat else: return context def flatten_contexts(*contexts): \"\"\"Takes a list of",
"context in the # floppyforms templatetags, when rendered inside a complete form. self.context_instance",
"\"True\": True, } # True is injected in the context to allow stricter",
"should program defensively. \"\"\" raise NotImplementedError(\"subclasses of Widget must provide a render() method\")",
"list of context instances and returns a new dict that combines all of",
"if hasattr(context, attr): setattr(new_context, attr, getattr(context, attr)) return new_context class Widget(forms.Widget): is_required =",
"= kwargs.pop(\"datalist\", None) if datalist is not None: self.datalist = datalist template_name =",
"from Django 1.9 if not hasattr(forms.Widget, \"format_value\"): def format_value(self, value): return self._format_value(value) class",
"The 'value' given is not guaranteed to be valid input, so subclass implementations",
"dict that combines all of them.\"\"\" new_context = DictContext() for context in contexts:",
"up yet. \"\"\" from django import forms from django.template import Context, loader from",
"{}) context = flatten_contexts(self.context_instance, context) return loader.render_to_string(template_name, context) class TextInput(Input): template_name = \"widgets/text.html\"",
"return self.input_type == \"hidden\" if hasattr(self, \"input_type\") else False # Backported from Django",
"return attrs # Backported from Django 1.7 @property def is_hidden(self): return self.input_type ==",
"if self.is_localized: value = formats.localize_input(value) return force_text(value) def get_context(self, name, value, attrs=None): context",
"not hasattr(forms.Widget, \"format_value\"): def format_value(self, value): return self._format_value(value) class Input(Widget): template_name = \"widgets/input.html\"",
"value): return self._format_value(value) class Input(Widget): template_name = \"widgets/input.html\" input_type = None datalist =",
"if isinstance(context, Context): flat = {} for d in context.dicts: flat.update(d) return flat",
"It has not been cleaned up yet. \"\"\" from django import forms from",
"Only add the value if it is non-empty context[\"value\"] = self.format_value(value) context.update(self.get_context_data()) context[\"attrs\"]",
"attrs # Backported from Django 1.7 @property def is_hidden(self): return self.input_type == \"hidden\"",
"to a string so that it doesn't equal to True # See #25.",
"been cleaned up yet. \"\"\" from django import forms from django.template import Context,",
"= None def get_context_data(self): return {} def format_value(self, value): if self.is_localized: value =",
"will show up only as 'key' # Casting to a string so that",
"rendered inside a complete form. self.context_instance = None def get_context_data(self): return {} def",
"input_type = \"text\" def __init__(self, *args, **kwargs): if kwargs.get(\"attrs\", None) is not None:",
"Backported from Django 1.10 Helper function for building an attribute dictionary. \"\"\" attrs",
"getattr(context, attr)) return new_context class Widget(forms.Widget): is_required = False def render(self, name, value,",
"flat = {} for d in context.dicts: flat.update(d) return flat else: return context",
"\"name\": name, \"hidden\": self.is_hidden, \"required\": self.is_required, \"True\": True, } # True is injected",
"**kwargs): template_name = kwargs.pop(\"template_name\", None) if template_name is None: template_name = self.template_name context",
"context instances and returns a new dict that combines all of them.\"\"\" new_context",
"format_value(self, value): return self._format_value(value) class Input(Widget): template_name = \"widgets/input.html\" input_type = None datalist",
"kwargs.pop(\"datalist\", None) if datalist is not None: self.datalist = datalist template_name = kwargs.pop(\"template_name\",",
"yet. \"\"\" from django import forms from django.template import Context, loader from django.utils",
"def flatten_context(context): if isinstance(context, Context): flat = {} for d in context.dicts: flat.update(d)",
"the unmaintained https://github.com/jazzband/django-floppyforms to provide a datalist widget. It has not been cleaned",
"self.input_type, \"name\": name, \"hidden\": self.is_hidden, \"required\": self.is_required, \"True\": True, } # True is",
"template_name is not None: self.template_name = template_name super(Input, self).__init__(*args, **kwargs) # This attribute",
"Widget rendered as HTML, as a Unicode string. The 'value' given is not",
"True is injected in the context to allow stricter comparisons # for widget",
"self.format_value(value) context.update(self.get_context_data()) context[\"attrs\"] = self.build_attrs(attrs) for key, attr in context[\"attrs\"].items(): if attr ==",
"None: context[\"datalist\"] = self.datalist return context def render(self, name, value, attrs=None, **kwargs): template_name",
"isinstance(attr, bool): context[\"attrs\"][key] = str(attr) if self.datalist is not None: context[\"datalist\"] = self.datalist",
"datalist = None def __init__(self, *args, **kwargs): datalist = kwargs.pop(\"datalist\", None) if datalist",
"force_text(value) def get_context(self, name, value, attrs=None): context = { \"widget\": self, \"type\": self.input_type,",
"Unicode string. The 'value' given is not guaranteed to be valid input, so",
"None) if template_name is not None: self.template_name = template_name super(Input, self).__init__(*args, **kwargs) #",
"= \"\" if value != \"\": # Only add the value if it",
"Context, loader from django.utils import formats from django.utils.encoding import force_text class DictContext(dict): pass",
"Widget(forms.Widget): is_required = False def render(self, name, value, attrs=None, renderer=None): \"\"\" Returns this",
"self.is_required, \"True\": True, } # True is injected in the context to allow",
"context to allow stricter comparisons # for widget attrs. See #25. if self.is_hidden:",
"= \"widgets/text.html\" input_type = \"text\" def __init__(self, *args, **kwargs): if kwargs.get(\"attrs\", None) is",
"context = self.get_context(name, value, attrs=attrs or {}) context = flatten_contexts(self.context_instance, context) return loader.render_to_string(template_name,",
"django.utils import formats from django.utils.encoding import force_text class DictContext(dict): pass REQUIRED_CONTEXT_ATTRIBTUES = (",
"else: return context def flatten_contexts(*contexts): \"\"\"Takes a list of context instances and returns",
"is not None: self.template_name = template_name super(Input, self).__init__(*args, **kwargs) # This attribute is",
"complete form. self.context_instance = None def get_context_data(self): return {} def format_value(self, value): if",
"a surrounding context in the # floppyforms templatetags, when rendered inside a complete",
"implementations should program defensively. \"\"\" raise NotImplementedError(\"subclasses of Widget must provide a render()",
"\"widget\": self, \"type\": self.input_type, \"name\": name, \"hidden\": self.is_hidden, \"required\": self.is_required, \"True\": True, }",
"a string so that it doesn't equal to True # See #25. if",
"in contexts: if context is not None: new_context.update(flatten_context(context)) for attr in REQUIRED_CONTEXT_ATTRIBTUES: if",
"defensively. \"\"\" raise NotImplementedError(\"subclasses of Widget must provide a render() method\") def build_attrs(self,",
"d in context.dicts: flat.update(d) return flat else: return context def flatten_contexts(*contexts): \"\"\"Takes a",
"a list of context instances and returns a new dict that combines all",
"Backported from Django 1.9 if not hasattr(forms.Widget, \"format_value\"): def format_value(self, value): return self._format_value(value)",
"if self.datalist is not None: context[\"datalist\"] = self.datalist return context def render(self, name,",
"name, value, attrs=None, **kwargs): template_name = kwargs.pop(\"template_name\", None) if template_name is None: template_name",
"context) return loader.render_to_string(template_name, context) class TextInput(Input): template_name = \"widgets/text.html\" input_type = \"text\" def",
"self.datalist = datalist template_name = kwargs.pop(\"template_name\", None) if template_name is not None: self.template_name",
"be valid input, so subclass implementations should program defensively. \"\"\" raise NotImplementedError(\"subclasses of",
"get_context_data(self): return {} def format_value(self, value): if self.is_localized: value = formats.localize_input(value) return force_text(value)",
"template_name = \"widgets/input.html\" input_type = None datalist = None def __init__(self, *args, **kwargs):",
"datalist widget. It has not been cleaned up yet. \"\"\" from django import",
"\"_form_config\", \"_form_render\", ) def flatten_context(context): if isinstance(context, Context): flat = {} for d",
"Input(Widget): template_name = \"widgets/input.html\" input_type = None datalist = None def __init__(self, *args,",
"flatten_contexts(self.context_instance, context) return loader.render_to_string(template_name, context) class TextInput(Input): template_name = \"widgets/text.html\" input_type = \"text\"",
"attrs=attrs or {}) context = flatten_contexts(self.context_instance, context) return loader.render_to_string(template_name, context) class TextInput(Input): template_name",
"a render() method\") def build_attrs(self, extra_attrs=None, **kwargs): \"\"\" Backported from Django 1.10 Helper",
"DictContext(dict): pass REQUIRED_CONTEXT_ATTRIBTUES = ( \"_form_config\", \"_form_render\", ) def flatten_context(context): if isinstance(context, Context):",
"== True so 'key=\"1\"' will show up only as 'key' # Casting to",
"\"_form_render\", ) def flatten_context(context): if isinstance(context, Context): flat = {} for d in",
"floppyforms templatetags, when rendered inside a complete form. self.context_instance = None def get_context_data(self):",
"building an attribute dictionary. \"\"\" attrs = dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs) return",
"key, attr in context[\"attrs\"].items(): if attr == 1: # 1 == True so",
"\"hidden\" if hasattr(self, \"input_type\") else False # Backported from Django 1.9 if not",
"is injected in the context to allow stricter comparisons # for widget attrs.",
"True so 'key=\"1\"' will show up only as 'key' # Casting to a",
"super(Input, self).__init__(*args, **kwargs) # This attribute is used to inject a surrounding context",
"= kwargs.pop(\"template_name\", None) if template_name is None: template_name = self.template_name context = self.get_context(name,",
"new dict that combines all of them.\"\"\" new_context = DictContext() for context in",
"renderer=None): \"\"\" Returns this Widget rendered as HTML, as a Unicode string. The",
"False def render(self, name, value, attrs=None, renderer=None): \"\"\" Returns this Widget rendered as",
"False # Backported from Django 1.9 if not hasattr(forms.Widget, \"format_value\"): def format_value(self, value):",
"{ \"widget\": self, \"type\": self.input_type, \"name\": name, \"hidden\": self.is_hidden, \"required\": self.is_required, \"True\": True,",
"combines all of them.\"\"\" new_context = DictContext() for context in contexts: if context",
"to be valid input, so subclass implementations should program defensively. \"\"\" raise NotImplementedError(\"subclasses",
"render(self, name, value, attrs=None, **kwargs): template_name = kwargs.pop(\"template_name\", None) if template_name is None:",
"#25. if self.is_hidden: context[\"hidden\"] = True if value is None: value = \"\"",
"an attribute dictionary. \"\"\" attrs = dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs) return attrs",
"attr in context[\"attrs\"].items(): if attr == 1: # 1 == True so 'key=\"1\"'",
"= { \"widget\": self, \"type\": self.input_type, \"name\": name, \"hidden\": self.is_hidden, \"required\": self.is_required, \"True\":",
"( \"_form_config\", \"_form_render\", ) def flatten_context(context): if isinstance(context, Context): flat = {} for",
"# See #25. if not isinstance(attr, bool): context[\"attrs\"][key] = str(attr) if self.datalist is",
"and returns a new dict that combines all of them.\"\"\" new_context = DictContext()",
"not guaranteed to be valid input, so subclass implementations should program defensively. \"\"\"",
"value = \"\" if value != \"\": # Only add the value if",
"**kwargs): \"\"\" Backported from Django 1.10 Helper function for building an attribute dictionary.",
"\"widgets/input.html\" input_type = None datalist = None def __init__(self, *args, **kwargs): datalist =",
"def __init__(self, *args, **kwargs): datalist = kwargs.pop(\"datalist\", None) if datalist is not None:",
"inside a complete form. self.context_instance = None def get_context_data(self): return {} def format_value(self,",
"extra_attrs: attrs.update(extra_attrs) return attrs # Backported from Django 1.7 @property def is_hidden(self): return",
"== \"hidden\" if hasattr(self, \"input_type\") else False # Backported from Django 1.9 if",
"for d in context.dicts: flat.update(d) return flat else: return context def flatten_contexts(*contexts): \"\"\"Takes",
"self.context_instance = None def get_context_data(self): return {} def format_value(self, value): if self.is_localized: value",
"render(self, name, value, attrs=None, renderer=None): \"\"\" Returns this Widget rendered as HTML, as",
"return context def render(self, name, value, attrs=None, **kwargs): template_name = kwargs.pop(\"template_name\", None) if",
"if not isinstance(attr, bool): context[\"attrs\"][key] = str(attr) if self.datalist is not None: context[\"datalist\"]",
"doesn't equal to True # See #25. if not isinstance(attr, bool): context[\"attrs\"][key] =",
"{} for d in context.dicts: flat.update(d) return flat else: return context def flatten_contexts(*contexts):",
"def render(self, name, value, attrs=None, renderer=None): \"\"\" Returns this Widget rendered as HTML,",
"True, } # True is injected in the context to allow stricter comparisons",
"from django.template import Context, loader from django.utils import formats from django.utils.encoding import force_text",
"attrs=None, renderer=None): \"\"\" Returns this Widget rendered as HTML, as a Unicode string.",
"comparisons # for widget attrs. See #25. if self.is_hidden: context[\"hidden\"] = True if",
"raise NotImplementedError(\"subclasses of Widget must provide a render() method\") def build_attrs(self, extra_attrs=None, **kwargs):",
"dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs) return attrs # Backported from Django 1.7 @property",
"to allow stricter comparisons # for widget attrs. See #25. if self.is_hidden: context[\"hidden\"]",
"to provide a datalist widget. It has not been cleaned up yet. \"\"\"",
"cleaned up yet. \"\"\" from django import forms from django.template import Context, loader",
"__init__(self, *args, **kwargs): if kwargs.get(\"attrs\", None) is not None: self.input_type = kwargs[\"attrs\"].pop(\"type\", self.input_type)",
"= flatten_contexts(self.context_instance, context) return loader.render_to_string(template_name, context) class TextInput(Input): template_name = \"widgets/text.html\" input_type =",
"TextInput(Input): template_name = \"widgets/text.html\" input_type = \"text\" def __init__(self, *args, **kwargs): if kwargs.get(\"attrs\",",
"\"\"\" Backported from Django 1.10 Helper function for building an attribute dictionary. \"\"\"",
"only as 'key' # Casting to a string so that it doesn't equal",
"django.template import Context, loader from django.utils import formats from django.utils.encoding import force_text class",
"provide a datalist widget. It has not been cleaned up yet. \"\"\" from",
"https://github.com/jazzband/django-floppyforms to provide a datalist widget. It has not been cleaned up yet.",
"return flat else: return context def flatten_contexts(*contexts): \"\"\"Takes a list of context instances",
"None: new_context.update(flatten_context(context)) for attr in REQUIRED_CONTEXT_ATTRIBTUES: if hasattr(context, attr): setattr(new_context, attr, getattr(context, attr))",
"template_name = kwargs.pop(\"template_name\", None) if template_name is not None: self.template_name = template_name super(Input,",
"\"\"\" Returns this Widget rendered as HTML, as a Unicode string. The 'value'",
"so 'key=\"1\"' will show up only as 'key' # Casting to a string",
"return self._format_value(value) class Input(Widget): template_name = \"widgets/input.html\" input_type = None datalist = None",
"valid input, so subclass implementations should program defensively. \"\"\" raise NotImplementedError(\"subclasses of Widget",
"self.is_localized: value = formats.localize_input(value) return force_text(value) def get_context(self, name, value, attrs=None): context =",
"so subclass implementations should program defensively. \"\"\" raise NotImplementedError(\"subclasses of Widget must provide",
"= \"widgets/input.html\" input_type = None datalist = None def __init__(self, *args, **kwargs): datalist",
"value = formats.localize_input(value) return force_text(value) def get_context(self, name, value, attrs=None): context = {",
"pass REQUIRED_CONTEXT_ATTRIBTUES = ( \"_form_config\", \"_form_render\", ) def flatten_context(context): if isinstance(context, Context): flat",
"as HTML, as a Unicode string. The 'value' given is not guaranteed to",
"def format_value(self, value): if self.is_localized: value = formats.localize_input(value) return force_text(value) def get_context(self, name,",
"else False # Backported from Django 1.9 if not hasattr(forms.Widget, \"format_value\"): def format_value(self,",
"\"\"\"Takes a list of context instances and returns a new dict that combines",
"forms from django.template import Context, loader from django.utils import formats from django.utils.encoding import",
"not None: self.template_name = template_name super(Input, self).__init__(*args, **kwargs) # This attribute is used",
"format_value(self, value): if self.is_localized: value = formats.localize_input(value) return force_text(value) def get_context(self, name, value,",
"self).__init__(*args, **kwargs) # This attribute is used to inject a surrounding context in",
"all of them.\"\"\" new_context = DictContext() for context in contexts: if context is",
"for context in contexts: if context is not None: new_context.update(flatten_context(context)) for attr in",
"loader from django.utils import formats from django.utils.encoding import force_text class DictContext(dict): pass REQUIRED_CONTEXT_ATTRIBTUES",
"for widget attrs. See #25. if self.is_hidden: context[\"hidden\"] = True if value is",
"# floppyforms templatetags, when rendered inside a complete form. self.context_instance = None def",
"\"\" if value != \"\": # Only add the value if it is",
"self, \"type\": self.input_type, \"name\": name, \"hidden\": self.is_hidden, \"required\": self.is_required, \"True\": True, } #",
"formats from django.utils.encoding import force_text class DictContext(dict): pass REQUIRED_CONTEXT_ATTRIBTUES = ( \"_form_config\", \"_form_render\",",
"attr): setattr(new_context, attr, getattr(context, attr)) return new_context class Widget(forms.Widget): is_required = False def",
"hasattr(context, attr): setattr(new_context, attr, getattr(context, attr)) return new_context class Widget(forms.Widget): is_required = False",
"context in contexts: if context is not None: new_context.update(flatten_context(context)) for attr in REQUIRED_CONTEXT_ATTRIBTUES:",
"datalist is not None: self.datalist = datalist template_name = kwargs.pop(\"template_name\", None) if template_name",
"class DictContext(dict): pass REQUIRED_CONTEXT_ATTRIBTUES = ( \"_form_config\", \"_form_render\", ) def flatten_context(context): if isinstance(context,",
"isinstance(context, Context): flat = {} for d in context.dicts: flat.update(d) return flat else:",
"kwargs.pop(\"template_name\", None) if template_name is None: template_name = self.template_name context = self.get_context(name, value,",
"extracted from the unmaintained https://github.com/jazzband/django-floppyforms to provide a datalist widget. It has not",
"extra_attrs=None, **kwargs): \"\"\" Backported from Django 1.10 Helper function for building an attribute",
"Helper function for building an attribute dictionary. \"\"\" attrs = dict(self.attrs, **kwargs) if",
"def is_hidden(self): return self.input_type == \"hidden\" if hasattr(self, \"input_type\") else False # Backported",
"as 'key' # Casting to a string so that it doesn't equal to",
"= self.datalist return context def render(self, name, value, attrs=None, **kwargs): template_name = kwargs.pop(\"template_name\",",
"None: template_name = self.template_name context = self.get_context(name, value, attrs=attrs or {}) context =",
"1.7 @property def is_hidden(self): return self.input_type == \"hidden\" if hasattr(self, \"input_type\") else False",
"@property def is_hidden(self): return self.input_type == \"hidden\" if hasattr(self, \"input_type\") else False #",
"datalist = kwargs.pop(\"datalist\", None) if datalist is not None: self.datalist = datalist template_name",
"loader.render_to_string(template_name, context) class TextInput(Input): template_name = \"widgets/text.html\" input_type = \"text\" def __init__(self, *args,",
"has not been cleaned up yet. \"\"\" from django import forms from django.template",
"self.datalist return context def render(self, name, value, attrs=None, **kwargs): template_name = kwargs.pop(\"template_name\", None)",
"not isinstance(attr, bool): context[\"attrs\"][key] = str(attr) if self.datalist is not None: context[\"datalist\"] =",
"this Widget rendered as HTML, as a Unicode string. The 'value' given is",
"return {} def format_value(self, value): if self.is_localized: value = formats.localize_input(value) return force_text(value) def",
"is None: value = \"\" if value != \"\": # Only add the",
") def flatten_context(context): if isinstance(context, Context): flat = {} for d in context.dicts:",
"attr == 1: # 1 == True so 'key=\"1\"' will show up only",
"self._format_value(value) class Input(Widget): template_name = \"widgets/input.html\" input_type = None datalist = None def",
"if template_name is not None: self.template_name = template_name super(Input, self).__init__(*args, **kwargs) # This",
"add the value if it is non-empty context[\"value\"] = self.format_value(value) context.update(self.get_context_data()) context[\"attrs\"] =",
"hasattr(forms.Widget, \"format_value\"): def format_value(self, value): return self._format_value(value) class Input(Widget): template_name = \"widgets/input.html\" input_type",
"if not hasattr(forms.Widget, \"format_value\"): def format_value(self, value): return self._format_value(value) class Input(Widget): template_name =",
"widget. It has not been cleaned up yet. \"\"\" from django import forms",
"a complete form. self.context_instance = None def get_context_data(self): return {} def format_value(self, value):",
"injected in the context to allow stricter comparisons # for widget attrs. See",
"def get_context_data(self): return {} def format_value(self, value): if self.is_localized: value = formats.localize_input(value) return",
"of context instances and returns a new dict that combines all of them.\"\"\"",
"__init__(self, *args, **kwargs): datalist = kwargs.pop(\"datalist\", None) if datalist is not None: self.datalist",
"context[\"attrs\"] = self.build_attrs(attrs) for key, attr in context[\"attrs\"].items(): if attr == 1: #",
"if hasattr(self, \"input_type\") else False # Backported from Django 1.9 if not hasattr(forms.Widget,",
"in context.dicts: flat.update(d) return flat else: return context def flatten_contexts(*contexts): \"\"\"Takes a list",
"Django 1.10 Helper function for building an attribute dictionary. \"\"\" attrs = dict(self.attrs,",
"= ( \"_form_config\", \"_form_render\", ) def flatten_context(context): if isinstance(context, Context): flat = {}",
"attrs = dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs) return attrs # Backported from Django",
"*args, **kwargs): datalist = kwargs.pop(\"datalist\", None) if datalist is not None: self.datalist =",
"is non-empty context[\"value\"] = self.format_value(value) context.update(self.get_context_data()) context[\"attrs\"] = self.build_attrs(attrs) for key, attr in",
"Casting to a string so that it doesn't equal to True # See",
"of Widget must provide a render() method\") def build_attrs(self, extra_attrs=None, **kwargs): \"\"\" Backported",
"DictContext() for context in contexts: if context is not None: new_context.update(flatten_context(context)) for attr",
"= self.format_value(value) context.update(self.get_context_data()) context[\"attrs\"] = self.build_attrs(attrs) for key, attr in context[\"attrs\"].items(): if attr",
"context[\"attrs\"][key] = str(attr) if self.datalist is not None: context[\"datalist\"] = self.datalist return context",
"context[\"hidden\"] = True if value is None: value = \"\" if value !=",
"to inject a surrounding context in the # floppyforms templatetags, when rendered inside",
"guaranteed to be valid input, so subclass implementations should program defensively. \"\"\" raise",
"formats.localize_input(value) return force_text(value) def get_context(self, name, value, attrs=None): context = { \"widget\": self,",
"See #25. if not isinstance(attr, bool): context[\"attrs\"][key] = str(attr) if self.datalist is not",
"= None def __init__(self, *args, **kwargs): datalist = kwargs.pop(\"datalist\", None) if datalist is",
"unmaintained https://github.com/jazzband/django-floppyforms to provide a datalist widget. It has not been cleaned up",
"in the context to allow stricter comparisons # for widget attrs. See #25.",
"so that it doesn't equal to True # See #25. if not isinstance(attr,",
"string. The 'value' given is not guaranteed to be valid input, so subclass",
"This attribute is used to inject a surrounding context in the # floppyforms",
"def get_context(self, name, value, attrs=None): context = { \"widget\": self, \"type\": self.input_type, \"name\":",
"REQUIRED_CONTEXT_ATTRIBTUES = ( \"_form_config\", \"_form_render\", ) def flatten_context(context): if isinstance(context, Context): flat =",
"not been cleaned up yet. \"\"\" from django import forms from django.template import",
"provide a render() method\") def build_attrs(self, extra_attrs=None, **kwargs): \"\"\" Backported from Django 1.10",
"name, \"hidden\": self.is_hidden, \"required\": self.is_required, \"True\": True, } # True is injected in",
"def flatten_contexts(*contexts): \"\"\"Takes a list of context instances and returns a new dict",
"**kwargs): if kwargs.get(\"attrs\", None) is not None: self.input_type = kwargs[\"attrs\"].pop(\"type\", self.input_type) super(TextInput, self).__init__(*args,",
"build_attrs(self, extra_attrs=None, **kwargs): \"\"\" Backported from Django 1.10 Helper function for building an",
"is not None: new_context.update(flatten_context(context)) for attr in REQUIRED_CONTEXT_ATTRIBTUES: if hasattr(context, attr): setattr(new_context, attr,",
"is not None: self.datalist = datalist template_name = kwargs.pop(\"template_name\", None) if template_name is",
"context = { \"widget\": self, \"type\": self.input_type, \"name\": name, \"hidden\": self.is_hidden, \"required\": self.is_required,",
"surrounding context in the # floppyforms templatetags, when rendered inside a complete form.",
"True # See #25. if not isinstance(attr, bool): context[\"attrs\"][key] = str(attr) if self.datalist",
"context) class TextInput(Input): template_name = \"widgets/text.html\" input_type = \"text\" def __init__(self, *args, **kwargs):",
"None: self.datalist = datalist template_name = kwargs.pop(\"template_name\", None) if template_name is not None:",
"self.template_name = template_name super(Input, self).__init__(*args, **kwargs) # This attribute is used to inject",
"value is None: value = \"\" if value != \"\": # Only add",
"# True is injected in the context to allow stricter comparisons # for",
"\"text\" def __init__(self, *args, **kwargs): if kwargs.get(\"attrs\", None) is not None: self.input_type =",
"dictionary. \"\"\" attrs = dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs) return attrs # Backported",
"self.datalist is not None: context[\"datalist\"] = self.datalist return context def render(self, name, value,",
"flatten_context(context): if isinstance(context, Context): flat = {} for d in context.dicts: flat.update(d) return",
"non-empty context[\"value\"] = self.format_value(value) context.update(self.get_context_data()) context[\"attrs\"] = self.build_attrs(attrs) for key, attr in context[\"attrs\"].items():",
"django.utils.encoding import force_text class DictContext(dict): pass REQUIRED_CONTEXT_ATTRIBTUES = ( \"_form_config\", \"_form_render\", ) def",
"attrs=None, **kwargs): template_name = kwargs.pop(\"template_name\", None) if template_name is None: template_name = self.template_name",
"not None: self.datalist = datalist template_name = kwargs.pop(\"template_name\", None) if template_name is not",
"contexts: if context is not None: new_context.update(flatten_context(context)) for attr in REQUIRED_CONTEXT_ATTRIBTUES: if hasattr(context,",
"of them.\"\"\" new_context = DictContext() for context in contexts: if context is not",
"!= \"\": # Only add the value if it is non-empty context[\"value\"] =",
"from django.utils import formats from django.utils.encoding import force_text class DictContext(dict): pass REQUIRED_CONTEXT_ATTRIBTUES =",
"when rendered inside a complete form. self.context_instance = None def get_context_data(self): return {}",
"for building an attribute dictionary. \"\"\" attrs = dict(self.attrs, **kwargs) if extra_attrs: attrs.update(extra_attrs)",
"form. self.context_instance = None def get_context_data(self): return {} def format_value(self, value): if self.is_localized:"
] |
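For reference, a minimal sketch of how the datalist-aware TextInput above might be wired into a form. The CityForm name and the suggestion values are hypothetical, and the widget's "widgets/text.html" template is assumed to be available on the template path.

from django import forms

class CityForm(forms.Form):
    # Hypothetical form: TextInput is the datalist-aware widget defined above.
    city = forms.CharField(
        widget=TextInput(datalist=["Amsterdam", "Berlin", "Copenhagen"])
    )

# Rendering the field goes through Input.render()/get_context(), which adds
# the datalist to the template context alongside the usual input attrs.
print(CityForm()["city"])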
[
"not assigned Meraki will do their default which is LA time. #self.tags =",
"site_name self.organization_id = '' #self.timeZone = 'Australia/Brisbane' #Optional - If not assigned Meraki",
"self.dashboard = meraki.DashboardAPI(api_key) self.serials = serials self.networkId = '' self.templateId ='' def create(self):",
"def __init__(self, site_name, serials): self.site_name = site_name self.organization_id = '' #self.timeZone = 'Australia/Brisbane'",
"type is entered, the network is changed to combined api_key = os.environ.get('meraki_api_key') self.dashboard",
"Area import meraki, os # End Import Area class orgNetwork: def __init__(self, site_name,",
"= self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name), productTypes=(self.productTypes), tags=(self.tags), timeZone=(self.timeZone)) network_id = new_network self.networkId = network_id['id'] claim",
"# Start Import Area import meraki, os # End Import Area class orgNetwork:",
"default which is LA time. #self.tags = ['TEST'] #Optional self.productTypes = ['appliance', 'switch',",
"self.organization_id = '' #self.timeZone = 'Australia/Brisbane' #Optional - If not assigned Meraki will",
"will do their default which is LA time. #self.tags = ['TEST'] #Optional self.productTypes",
"#Optional self.productTypes = ['appliance', 'switch', 'wireless'] #When one or more product type is",
"'' self.templateId ='' def create(self): new_network = self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name), productTypes=(self.productTypes), tags=(self.tags), timeZone=(self.timeZone)) network_id",
"['TEST'] #Optional self.productTypes = ['appliance', 'switch', 'wireless'] #When one or more product type",
"self.site_name = site_name self.organization_id = '' #self.timeZone = 'Australia/Brisbane' #Optional - If not",
"time. #self.tags = ['TEST'] #Optional self.productTypes = ['appliance', 'switch', 'wireless'] #When one or",
"#When one or more product type is entered, the network is changed to",
"= serials self.networkId = '' self.templateId ='' def create(self): new_network = self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name),",
"create(self): new_network = self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name), productTypes=(self.productTypes), tags=(self.tags), timeZone=(self.timeZone)) network_id = new_network self.networkId =",
"new_network = self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name), productTypes=(self.productTypes), tags=(self.tags), timeZone=(self.timeZone)) network_id = new_network self.networkId = network_id['id']",
"- If not assigned Meraki will do their default which is LA time.",
"self.networkId = '' self.templateId ='' def create(self): new_network = self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name), productTypes=(self.productTypes), tags=(self.tags),",
"__init__(self, site_name, serials): self.site_name = site_name self.organization_id = '' #self.timeZone = 'Australia/Brisbane' #Optional",
"entered, the network is changed to combined api_key = os.environ.get('meraki_api_key') self.dashboard = meraki.DashboardAPI(api_key)",
"serials): self.site_name = site_name self.organization_id = '' #self.timeZone = 'Australia/Brisbane' #Optional - If",
"orgNetwork: def __init__(self, site_name, serials): self.site_name = site_name self.organization_id = '' #self.timeZone =",
"If not assigned Meraki will do their default which is LA time. #self.tags",
"self.serials = serials self.networkId = '' self.templateId ='' def create(self): new_network = self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id),",
"= site_name self.organization_id = '' #self.timeZone = 'Australia/Brisbane' #Optional - If not assigned",
"combined api_key = os.environ.get('meraki_api_key') self.dashboard = meraki.DashboardAPI(api_key) self.serials = serials self.networkId = ''",
"is entered, the network is changed to combined api_key = os.environ.get('meraki_api_key') self.dashboard =",
"name=(self.site_name), productTypes=(self.productTypes), tags=(self.tags), timeZone=(self.timeZone)) network_id = new_network self.networkId = network_id['id'] claim = self.dashboard.networks.claimNetworkDevices(networkId=(self.networkId),",
"#self.tags = ['TEST'] #Optional self.productTypes = ['appliance', 'switch', 'wireless'] #When one or more",
"#Optional - If not assigned Meraki will do their default which is LA",
"do their default which is LA time. #self.tags = ['TEST'] #Optional self.productTypes =",
"is changed to combined api_key = os.environ.get('meraki_api_key') self.dashboard = meraki.DashboardAPI(api_key) self.serials = serials",
"assigned Meraki will do their default which is LA time. #self.tags = ['TEST']",
"to combined api_key = os.environ.get('meraki_api_key') self.dashboard = meraki.DashboardAPI(api_key) self.serials = serials self.networkId =",
"tags=(self.tags), timeZone=(self.timeZone)) network_id = new_network self.networkId = network_id['id'] claim = self.dashboard.networks.claimNetworkDevices(networkId=(self.networkId), serials=(self.serials)) bind",
"Start Import Area import meraki, os # End Import Area class orgNetwork: def",
"site_name, serials): self.site_name = site_name self.organization_id = '' #self.timeZone = 'Australia/Brisbane' #Optional -",
"or more product type is entered, the network is changed to combined api_key",
"product type is entered, the network is changed to combined api_key = os.environ.get('meraki_api_key')",
"api_key = os.environ.get('meraki_api_key') self.dashboard = meraki.DashboardAPI(api_key) self.serials = serials self.networkId = '' self.templateId",
"the network is changed to combined api_key = os.environ.get('meraki_api_key') self.dashboard = meraki.DashboardAPI(api_key) self.serials",
"LA time. #self.tags = ['TEST'] #Optional self.productTypes = ['appliance', 'switch', 'wireless'] #When one",
"changed to combined api_key = os.environ.get('meraki_api_key') self.dashboard = meraki.DashboardAPI(api_key) self.serials = serials self.networkId",
"'wireless'] #When one or more product type is entered, the network is changed",
"class orgNetwork: def __init__(self, site_name, serials): self.site_name = site_name self.organization_id = '' #self.timeZone",
"= os.environ.get('meraki_api_key') self.dashboard = meraki.DashboardAPI(api_key) self.serials = serials self.networkId = '' self.templateId =''",
"Area class orgNetwork: def __init__(self, site_name, serials): self.site_name = site_name self.organization_id = ''",
"meraki.DashboardAPI(api_key) self.serials = serials self.networkId = '' self.templateId ='' def create(self): new_network =",
"'' #self.timeZone = 'Australia/Brisbane' #Optional - If not assigned Meraki will do their",
"= '' self.templateId ='' def create(self): new_network = self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name), productTypes=(self.productTypes), tags=(self.tags), timeZone=(self.timeZone))",
"their default which is LA time. #self.tags = ['TEST'] #Optional self.productTypes = ['appliance',",
"network_id = new_network self.networkId = network_id['id'] claim = self.dashboard.networks.claimNetworkDevices(networkId=(self.networkId), serials=(self.serials)) bind = self.dashboard.networks.bindNetwork(networkId=(self.networkId),",
"self.productTypes = ['appliance', 'switch', 'wireless'] #When one or more product type is entered,",
"'switch', 'wireless'] #When one or more product type is entered, the network is",
"['appliance', 'switch', 'wireless'] #When one or more product type is entered, the network",
"='' def create(self): new_network = self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name), productTypes=(self.productTypes), tags=(self.tags), timeZone=(self.timeZone)) network_id = new_network",
"self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name), productTypes=(self.productTypes), tags=(self.tags), timeZone=(self.timeZone)) network_id = new_network self.networkId = network_id['id'] claim =",
"Import Area import meraki, os # End Import Area class orgNetwork: def __init__(self,",
"End Import Area class orgNetwork: def __init__(self, site_name, serials): self.site_name = site_name self.organization_id",
"#self.timeZone = 'Australia/Brisbane' #Optional - If not assigned Meraki will do their default",
"productTypes=(self.productTypes), tags=(self.tags), timeZone=(self.timeZone)) network_id = new_network self.networkId = network_id['id'] claim = self.dashboard.networks.claimNetworkDevices(networkId=(self.networkId), serials=(self.serials))",
"= ['appliance', 'switch', 'wireless'] #When one or more product type is entered, the",
"Meraki will do their default which is LA time. #self.tags = ['TEST'] #Optional",
"network is changed to combined api_key = os.environ.get('meraki_api_key') self.dashboard = meraki.DashboardAPI(api_key) self.serials =",
"os.environ.get('meraki_api_key') self.dashboard = meraki.DashboardAPI(api_key) self.serials = serials self.networkId = '' self.templateId ='' def",
"more product type is entered, the network is changed to combined api_key =",
"serials self.networkId = '' self.templateId ='' def create(self): new_network = self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name), productTypes=(self.productTypes),",
"# End Import Area class orgNetwork: def __init__(self, site_name, serials): self.site_name = site_name",
"= ['TEST'] #Optional self.productTypes = ['appliance', 'switch', 'wireless'] #When one or more product",
"one or more product type is entered, the network is changed to combined",
"<gh_stars>0 # Start Import Area import meraki, os # End Import Area class",
"Import Area class orgNetwork: def __init__(self, site_name, serials): self.site_name = site_name self.organization_id =",
"self.templateId ='' def create(self): new_network = self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name), productTypes=(self.productTypes), tags=(self.tags), timeZone=(self.timeZone)) network_id =",
"which is LA time. #self.tags = ['TEST'] #Optional self.productTypes = ['appliance', 'switch', 'wireless']",
"def create(self): new_network = self.dashboard.organizations.createOrganizationNetwork(organizationId=(self.organization_id), name=(self.site_name), productTypes=(self.productTypes), tags=(self.tags), timeZone=(self.timeZone)) network_id = new_network self.networkId",
"import meraki, os # End Import Area class orgNetwork: def __init__(self, site_name, serials):",
"os # End Import Area class orgNetwork: def __init__(self, site_name, serials): self.site_name =",
"'Australia/Brisbane' #Optional - If not assigned Meraki will do their default which is",
"= meraki.DashboardAPI(api_key) self.serials = serials self.networkId = '' self.templateId ='' def create(self): new_network",
"meraki, os # End Import Area class orgNetwork: def __init__(self, site_name, serials): self.site_name",
"= 'Australia/Brisbane' #Optional - If not assigned Meraki will do their default which",
"is LA time. #self.tags = ['TEST'] #Optional self.productTypes = ['appliance', 'switch', 'wireless'] #When",
"= new_network self.networkId = network_id['id'] claim = self.dashboard.networks.claimNetworkDevices(networkId=(self.networkId), serials=(self.serials)) bind = self.dashboard.networks.bindNetwork(networkId=(self.networkId), configTemplateId=(self.templateId))",
"timeZone=(self.timeZone)) network_id = new_network self.networkId = network_id['id'] claim = self.dashboard.networks.claimNetworkDevices(networkId=(self.networkId), serials=(self.serials)) bind =",
"= '' #self.timeZone = 'Australia/Brisbane' #Optional - If not assigned Meraki will do"
] |
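A minimal usage sketch for the orgNetwork class above, assuming the meraki_api_key environment variable is exported; the organization ID, template ID, site name, and serial number are placeholders.

# Placeholders throughout; create() creates the network, claims the
# devices into it, and binds the network to the configuration template.
site = orgNetwork('BNE-Branch-01', serials=['QXXX-XXXX-XXXX'])
site.organization_id = '123456'     # target Meraki organization
site.templateId = 'L_0000000001'    # config template to bind against
site.create()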
[
"# Local from .xpath_condition import XPathCondition from .enums import XPathConditionType # -------------------------------------------------------------------------------------------------------------------- #",
"# ------------------------------------------------------ Imports ----------------------------------------------------- # # System from typing import Optional # Local",
"-------------------------------------------------------------------------------------------------------------------- # # ------------------------------------------- class: XPathConditionContains ------------------------------------------ # class XPathConditionContains(XPathCondition): # --------------------------------------------------- Init",
"------------------------------------------------------ Imports ----------------------------------------------------- # # System from typing import Optional # Local from",
"None, value: Optional[any] = None, **kwargs ): super().__init__( name=name, value=value, condition_type=XPathConditionType.CONTAINS, **kwargs )",
"Init --------------------------------------------------- # def __init__( self, name: Optional[str] = None, value: Optional[any] =",
"import XPathCondition from .enums import XPathConditionType # -------------------------------------------------------------------------------------------------------------------- # # ------------------------------------------- class: XPathConditionContains",
".enums import XPathConditionType # -------------------------------------------------------------------------------------------------------------------- # # ------------------------------------------- class: XPathConditionContains ------------------------------------------ # class",
"from typing import Optional # Local from .xpath_condition import XPathCondition from .enums import",
"import XPathConditionType # -------------------------------------------------------------------------------------------------------------------- # # ------------------------------------------- class: XPathConditionContains ------------------------------------------ # class XPathConditionContains(XPathCondition):",
"= None, value: Optional[any] = None, **kwargs ): super().__init__( name=name, value=value, condition_type=XPathConditionType.CONTAINS, **kwargs",
"= None, **kwargs ): super().__init__( name=name, value=value, condition_type=XPathConditionType.CONTAINS, **kwargs ) # -------------------------------------------------------------------------------------------------------------------- #",
".xpath_condition import XPathCondition from .enums import XPathConditionType # -------------------------------------------------------------------------------------------------------------------- # # ------------------------------------------- class:",
"from .enums import XPathConditionType # -------------------------------------------------------------------------------------------------------------------- # # ------------------------------------------- class: XPathConditionContains ------------------------------------------ #",
"self, name: Optional[str] = None, value: Optional[any] = None, **kwargs ): super().__init__( name=name,",
"# # ------------------------------------------- class: XPathConditionContains ------------------------------------------ # class XPathConditionContains(XPathCondition): # --------------------------------------------------- Init ---------------------------------------------------",
"Optional[str] = None, value: Optional[any] = None, **kwargs ): super().__init__( name=name, value=value, condition_type=XPathConditionType.CONTAINS,",
"class: XPathConditionContains ------------------------------------------ # class XPathConditionContains(XPathCondition): # --------------------------------------------------- Init --------------------------------------------------- # def __init__(",
"System from typing import Optional # Local from .xpath_condition import XPathCondition from .enums",
"class XPathConditionContains(XPathCondition): # --------------------------------------------------- Init --------------------------------------------------- # def __init__( self, name: Optional[str] =",
"typing import Optional # Local from .xpath_condition import XPathCondition from .enums import XPathConditionType",
"XPathConditionContains(XPathCondition): # --------------------------------------------------- Init --------------------------------------------------- # def __init__( self, name: Optional[str] = None,",
"--------------------------------------------------- # def __init__( self, name: Optional[str] = None, value: Optional[any] = None,",
"------------------------------------------- class: XPathConditionContains ------------------------------------------ # class XPathConditionContains(XPathCondition): # --------------------------------------------------- Init --------------------------------------------------- # def",
"Local from .xpath_condition import XPathCondition from .enums import XPathConditionType # -------------------------------------------------------------------------------------------------------------------- # #",
"XPathConditionType # -------------------------------------------------------------------------------------------------------------------- # # ------------------------------------------- class: XPathConditionContains ------------------------------------------ # class XPathConditionContains(XPathCondition): #",
"from .xpath_condition import XPathCondition from .enums import XPathConditionType # -------------------------------------------------------------------------------------------------------------------- # # -------------------------------------------",
"------------------------------------------ # class XPathConditionContains(XPathCondition): # --------------------------------------------------- Init --------------------------------------------------- # def __init__( self, name:",
"__init__( self, name: Optional[str] = None, value: Optional[any] = None, **kwargs ): super().__init__(",
"# def __init__( self, name: Optional[str] = None, value: Optional[any] = None, **kwargs",
"XPathConditionContains ------------------------------------------ # class XPathConditionContains(XPathCondition): # --------------------------------------------------- Init --------------------------------------------------- # def __init__( self,",
"import Optional # Local from .xpath_condition import XPathCondition from .enums import XPathConditionType #",
"# -------------------------------------------------------------------------------------------------------------------- # # ------------------------------------------- class: XPathConditionContains ------------------------------------------ # class XPathConditionContains(XPathCondition): # ---------------------------------------------------",
"Optional # Local from .xpath_condition import XPathCondition from .enums import XPathConditionType # --------------------------------------------------------------------------------------------------------------------",
"# class XPathConditionContains(XPathCondition): # --------------------------------------------------- Init --------------------------------------------------- # def __init__( self, name: Optional[str]",
"XPathCondition from .enums import XPathConditionType # -------------------------------------------------------------------------------------------------------------------- # # ------------------------------------------- class: XPathConditionContains ------------------------------------------",
"----------------------------------------------------- # # System from typing import Optional # Local from .xpath_condition import",
"# ------------------------------------------- class: XPathConditionContains ------------------------------------------ # class XPathConditionContains(XPathCondition): # --------------------------------------------------- Init --------------------------------------------------- #",
"def __init__( self, name: Optional[str] = None, value: Optional[any] = None, **kwargs ):",
"Optional[any] = None, **kwargs ): super().__init__( name=name, value=value, condition_type=XPathConditionType.CONTAINS, **kwargs ) # --------------------------------------------------------------------------------------------------------------------",
"Imports ----------------------------------------------------- # # System from typing import Optional # Local from .xpath_condition",
"--------------------------------------------------- Init --------------------------------------------------- # def __init__( self, name: Optional[str] = None, value: Optional[any]",
"# System from typing import Optional # Local from .xpath_condition import XPathCondition from",
"<gh_stars>0 # ------------------------------------------------------ Imports ----------------------------------------------------- # # System from typing import Optional #",
"name: Optional[str] = None, value: Optional[any] = None, **kwargs ): super().__init__( name=name, value=value,",
"# --------------------------------------------------- Init --------------------------------------------------- # def __init__( self, name: Optional[str] = None, value:",
"value: Optional[any] = None, **kwargs ): super().__init__( name=name, value=value, condition_type=XPathConditionType.CONTAINS, **kwargs ) #",
"# # System from typing import Optional # Local from .xpath_condition import XPathCondition"
] |
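The XPathCondition base class and XPathConditionType enum are imported but not shown here, so only a hypothetical usage sketch is possible; a contains-type condition would typically serialize to an XPath predicate along the lines of [contains(@class, 'button')].

# Hypothetical: how a caller might build a contains-condition on an attribute.
condition = XPathConditionContains(name='class', value='button')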
[
"class Migration(migrations.Migration): dependencies = [ ('tradingbot', '0002_alter_stockinstance_user'), ] operations = [ migrations.RemoveField( model_name='order',",
"dependencies = [ ('tradingbot', '0002_alter_stockinstance_user'), ] operations = [ migrations.RemoveField( model_name='order', name='price', ),",
"migrations.AddField( model_name='order', name='filled_avg_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='filled average price', max_digits=8, null=True), ), migrations.AddField( model_name='order',",
"model_name='order', name='stop_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='stop price', max_digits=8, null=True), ), migrations.AlterField( model_name='order', name='timestamp', field=models.DateTimeField(auto_now_add=True,",
"migrations.RemoveField( model_name='order', name='price', ), migrations.AddField( model_name='order', name='filled_avg_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='filled average price', max_digits=8,",
"= [ migrations.RemoveField( model_name='order', name='price', ), migrations.AddField( model_name='order', name='filled_avg_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='filled average",
"from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('tradingbot', '0002_alter_stockinstance_user'), ]",
"), migrations.AddField( model_name='order', name='filled_quantity', field=models.DecimalField(decimal_places=2, default=0, help_text='filled quantity', max_digits=8), ), migrations.AddField( model_name='order', name='filled_timestamp',",
"default=0, help_text='filled quantity', max_digits=8), ), migrations.AddField( model_name='order', name='filled_timestamp', field=models.DateTimeField(blank=True, help_text='order filled timestamp', null=True),",
"max_digits=8, null=True), ), migrations.AddField( model_name='order', name='status', field=models.CharField(choices=[('A', 'Accepted'), ('F', 'Filled')], default='A', help_text='order status',",
"22:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('tradingbot', '0002_alter_stockinstance_user'),",
"model_name='order', name='filled_timestamp', field=models.DateTimeField(blank=True, help_text='order filled timestamp', null=True), ), migrations.AddField( model_name='order', name='limit_price', field=models.DecimalField(blank=True, decimal_places=2,",
"preserve_default=False, ), migrations.AddField( model_name='order', name='stop_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='stop price', max_digits=8, null=True), ), migrations.AlterField(",
"django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('tradingbot', '0002_alter_stockinstance_user'), ] operations",
"default='A', help_text='order status', max_length=1), preserve_default=False, ), migrations.AddField( model_name='order', name='stop_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='stop price',",
"[ migrations.RemoveField( model_name='order', name='price', ), migrations.AddField( model_name='order', name='filled_avg_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='filled average price',",
"field=models.DateTimeField(blank=True, help_text='order filled timestamp', null=True), ), migrations.AddField( model_name='order', name='limit_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='limit price',",
"max_digits=8), ), migrations.AddField( model_name='order', name='filled_timestamp', field=models.DateTimeField(blank=True, help_text='order filled timestamp', null=True), ), migrations.AddField( model_name='order',",
"max_digits=8, null=True), ), migrations.AddField( model_name='order', name='filled_quantity', field=models.DecimalField(decimal_places=2, default=0, help_text='filled quantity', max_digits=8), ), migrations.AddField(",
"] operations = [ migrations.RemoveField( model_name='order', name='price', ), migrations.AddField( model_name='order', name='filled_avg_price', field=models.DecimalField(blank=True, decimal_places=2,",
"), migrations.AddField( model_name='order', name='filled_timestamp', field=models.DateTimeField(blank=True, help_text='order filled timestamp', null=True), ), migrations.AddField( model_name='order', name='limit_price',",
"on 2022-01-08 22:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [",
"help_text='filled average price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='filled_quantity', field=models.DecimalField(decimal_places=2, default=0, help_text='filled quantity',",
"name='limit_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='limit price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='status', field=models.CharField(choices=[('A', 'Accepted'),",
"'Filled')], default='A', help_text='order status', max_length=1), preserve_default=False, ), migrations.AddField( model_name='order', name='stop_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='stop",
"price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='status', field=models.CharField(choices=[('A', 'Accepted'), ('F', 'Filled')], default='A', help_text='order",
"migrations.AddField( model_name='order', name='limit_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='limit price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='status',",
"3.2.8 on 2022-01-08 22:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies =",
"), migrations.AddField( model_name='order', name='stop_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='stop price', max_digits=8, null=True), ), migrations.AlterField( model_name='order',",
"name='stop_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='stop price', max_digits=8, null=True), ), migrations.AlterField( model_name='order', name='timestamp', field=models.DateTimeField(auto_now_add=True, help_text='order",
"max_length=1), preserve_default=False, ), migrations.AddField( model_name='order', name='stop_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='stop price', max_digits=8, null=True), ),",
"field=models.DecimalField(decimal_places=2, default=0, help_text='filled quantity', max_digits=8), ), migrations.AddField( model_name='order', name='filled_timestamp', field=models.DateTimeField(blank=True, help_text='order filled timestamp',",
"help_text='filled quantity', max_digits=8), ), migrations.AddField( model_name='order', name='filled_timestamp', field=models.DateTimeField(blank=True, help_text='order filled timestamp', null=True), ),",
"help_text='limit price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='status', field=models.CharField(choices=[('A', 'Accepted'), ('F', 'Filled')], default='A',",
"by Django 3.2.8 on 2022-01-08 22:59 from django.db import migrations, models class Migration(migrations.Migration):",
"name='status', field=models.CharField(choices=[('A', 'Accepted'), ('F', 'Filled')], default='A', help_text='order status', max_length=1), preserve_default=False, ), migrations.AddField( model_name='order',",
"), migrations.AddField( model_name='order', name='filled_avg_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='filled average price', max_digits=8, null=True), ), migrations.AddField(",
"null=True), ), migrations.AddField( model_name='order', name='filled_quantity', field=models.DecimalField(decimal_places=2, default=0, help_text='filled quantity', max_digits=8), ), migrations.AddField( model_name='order',",
"migrations.AddField( model_name='order', name='filled_timestamp', field=models.DateTimeField(blank=True, help_text='order filled timestamp', null=True), ), migrations.AddField( model_name='order', name='limit_price', field=models.DecimalField(blank=True,",
"name='filled_avg_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='filled average price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='filled_quantity', field=models.DecimalField(decimal_places=2,",
"average price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='filled_quantity', field=models.DecimalField(decimal_places=2, default=0, help_text='filled quantity', max_digits=8),",
"field=models.CharField(choices=[('A', 'Accepted'), ('F', 'Filled')], default='A', help_text='order status', max_length=1), preserve_default=False, ), migrations.AddField( model_name='order', name='stop_price',",
"name='filled_quantity', field=models.DecimalField(decimal_places=2, default=0, help_text='filled quantity', max_digits=8), ), migrations.AddField( model_name='order', name='filled_timestamp', field=models.DateTimeField(blank=True, help_text='order filled",
"model_name='order', name='limit_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='limit price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='status', field=models.CharField(choices=[('A',",
"status', max_length=1), preserve_default=False, ), migrations.AddField( model_name='order', name='stop_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='stop price', max_digits=8, null=True),",
"migrations.AddField( model_name='order', name='stop_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='stop price', max_digits=8, null=True), ), migrations.AlterField( model_name='order', name='timestamp',",
"help_text='stop price', max_digits=8, null=True), ), migrations.AlterField( model_name='order', name='timestamp', field=models.DateTimeField(auto_now_add=True, help_text='order submission timestamp'), ),",
"quantity', max_digits=8), ), migrations.AddField( model_name='order', name='filled_timestamp', field=models.DateTimeField(blank=True, help_text='order filled timestamp', null=True), ), migrations.AddField(",
"<gh_stars>1-10 # Generated by Django 3.2.8 on 2022-01-08 22:59 from django.db import migrations,",
"Django 3.2.8 on 2022-01-08 22:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies",
"), migrations.AddField( model_name='order', name='limit_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='limit price', max_digits=8, null=True), ), migrations.AddField( model_name='order',",
"price', max_digits=8, null=True), ), migrations.AlterField( model_name='order', name='timestamp', field=models.DateTimeField(auto_now_add=True, help_text='order submission timestamp'), ), ]",
"help_text='order filled timestamp', null=True), ), migrations.AddField( model_name='order', name='limit_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='limit price', max_digits=8,",
"field=models.DecimalField(blank=True, decimal_places=2, help_text='filled average price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='filled_quantity', field=models.DecimalField(decimal_places=2, default=0,",
"models class Migration(migrations.Migration): dependencies = [ ('tradingbot', '0002_alter_stockinstance_user'), ] operations = [ migrations.RemoveField(",
"name='filled_timestamp', field=models.DateTimeField(blank=True, help_text='order filled timestamp', null=True), ), migrations.AddField( model_name='order', name='limit_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='limit",
"name='price', ), migrations.AddField( model_name='order', name='filled_avg_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='filled average price', max_digits=8, null=True), ),",
"model_name='order', name='status', field=models.CharField(choices=[('A', 'Accepted'), ('F', 'Filled')], default='A', help_text='order status', max_length=1), preserve_default=False, ), migrations.AddField(",
"help_text='order status', max_length=1), preserve_default=False, ), migrations.AddField( model_name='order', name='stop_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='stop price', max_digits=8,",
"('tradingbot', '0002_alter_stockinstance_user'), ] operations = [ migrations.RemoveField( model_name='order', name='price', ), migrations.AddField( model_name='order', name='filled_avg_price',",
"[ ('tradingbot', '0002_alter_stockinstance_user'), ] operations = [ migrations.RemoveField( model_name='order', name='price', ), migrations.AddField( model_name='order',",
"= [ ('tradingbot', '0002_alter_stockinstance_user'), ] operations = [ migrations.RemoveField( model_name='order', name='price', ), migrations.AddField(",
"decimal_places=2, help_text='limit price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='status', field=models.CharField(choices=[('A', 'Accepted'), ('F', 'Filled')],",
"('F', 'Filled')], default='A', help_text='order status', max_length=1), preserve_default=False, ), migrations.AddField( model_name='order', name='stop_price', field=models.DecimalField(blank=True, decimal_places=2,",
"'Accepted'), ('F', 'Filled')], default='A', help_text='order status', max_length=1), preserve_default=False, ), migrations.AddField( model_name='order', name='stop_price', field=models.DecimalField(blank=True,",
"model_name='order', name='price', ), migrations.AddField( model_name='order', name='filled_avg_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='filled average price', max_digits=8, null=True),",
"model_name='order', name='filled_avg_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='filled average price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='filled_quantity',",
"migrations.AddField( model_name='order', name='filled_quantity', field=models.DecimalField(decimal_places=2, default=0, help_text='filled quantity', max_digits=8), ), migrations.AddField( model_name='order', name='filled_timestamp', field=models.DateTimeField(blank=True,",
"decimal_places=2, help_text='stop price', max_digits=8, null=True), ), migrations.AlterField( model_name='order', name='timestamp', field=models.DateTimeField(auto_now_add=True, help_text='order submission timestamp'),",
"Migration(migrations.Migration): dependencies = [ ('tradingbot', '0002_alter_stockinstance_user'), ] operations = [ migrations.RemoveField( model_name='order', name='price',",
"migrations, models class Migration(migrations.Migration): dependencies = [ ('tradingbot', '0002_alter_stockinstance_user'), ] operations = [",
"), migrations.AddField( model_name='order', name='status', field=models.CharField(choices=[('A', 'Accepted'), ('F', 'Filled')], default='A', help_text='order status', max_length=1), preserve_default=False,",
"price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='filled_quantity', field=models.DecimalField(decimal_places=2, default=0, help_text='filled quantity', max_digits=8), ),",
"migrations.AddField( model_name='order', name='status', field=models.CharField(choices=[('A', 'Accepted'), ('F', 'Filled')], default='A', help_text='order status', max_length=1), preserve_default=False, ),",
"2022-01-08 22:59 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('tradingbot',",
"operations = [ migrations.RemoveField( model_name='order', name='price', ), migrations.AddField( model_name='order', name='filled_avg_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='filled",
"import migrations, models class Migration(migrations.Migration): dependencies = [ ('tradingbot', '0002_alter_stockinstance_user'), ] operations =",
"# Generated by Django 3.2.8 on 2022-01-08 22:59 from django.db import migrations, models",
"timestamp', null=True), ), migrations.AddField( model_name='order', name='limit_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='limit price', max_digits=8, null=True), ),",
"field=models.DecimalField(blank=True, decimal_places=2, help_text='limit price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='status', field=models.CharField(choices=[('A', 'Accepted'), ('F',",
"null=True), ), migrations.AddField( model_name='order', name='limit_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='limit price', max_digits=8, null=True), ), migrations.AddField(",
"field=models.DecimalField(blank=True, decimal_places=2, help_text='stop price', max_digits=8, null=True), ), migrations.AlterField( model_name='order', name='timestamp', field=models.DateTimeField(auto_now_add=True, help_text='order submission",
"null=True), ), migrations.AddField( model_name='order', name='status', field=models.CharField(choices=[('A', 'Accepted'), ('F', 'Filled')], default='A', help_text='order status', max_length=1),",
"decimal_places=2, help_text='filled average price', max_digits=8, null=True), ), migrations.AddField( model_name='order', name='filled_quantity', field=models.DecimalField(decimal_places=2, default=0, help_text='filled",
"model_name='order', name='filled_quantity', field=models.DecimalField(decimal_places=2, default=0, help_text='filled quantity', max_digits=8), ), migrations.AddField( model_name='order', name='filled_timestamp', field=models.DateTimeField(blank=True, help_text='order",
"'0002_alter_stockinstance_user'), ] operations = [ migrations.RemoveField( model_name='order', name='price', ), migrations.AddField( model_name='order', name='filled_avg_price', field=models.DecimalField(blank=True,",
"filled timestamp', null=True), ), migrations.AddField( model_name='order', name='limit_price', field=models.DecimalField(blank=True, decimal_places=2, help_text='limit price', max_digits=8, null=True),",
"Generated by Django 3.2.8 on 2022-01-08 22:59 from django.db import migrations, models class"
] |
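For orientation, the migration above implies an Order model shaped roughly like the sketch below; only the fields touched by this migration are shown, and any pre-existing fields (symbol, quantity, owner, and so on) are unknown and omitted.

from django.db import models

class Order(models.Model):
    # Sketch reconstructed from the migration; pre-existing fields omitted.
    STATUS_CHOICES = [('A', 'Accepted'), ('F', 'Filled')]

    filled_avg_price = models.DecimalField(blank=True, decimal_places=2, help_text='filled average price', max_digits=8, null=True)
    filled_quantity = models.DecimalField(decimal_places=2, default=0, help_text='filled quantity', max_digits=8)
    filled_timestamp = models.DateTimeField(blank=True, help_text='order filled timestamp', null=True)
    limit_price = models.DecimalField(blank=True, decimal_places=2, help_text='limit price', max_digits=8, null=True)
    status = models.CharField(choices=STATUS_CHOICES, default='A', help_text='order status', max_length=1)
    stop_price = models.DecimalField(blank=True, decimal_places=2, help_text='stop price', max_digits=8, null=True)
    timestamp = models.DateTimeField(auto_now_add=True, help_text='order submission timestamp')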
[
"def do_debug(a): global debug_done if not debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if a.debug: modules = set(a.debug.split(','))",
"except IOError: config_string = None c = configparser.ConfigParser() success = False if config_string:",
"== 'testcli': return testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown command {}\".format(a.command))) return 64 # EX_USAGE in BSD",
"'testcli': raise RuntimeError(\"something's goofy with CLI logic\") print(repr(a)) def do_debug(a): global debug_done if",
"\"\"\" direct_name = \"w1rollup\" _, applied_name = os.path.split(sys.argv[0]) p = LocalArgumentParser() if applied_name",
"modules: rollup.logger.setLevel(logging.DEBUG) if 'w1datapoint' in modules or 'all' in modules: w1datapoint.logger.setLevel(logging.DEBUG) debug_done =",
"= super().parse_known_args(*args, **kwargs) try: a = t[0] except ValueError: a = t return",
"p = LocalArgumentParser() p.add_argument('testcli_command') p.add_argument('--option', action='store_true') a = p.parse_args() do_debug(a) if a.testcli_command !=",
"configparser.MissingSectionHeaderError: config_string = \"[global]\\n\" + config_string if not success: c.read_string(config_string, source=a.config) success =",
"'-d', nargs='?', const='all') self.add_argument('--config', '-c', default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location', default=None) self.add_argument('--raw-location', default=None) def _resolve(self, a):",
"and rollup data, see --help\") sys.exit(64) # EX_USAGE return rollup.do_rollup( os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location)) def",
"config_string = None c = configparser.ConfigParser() success = False if config_string: try: c.read_string(config_string,",
"argparse, configparser, sys, os from . import common, metadata, observations, rollup, w1datapoint import",
"default=None) self.add_argument('--raw-location', default=None) def _resolve(self, a): \"\"\" get basic stuff from config file,",
"\"w1rollup\" _, applied_name = os.path.split(sys.argv[0]) p = LocalArgumentParser() if applied_name != direct_name: p.add_argument('rollup_command')",
"from raw observation JSON blobs \"\"\" direct_name = \"w1rollup\" _, applied_name = os.path.split(sys.argv[0])",
"modules: logger.setLevel(logging.DEBUG) if 'common' in modules or 'all' in modules: common.logger.setLevel(logging.DEBUG) if 'metadata'",
"the needful \"\"\" p = LocalArgumentParser() p.add_argument('testcli_command') p.add_argument('--option', action='store_true') a = p.parse_args() do_debug(a)",
"testcli_command(): \"\"\" Confidence the CLI is doing the needful \"\"\" p = LocalArgumentParser()",
"not debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if a.debug: modules = set(a.debug.split(',')) if 'commands' in modules or",
"= super().parse_args(*args, **kwargs) return self._resolve(a) def rollup_command(): \"\"\" Maintain a collection of monthly",
"os.path.expanduser( os.environ.get('raw_location', c['global'].get('raw_location', raw_location))) if not a.rollup_location: a.rollup_location = os.path.expanduser( os.environ.get('rollup_location', c['global'].get('rollup_location', rollup_location)))",
"None c = configparser.ConfigParser() success = False if config_string: try: c.read_string(config_string, source=a.config) success",
"blobs \"\"\" direct_name = \"w1rollup\" _, applied_name = os.path.split(sys.argv[0]) p = LocalArgumentParser() if",
"do_debug(a) if a.command == 'rollup': return rollup_command() if a.command == 'testcli': return testcli_command()",
"= False class LocalArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.add_argument('--debug', '-d', nargs='?',",
"= set(a.debug.split(',')) if 'commands' in modules or 'all' in modules: logger.setLevel(logging.DEBUG) if 'common'",
"= None c = configparser.ConfigParser() success = False if config_string: try: c.read_string(config_string, source=a.config)",
"import argparse, configparser, sys, os from . import common, metadata, observations, rollup, w1datapoint",
"or 'all' in modules: observations.logger.setLevel(logging.DEBUG) if 'rollup' in modules or 'all' in modules:",
"'testcli': return testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown command {}\".format(a.command))) return 64 # EX_USAGE in BSD sysexits,",
"do_debug(a): global debug_done if not debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if a.debug: modules = set(a.debug.split(',')) if",
"'w1datapoint' in modules or 'all' in modules: w1datapoint.logger.setLevel(logging.DEBUG) debug_done = True def main():",
"def main(): \"\"\" CLI operation \"\"\" p = LocalArgumentParser() p.add_argument('command') a = p.parse_args_permissive()",
"p.parse_args() do_debug(a) if a.testcli_command != 'testcli': raise RuntimeError(\"something's goofy with CLI logic\") print(repr(a))",
"# EX_USAGE in BSD sysexits, just to pick a standard if __name__ ==",
"precedence a.raw_location = os.path.expanduser( os.environ.get('raw_location', c['global'].get('raw_location', raw_location))) if not a.rollup_location: a.rollup_location = os.path.expanduser(",
"def parse_args(self, *args, **kwargs): a = super().parse_args(*args, **kwargs) return self._resolve(a) def rollup_command(): \"\"\"",
"if a.command == 'testcli': return testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown command {}\".format(a.command))) return 64 # EX_USAGE",
"modules or 'all' in modules: metadata.logger.setLevel(logging.DEBUG) if 'observations' in modules or 'all' in",
"rollup.do_rollup( os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location)) def testcli_command(): \"\"\" Confidence the CLI is doing the needful",
"from . import common, metadata, observations, rollup, w1datapoint import logging logger = logging.getLogger(__name__)",
"p.add_argument('--option', action='store_true') a = p.parse_args() do_debug(a) if a.testcli_command != 'testcli': raise RuntimeError(\"something's goofy",
"*args, **kwargs): a = super().parse_args(*args, **kwargs) return self._resolve(a) def rollup_command(): \"\"\" Maintain a",
"debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if a.debug: modules = set(a.debug.split(',')) if 'commands' in modules or 'all'",
"stuff from config file, from env vars, and from command line \"\"\" #",
"a.testcli_command != 'testcli': raise RuntimeError(\"something's goofy with CLI logic\") print(repr(a)) def do_debug(a): global",
"debug_done = True def main(): \"\"\" CLI operation \"\"\" p = LocalArgumentParser() p.add_argument('command')",
"if 'w1datapoint' in modules or 'all' in modules: w1datapoint.logger.setLevel(logging.DEBUG) debug_done = True def",
"os.environ.get('rollup_location', c['global'].get('rollup_location', rollup_location))) return a def parse_args_permissive(self, *args, **kwargs): t = super().parse_known_args(*args, **kwargs)",
"a.debug: modules = set(a.debug.split(',')) if 'commands' in modules or 'all' in modules: logger.setLevel(logging.DEBUG)",
"LocalArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.add_argument('--debug', '-d', nargs='?', const='all') self.add_argument('--config', '-c',",
"file, from env vars, and from command line \"\"\" # Set defaults here,",
"logging logger = logging.getLogger(__name__) debug_done = False class LocalArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs):",
"Maintain a collection of monthly rollup JSON blobs from raw observation JSON blobs",
"t = super().parse_known_args(*args, **kwargs) try: a = t[0] except ValueError: a = t",
"self._resolve(a) def rollup_command(): \"\"\" Maintain a collection of monthly rollup JSON blobs from",
"None or a.raw_location is None: logger.error(\"Need dirs for raw and rollup data, see",
"a.rollup_location is None or a.raw_location is None: logger.error(\"Need dirs for raw and rollup",
"a bad idea raw_location = None rollup_location = None try: config_string = open(a.config,",
"True except configparser.MissingSectionHeaderError: config_string = \"[global]\\n\" + config_string if not success: c.read_string(config_string, source=a.config)",
"success = True except configparser.MissingSectionHeaderError: config_string = \"[global]\\n\" + config_string if not success:",
"goofy with CLI logic\") print(repr(a)) def do_debug(a): global debug_done if not debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\")",
"a.rollup_location: a.rollup_location = os.path.expanduser( os.environ.get('rollup_location', c['global'].get('rollup_location', rollup_location))) return a def parse_args_permissive(self, *args, **kwargs):",
"'observations' in modules or 'all' in modules: observations.logger.setLevel(logging.DEBUG) if 'rollup' in modules or",
"or 'all' in modules: w1datapoint.logger.setLevel(logging.DEBUG) debug_done = True def main(): \"\"\" CLI operation",
"is doing the needful \"\"\" p = LocalArgumentParser() p.add_argument('testcli_command') p.add_argument('--option', action='store_true') a =",
"metadata, observations, rollup, w1datapoint import logging logger = logging.getLogger(__name__) debug_done = False class",
"'commands' in modules or 'all' in modules: logger.setLevel(logging.DEBUG) if 'common' in modules or",
"logger.setLevel(logging.DEBUG) if 'common' in modules or 'all' in modules: common.logger.setLevel(logging.DEBUG) if 'metadata' in",
"observation JSON blobs \"\"\" direct_name = \"w1rollup\" _, applied_name = os.path.split(sys.argv[0]) p =",
"if not success: c.read_string(config_string, source=a.config) success = True if success: if not a.raw_location:",
"t return self._resolve(a) def parse_args(self, *args, **kwargs): a = super().parse_args(*args, **kwargs) return self._resolve(a)",
"if config_string: try: c.read_string(config_string, source=a.config) success = True except configparser.MissingSectionHeaderError: config_string = \"[global]\\n\"",
"os.path.expanduser( os.environ.get('rollup_location', c['global'].get('rollup_location', rollup_location))) return a def parse_args_permissive(self, *args, **kwargs): t = super().parse_known_args(*args,",
"parse_args(self, *args, **kwargs): a = super().parse_args(*args, **kwargs) return self._resolve(a) def rollup_command(): \"\"\" Maintain",
"config_string = open(a.config, 'r').read() except IOError: config_string = None c = configparser.ConfigParser() success",
"**kwargs): a = super().parse_args(*args, **kwargs) return self._resolve(a) def rollup_command(): \"\"\" Maintain a collection",
"= t[0] except ValueError: a = t return self._resolve(a) def parse_args(self, *args, **kwargs):",
"or 'all' in modules: common.logger.setLevel(logging.DEBUG) if 'metadata' in modules or 'all' in modules:",
"return rollup.do_rollup( os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location)) def testcli_command(): \"\"\" Confidence the CLI is doing the",
"modules or 'all' in modules: w1datapoint.logger.setLevel(logging.DEBUG) debug_done = True def main(): \"\"\" CLI",
"for raw and rollup data, see --help\") sys.exit(64) # EX_USAGE return rollup.do_rollup( os.path.expanduser(a.rollup_location),",
"CLI logic\") print(repr(a)) def do_debug(a): global debug_done if not debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if a.debug:",
"in modules: logger.setLevel(logging.DEBUG) if 'common' in modules or 'all' in modules: common.logger.setLevel(logging.DEBUG) if",
"source=a.config) success = True except configparser.MissingSectionHeaderError: config_string = \"[global]\\n\" + config_string if not",
"rollup data, see --help\") sys.exit(64) # EX_USAGE return rollup.do_rollup( os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location)) def testcli_command():",
"'rollup': return rollup_command() if a.command == 'testcli': return testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown command {}\".format(a.command))) return",
"t[0] except ValueError: a = t return self._resolve(a) def parse_args(self, *args, **kwargs): a",
"global debug_done if not debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if a.debug: modules = set(a.debug.split(',')) if 'commands'",
"do_debug(a) if a.testcli_command != 'testcli': raise RuntimeError(\"something's goofy with CLI logic\") print(repr(a)) def",
"= open(a.config, 'r').read() except IOError: config_string = None c = configparser.ConfigParser() success =",
"try: c.read_string(config_string, source=a.config) success = True except configparser.MissingSectionHeaderError: config_string = \"[global]\\n\" + config_string",
"set(a.debug.split(',')) if 'commands' in modules or 'all' in modules: logger.setLevel(logging.DEBUG) if 'common' in",
"rollup_command() if a.command == 'testcli': return testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown command {}\".format(a.command))) return 64 #",
"from env vars, and from command line \"\"\" # Set defaults here, unless",
"import common, metadata, observations, rollup, w1datapoint import logging logger = logging.getLogger(__name__) debug_done =",
"logging.getLogger(__name__) debug_done = False class LocalArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.add_argument('--debug',",
"debug_done if not debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if a.debug: modules = set(a.debug.split(',')) if 'commands' in",
"command {}\".format(a.command))) return 64 # EX_USAGE in BSD sysexits, just to pick a",
"const='all') self.add_argument('--config', '-c', default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location', default=None) self.add_argument('--raw-location', default=None) def _resolve(self, a): \"\"\" get",
"not a.rollup_location: a.rollup_location = os.path.expanduser( os.environ.get('rollup_location', c['global'].get('rollup_location', rollup_location))) return a def parse_args_permissive(self, *args,",
"config_string: try: c.read_string(config_string, source=a.config) success = True except configparser.MissingSectionHeaderError: config_string = \"[global]\\n\" +",
"False if config_string: try: c.read_string(config_string, source=a.config) success = True except configparser.MissingSectionHeaderError: config_string =",
"# command-line option takes precedence a.raw_location = os.path.expanduser( os.environ.get('raw_location', c['global'].get('raw_location', raw_location))) if not",
"rollup_location))) return a def parse_args_permissive(self, *args, **kwargs): t = super().parse_known_args(*args, **kwargs) try: a",
"\"\"\" # Set defaults here, unless a bad idea raw_location = None rollup_location",
"LocalArgumentParser() p.add_argument('testcli_command') p.add_argument('--option', action='store_true') a = p.parse_args() do_debug(a) if a.testcli_command != 'testcli': raise",
"in modules: w1datapoint.logger.setLevel(logging.DEBUG) debug_done = True def main(): \"\"\" CLI operation \"\"\" p",
"blobs from raw observation JSON blobs \"\"\" direct_name = \"w1rollup\" _, applied_name =",
"modules or 'all' in modules: common.logger.setLevel(logging.DEBUG) if 'metadata' in modules or 'all' in",
"action='store_true') a = p.parse_args() do_debug(a) if a.testcli_command != 'testcli': raise RuntimeError(\"something's goofy with",
"\"\"\" p = LocalArgumentParser() p.add_argument('command') a = p.parse_args_permissive() do_debug(a) if a.command == 'rollup':",
"# EX_USAGE return rollup.do_rollup( os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location)) def testcli_command(): \"\"\" Confidence the CLI is",
"= None rollup_location = None try: config_string = open(a.config, 'r').read() except IOError: config_string",
"basic stuff from config file, from env vars, and from command line \"\"\"",
"'metadata' in modules or 'all' in modules: metadata.logger.setLevel(logging.DEBUG) if 'observations' in modules or",
"success = True if success: if not a.raw_location: # command-line option takes precedence",
"in modules or 'all' in modules: rollup.logger.setLevel(logging.DEBUG) if 'w1datapoint' in modules or 'all'",
"debug_done = False class LocalArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.add_argument('--debug', '-d',",
"modules: observations.logger.setLevel(logging.DEBUG) if 'rollup' in modules or 'all' in modules: rollup.logger.setLevel(logging.DEBUG) if 'w1datapoint'",
"get basic stuff from config file, from env vars, and from command line",
"a = t return self._resolve(a) def parse_args(self, *args, **kwargs): a = super().parse_args(*args, **kwargs)",
"rollup JSON blobs from raw observation JSON blobs \"\"\" direct_name = \"w1rollup\" _,",
"raise RuntimeError(\"something's goofy with CLI logic\") print(repr(a)) def do_debug(a): global debug_done if not",
"and from command line \"\"\" # Set defaults here, unless a bad idea",
"\"\"\" Maintain a collection of monthly rollup JSON blobs from raw observation JSON",
"os.environ.get('raw_location', c['global'].get('raw_location', raw_location))) if not a.rollup_location: a.rollup_location = os.path.expanduser( os.environ.get('rollup_location', c['global'].get('rollup_location', rollup_location))) return",
"= t return self._resolve(a) def parse_args(self, *args, **kwargs): a = super().parse_args(*args, **kwargs) return",
"except ValueError: a = t return self._resolve(a) def parse_args(self, *args, **kwargs): a =",
"{}\".format(a.command))) return 64 # EX_USAGE in BSD sysexits, just to pick a standard",
"= True def main(): \"\"\" CLI operation \"\"\" p = LocalArgumentParser() p.add_argument('command') a",
"= \"[global]\\n\" + config_string if not success: c.read_string(config_string, source=a.config) success = True if",
"'common' in modules or 'all' in modules: common.logger.setLevel(logging.DEBUG) if 'metadata' in modules or",
"Set defaults here, unless a bad idea raw_location = None rollup_location = None",
"a = super().parse_args(*args, **kwargs) return self._resolve(a) def rollup_command(): \"\"\" Maintain a collection of",
"Confidence the CLI is doing the needful \"\"\" p = LocalArgumentParser() p.add_argument('testcli_command') p.add_argument('--option',",
"raw_location))) if not a.rollup_location: a.rollup_location = os.path.expanduser( os.environ.get('rollup_location', c['global'].get('rollup_location', rollup_location))) return a def",
"of monthly rollup JSON blobs from raw observation JSON blobs \"\"\" direct_name =",
"is None or a.raw_location is None: logger.error(\"Need dirs for raw and rollup data,",
"c['global'].get('rollup_location', rollup_location))) return a def parse_args_permissive(self, *args, **kwargs): t = super().parse_known_args(*args, **kwargs) try:",
"or a.raw_location is None: logger.error(\"Need dirs for raw and rollup data, see --help\")",
"super().__init__(*args, **kwargs) self.add_argument('--debug', '-d', nargs='?', const='all') self.add_argument('--config', '-c', default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location', default=None) self.add_argument('--raw-location', default=None)",
"observations, rollup, w1datapoint import logging logger = logging.getLogger(__name__) debug_done = False class LocalArgumentParser(argparse.ArgumentParser):",
"common.logger.setLevel(logging.DEBUG) if 'metadata' in modules or 'all' in modules: metadata.logger.setLevel(logging.DEBUG) if 'observations' in",
"a): \"\"\" get basic stuff from config file, from env vars, and from",
"= \"w1rollup\" _, applied_name = os.path.split(sys.argv[0]) p = LocalArgumentParser() if applied_name != direct_name:",
"or 'all' in modules: rollup.logger.setLevel(logging.DEBUG) if 'w1datapoint' in modules or 'all' in modules:",
"self.add_argument('--rollup-location', default=None) self.add_argument('--raw-location', default=None) def _resolve(self, a): \"\"\" get basic stuff from config",
"logger.error(\"Need dirs for raw and rollup data, see --help\") sys.exit(64) # EX_USAGE return",
"applied_name != direct_name: p.add_argument('rollup_command') a = p.parse_args() do_debug(a) if a.rollup_location is None or",
"False class LocalArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.add_argument('--debug', '-d', nargs='?', const='all')",
"sys.stderr.write(argparse.ArgumentError(\"Unknown command {}\".format(a.command))) return 64 # EX_USAGE in BSD sysexits, just to pick",
"self._resolve(a) def parse_args(self, *args, **kwargs): a = super().parse_args(*args, **kwargs) return self._resolve(a) def rollup_command():",
"super().parse_args(*args, **kwargs) return self._resolve(a) def rollup_command(): \"\"\" Maintain a collection of monthly rollup",
"if 'commands' in modules or 'all' in modules: logger.setLevel(logging.DEBUG) if 'common' in modules",
"logic\") print(repr(a)) def do_debug(a): global debug_done if not debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if a.debug: modules",
"p = LocalArgumentParser() p.add_argument('command') a = p.parse_args_permissive() do_debug(a) if a.command == 'rollup': return",
"IOError: config_string = None c = configparser.ConfigParser() success = False if config_string: try:",
"python import argparse, configparser, sys, os from . import common, metadata, observations, rollup,",
"modules or 'all' in modules: observations.logger.setLevel(logging.DEBUG) if 'rollup' in modules or 'all' in",
"#! /usr/bin/env python import argparse, configparser, sys, os from . import common, metadata,",
"def _resolve(self, a): \"\"\" get basic stuff from config file, from env vars,",
"*args, **kwargs): super().__init__(*args, **kwargs) self.add_argument('--debug', '-d', nargs='?', const='all') self.add_argument('--config', '-c', default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location', default=None)",
"\"\"\" p = LocalArgumentParser() p.add_argument('testcli_command') p.add_argument('--option', action='store_true') a = p.parse_args() do_debug(a) if a.testcli_command",
"except configparser.MissingSectionHeaderError: config_string = \"[global]\\n\" + config_string if not success: c.read_string(config_string, source=a.config) success",
"operation \"\"\" p = LocalArgumentParser() p.add_argument('command') a = p.parse_args_permissive() do_debug(a) if a.command ==",
"a.command == 'testcli': return testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown command {}\".format(a.command))) return 64 # EX_USAGE in",
"in modules or 'all' in modules: w1datapoint.logger.setLevel(logging.DEBUG) debug_done = True def main(): \"\"\"",
"nargs='?', const='all') self.add_argument('--config', '-c', default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location', default=None) self.add_argument('--raw-location', default=None) def _resolve(self, a): \"\"\"",
"rollup_command(): \"\"\" Maintain a collection of monthly rollup JSON blobs from raw observation",
"modules: common.logger.setLevel(logging.DEBUG) if 'metadata' in modules or 'all' in modules: metadata.logger.setLevel(logging.DEBUG) if 'observations'",
"= True if success: if not a.raw_location: # command-line option takes precedence a.raw_location",
"idea raw_location = None rollup_location = None try: config_string = open(a.config, 'r').read() except",
"= os.path.split(sys.argv[0]) p = LocalArgumentParser() if applied_name != direct_name: p.add_argument('rollup_command') a = p.parse_args()",
"observations.logger.setLevel(logging.DEBUG) if 'rollup' in modules or 'all' in modules: rollup.logger.setLevel(logging.DEBUG) if 'w1datapoint' in",
"c['global'].get('raw_location', raw_location))) if not a.rollup_location: a.rollup_location = os.path.expanduser( os.environ.get('rollup_location', c['global'].get('rollup_location', rollup_location))) return a",
"self.add_argument('--debug', '-d', nargs='?', const='all') self.add_argument('--config', '-c', default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location', default=None) self.add_argument('--raw-location', default=None) def _resolve(self,",
"= True except configparser.MissingSectionHeaderError: config_string = \"[global]\\n\" + config_string if not success: c.read_string(config_string,",
"LocalArgumentParser() if applied_name != direct_name: p.add_argument('rollup_command') a = p.parse_args() do_debug(a) if a.rollup_location is",
"modules: metadata.logger.setLevel(logging.DEBUG) if 'observations' in modules or 'all' in modules: observations.logger.setLevel(logging.DEBUG) if 'rollup'",
"sys.exit(64) # EX_USAGE return rollup.do_rollup( os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location)) def testcli_command(): \"\"\" Confidence the CLI",
"command-line option takes precedence a.raw_location = os.path.expanduser( os.environ.get('raw_location', c['global'].get('raw_location', raw_location))) if not a.rollup_location:",
"if 'metadata' in modules or 'all' in modules: metadata.logger.setLevel(logging.DEBUG) if 'observations' in modules",
"JSON blobs from raw observation JSON blobs \"\"\" direct_name = \"w1rollup\" _, applied_name",
"def rollup_command(): \"\"\" Maintain a collection of monthly rollup JSON blobs from raw",
"modules = set(a.debug.split(',')) if 'commands' in modules or 'all' in modules: logger.setLevel(logging.DEBUG) if",
"try: config_string = open(a.config, 'r').read() except IOError: config_string = None c = configparser.ConfigParser()",
"\"[global]\\n\" + config_string if not success: c.read_string(config_string, source=a.config) success = True if success:",
"= os.path.expanduser( os.environ.get('raw_location', c['global'].get('raw_location', raw_location))) if not a.rollup_location: a.rollup_location = os.path.expanduser( os.environ.get('rollup_location', c['global'].get('rollup_location',",
"in modules: observations.logger.setLevel(logging.DEBUG) if 'rollup' in modules or 'all' in modules: rollup.logger.setLevel(logging.DEBUG) if",
"w1datapoint.logger.setLevel(logging.DEBUG) debug_done = True def main(): \"\"\" CLI operation \"\"\" p = LocalArgumentParser()",
"= False if config_string: try: c.read_string(config_string, source=a.config) success = True except configparser.MissingSectionHeaderError: config_string",
"default=None) def _resolve(self, a): \"\"\" get basic stuff from config file, from env",
"a = p.parse_args_permissive() do_debug(a) if a.command == 'rollup': return rollup_command() if a.command ==",
"= None try: config_string = open(a.config, 'r').read() except IOError: config_string = None c",
"if not debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if a.debug: modules = set(a.debug.split(',')) if 'commands' in modules",
"if applied_name != direct_name: p.add_argument('rollup_command') a = p.parse_args() do_debug(a) if a.rollup_location is None",
"default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location', default=None) self.add_argument('--raw-location', default=None) def _resolve(self, a): \"\"\" get basic stuff from",
"return rollup_command() if a.command == 'testcli': return testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown command {}\".format(a.command))) return 64",
"or 'all' in modules: logger.setLevel(logging.DEBUG) if 'common' in modules or 'all' in modules:",
"--help\") sys.exit(64) # EX_USAGE return rollup.do_rollup( os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location)) def testcli_command(): \"\"\" Confidence the",
"monthly rollup JSON blobs from raw observation JSON blobs \"\"\" direct_name = \"w1rollup\"",
"ValueError: a = t return self._resolve(a) def parse_args(self, *args, **kwargs): a = super().parse_args(*args,",
"rollup.logger.setLevel(logging.DEBUG) if 'w1datapoint' in modules or 'all' in modules: w1datapoint.logger.setLevel(logging.DEBUG) debug_done = True",
"logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if a.debug: modules = set(a.debug.split(',')) if 'commands' in modules or 'all' in",
"config file, from env vars, and from command line \"\"\" # Set defaults",
"self.add_argument('--raw-location', default=None) def _resolve(self, a): \"\"\" get basic stuff from config file, from",
"raw and rollup data, see --help\") sys.exit(64) # EX_USAGE return rollup.do_rollup( os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location))",
"self.add_argument('--config', '-c', default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location', default=None) self.add_argument('--raw-location', default=None) def _resolve(self, a): \"\"\" get basic",
"unless a bad idea raw_location = None rollup_location = None try: config_string =",
"a.rollup_location = os.path.expanduser( os.environ.get('rollup_location', c['global'].get('rollup_location', rollup_location))) return a def parse_args_permissive(self, *args, **kwargs): t",
"!= direct_name: p.add_argument('rollup_command') a = p.parse_args() do_debug(a) if a.rollup_location is None or a.raw_location",
"= os.path.expanduser( os.environ.get('rollup_location', c['global'].get('rollup_location', rollup_location))) return a def parse_args_permissive(self, *args, **kwargs): t =",
"return a def parse_args_permissive(self, *args, **kwargs): t = super().parse_known_args(*args, **kwargs) try: a =",
"= configparser.ConfigParser() success = False if config_string: try: c.read_string(config_string, source=a.config) success = True",
"here, unless a bad idea raw_location = None rollup_location = None try: config_string",
"p.add_argument('rollup_command') a = p.parse_args() do_debug(a) if a.rollup_location is None or a.raw_location is None:",
"rollup_location = None try: config_string = open(a.config, 'r').read() except IOError: config_string = None",
"**kwargs): t = super().parse_known_args(*args, **kwargs) try: a = t[0] except ValueError: a =",
"config_string if not success: c.read_string(config_string, source=a.config) success = True if success: if not",
"a def parse_args_permissive(self, *args, **kwargs): t = super().parse_known_args(*args, **kwargs) try: a = t[0]",
"direct_name: p.add_argument('rollup_command') a = p.parse_args() do_debug(a) if a.rollup_location is None or a.raw_location is",
"'-c', default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location', default=None) self.add_argument('--raw-location', default=None) def _resolve(self, a): \"\"\" get basic stuff",
"c = configparser.ConfigParser() success = False if config_string: try: c.read_string(config_string, source=a.config) success =",
"success: if not a.raw_location: # command-line option takes precedence a.raw_location = os.path.expanduser( os.environ.get('raw_location',",
"<reponame>swork/w1-datalogger #! /usr/bin/env python import argparse, configparser, sys, os from . import common,",
"= p.parse_args() do_debug(a) if a.testcli_command != 'testcli': raise RuntimeError(\"something's goofy with CLI logic\")",
"a.command == 'rollup': return rollup_command() if a.command == 'testcli': return testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown command",
"in modules or 'all' in modules: metadata.logger.setLevel(logging.DEBUG) if 'observations' in modules or 'all'",
"if not a.rollup_location: a.rollup_location = os.path.expanduser( os.environ.get('rollup_location', c['global'].get('rollup_location', rollup_location))) return a def parse_args_permissive(self,",
"CLI operation \"\"\" p = LocalArgumentParser() p.add_argument('command') a = p.parse_args_permissive() do_debug(a) if a.command",
"in modules: metadata.logger.setLevel(logging.DEBUG) if 'observations' in modules or 'all' in modules: observations.logger.setLevel(logging.DEBUG) if",
"in modules: rollup.logger.setLevel(logging.DEBUG) if 'w1datapoint' in modules or 'all' in modules: w1datapoint.logger.setLevel(logging.DEBUG) debug_done",
"direct_name = \"w1rollup\" _, applied_name = os.path.split(sys.argv[0]) p = LocalArgumentParser() if applied_name !=",
"\"\"\" CLI operation \"\"\" p = LocalArgumentParser() p.add_argument('command') a = p.parse_args_permissive() do_debug(a) if",
"sys, os from . import common, metadata, observations, rollup, w1datapoint import logging logger",
"p = LocalArgumentParser() if applied_name != direct_name: p.add_argument('rollup_command') a = p.parse_args() do_debug(a) if",
"p.parse_args_permissive() do_debug(a) if a.command == 'rollup': return rollup_command() if a.command == 'testcli': return",
"if a.testcli_command != 'testcli': raise RuntimeError(\"something's goofy with CLI logic\") print(repr(a)) def do_debug(a):",
"return self._resolve(a) def parse_args(self, *args, **kwargs): a = super().parse_args(*args, **kwargs) return self._resolve(a) def",
"JSON blobs \"\"\" direct_name = \"w1rollup\" _, applied_name = os.path.split(sys.argv[0]) p = LocalArgumentParser()",
"None: logger.error(\"Need dirs for raw and rollup data, see --help\") sys.exit(64) # EX_USAGE",
"collection of monthly rollup JSON blobs from raw observation JSON blobs \"\"\" direct_name",
"in modules: common.logger.setLevel(logging.DEBUG) if 'metadata' in modules or 'all' in modules: metadata.logger.setLevel(logging.DEBUG) if",
"if 'observations' in modules or 'all' in modules: observations.logger.setLevel(logging.DEBUG) if 'rollup' in modules",
"**kwargs) return self._resolve(a) def rollup_command(): \"\"\" Maintain a collection of monthly rollup JSON",
"return testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown command {}\".format(a.command))) return 64 # EX_USAGE in BSD sysexits, just",
"'all' in modules: common.logger.setLevel(logging.DEBUG) if 'metadata' in modules or 'all' in modules: metadata.logger.setLevel(logging.DEBUG)",
"modules or 'all' in modules: rollup.logger.setLevel(logging.DEBUG) if 'w1datapoint' in modules or 'all' in",
"modules: w1datapoint.logger.setLevel(logging.DEBUG) debug_done = True def main(): \"\"\" CLI operation \"\"\" p =",
"super().parse_known_args(*args, **kwargs) try: a = t[0] except ValueError: a = t return self._resolve(a)",
"__init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.add_argument('--debug', '-d', nargs='?', const='all') self.add_argument('--config', '-c', default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location',",
"if a.command == 'rollup': return rollup_command() if a.command == 'testcli': return testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown",
"from command line \"\"\" # Set defaults here, unless a bad idea raw_location",
"print(repr(a)) def do_debug(a): global debug_done if not debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if a.debug: modules =",
"= p.parse_args() do_debug(a) if a.rollup_location is None or a.raw_location is None: logger.error(\"Need dirs",
"RuntimeError(\"something's goofy with CLI logic\") print(repr(a)) def do_debug(a): global debug_done if not debug_done:",
"bad idea raw_location = None rollup_location = None try: config_string = open(a.config, 'r').read()",
"w1datapoint import logging logger = logging.getLogger(__name__) debug_done = False class LocalArgumentParser(argparse.ArgumentParser): def __init__(self,",
"or 'all' in modules: metadata.logger.setLevel(logging.DEBUG) if 'observations' in modules or 'all' in modules:",
"env vars, and from command line \"\"\" # Set defaults here, unless a",
"'rollup' in modules or 'all' in modules: rollup.logger.setLevel(logging.DEBUG) if 'w1datapoint' in modules or",
". import common, metadata, observations, rollup, w1datapoint import logging logger = logging.getLogger(__name__) debug_done",
"main(): \"\"\" CLI operation \"\"\" p = LocalArgumentParser() p.add_argument('command') a = p.parse_args_permissive() do_debug(a)",
"= logging.getLogger(__name__) debug_done = False class LocalArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs)",
"rollup, w1datapoint import logging logger = logging.getLogger(__name__) debug_done = False class LocalArgumentParser(argparse.ArgumentParser): def",
"EX_USAGE return rollup.do_rollup( os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location)) def testcli_command(): \"\"\" Confidence the CLI is doing",
"p.parse_args() do_debug(a) if a.rollup_location is None or a.raw_location is None: logger.error(\"Need dirs for",
"EX_USAGE in BSD sysexits, just to pick a standard if __name__ == '__main__':",
"do_debug(a) if a.rollup_location is None or a.raw_location is None: logger.error(\"Need dirs for raw",
"the CLI is doing the needful \"\"\" p = LocalArgumentParser() p.add_argument('testcli_command') p.add_argument('--option', action='store_true')",
"True if success: if not a.raw_location: # command-line option takes precedence a.raw_location =",
"'all' in modules: observations.logger.setLevel(logging.DEBUG) if 'rollup' in modules or 'all' in modules: rollup.logger.setLevel(logging.DEBUG)",
"configparser.ConfigParser() success = False if config_string: try: c.read_string(config_string, source=a.config) success = True except",
"logger = logging.getLogger(__name__) debug_done = False class LocalArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs): super().__init__(*args,",
"None try: config_string = open(a.config, 'r').read() except IOError: config_string = None c =",
"if 'rollup' in modules or 'all' in modules: rollup.logger.setLevel(logging.DEBUG) if 'w1datapoint' in modules",
"_resolve(self, a): \"\"\" get basic stuff from config file, from env vars, and",
"not success: c.read_string(config_string, source=a.config) success = True if success: if not a.raw_location: #",
"doing the needful \"\"\" p = LocalArgumentParser() p.add_argument('testcli_command') p.add_argument('--option', action='store_true') a = p.parse_args()",
"needful \"\"\" p = LocalArgumentParser() p.add_argument('testcli_command') p.add_argument('--option', action='store_true') a = p.parse_args() do_debug(a) if",
"*args, **kwargs): t = super().parse_known_args(*args, **kwargs) try: a = t[0] except ValueError: a",
"p.add_argument('command') a = p.parse_args_permissive() do_debug(a) if a.command == 'rollup': return rollup_command() if a.command",
"line \"\"\" # Set defaults here, unless a bad idea raw_location = None",
"vars, and from command line \"\"\" # Set defaults here, unless a bad",
"\"\"\" get basic stuff from config file, from env vars, and from command",
"'all' in modules: w1datapoint.logger.setLevel(logging.DEBUG) debug_done = True def main(): \"\"\" CLI operation \"\"\"",
"c.read_string(config_string, source=a.config) success = True except configparser.MissingSectionHeaderError: config_string = \"[global]\\n\" + config_string if",
"!= 'testcli': raise RuntimeError(\"something's goofy with CLI logic\") print(repr(a)) def do_debug(a): global debug_done",
"a = t[0] except ValueError: a = t return self._resolve(a) def parse_args(self, *args,",
"a.raw_location = os.path.expanduser( os.environ.get('raw_location', c['global'].get('raw_location', raw_location))) if not a.rollup_location: a.rollup_location = os.path.expanduser( os.environ.get('rollup_location',",
"'r').read() except IOError: config_string = None c = configparser.ConfigParser() success = False if",
"class LocalArgumentParser(argparse.ArgumentParser): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.add_argument('--debug', '-d', nargs='?', const='all') self.add_argument('--config',",
"try: a = t[0] except ValueError: a = t return self._resolve(a) def parse_args(self,",
"c.read_string(config_string, source=a.config) success = True if success: if not a.raw_location: # command-line option",
"os from . import common, metadata, observations, rollup, w1datapoint import logging logger =",
"'all' in modules: logger.setLevel(logging.DEBUG) if 'common' in modules or 'all' in modules: common.logger.setLevel(logging.DEBUG)",
"a = p.parse_args() do_debug(a) if a.rollup_location is None or a.raw_location is None: logger.error(\"Need",
"**kwargs) try: a = t[0] except ValueError: a = t return self._resolve(a) def",
"in modules or 'all' in modules: observations.logger.setLevel(logging.DEBUG) if 'rollup' in modules or 'all'",
"common, metadata, observations, rollup, w1datapoint import logging logger = logging.getLogger(__name__) debug_done = False",
"def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.add_argument('--debug', '-d', nargs='?', const='all') self.add_argument('--config', '-c', default=os.path.expanduser(\"~/.w1.conf\"))",
"from config file, from env vars, and from command line \"\"\" # Set",
"def parse_args_permissive(self, *args, **kwargs): t = super().parse_known_args(*args, **kwargs) try: a = t[0] except",
"def testcli_command(): \"\"\" Confidence the CLI is doing the needful \"\"\" p =",
"LocalArgumentParser() p.add_argument('command') a = p.parse_args_permissive() do_debug(a) if a.command == 'rollup': return rollup_command() if",
"parse_args_permissive(self, *args, **kwargs): t = super().parse_known_args(*args, **kwargs) try: a = t[0] except ValueError:",
"success = False if config_string: try: c.read_string(config_string, source=a.config) success = True except configparser.MissingSectionHeaderError:",
"option takes precedence a.raw_location = os.path.expanduser( os.environ.get('raw_location', c['global'].get('raw_location', raw_location))) if not a.rollup_location: a.rollup_location",
"raw_location = None rollup_location = None try: config_string = open(a.config, 'r').read() except IOError:",
"success: c.read_string(config_string, source=a.config) success = True if success: if not a.raw_location: # command-line",
"config_string = \"[global]\\n\" + config_string if not success: c.read_string(config_string, source=a.config) success = True",
"import logging logger = logging.getLogger(__name__) debug_done = False class LocalArgumentParser(argparse.ArgumentParser): def __init__(self, *args,",
"if 'common' in modules or 'all' in modules: common.logger.setLevel(logging.DEBUG) if 'metadata' in modules",
"= p.parse_args_permissive() do_debug(a) if a.command == 'rollup': return rollup_command() if a.command == 'testcli':",
"return 64 # EX_USAGE in BSD sysexits, just to pick a standard if",
"_, applied_name = os.path.split(sys.argv[0]) p = LocalArgumentParser() if applied_name != direct_name: p.add_argument('rollup_command') a",
"= LocalArgumentParser() p.add_argument('testcli_command') p.add_argument('--option', action='store_true') a = p.parse_args() do_debug(a) if a.testcli_command != 'testcli':",
"'all' in modules: rollup.logger.setLevel(logging.DEBUG) if 'w1datapoint' in modules or 'all' in modules: w1datapoint.logger.setLevel(logging.DEBUG)",
"modules or 'all' in modules: logger.setLevel(logging.DEBUG) if 'common' in modules or 'all' in",
"if a.rollup_location is None or a.raw_location is None: logger.error(\"Need dirs for raw and",
"# Set defaults here, unless a bad idea raw_location = None rollup_location =",
"a collection of monthly rollup JSON blobs from raw observation JSON blobs \"\"\"",
"True def main(): \"\"\" CLI operation \"\"\" p = LocalArgumentParser() p.add_argument('command') a =",
"a = p.parse_args() do_debug(a) if a.testcli_command != 'testcli': raise RuntimeError(\"something's goofy with CLI",
"os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location)) def testcli_command(): \"\"\" Confidence the CLI is doing the needful \"\"\"",
"in modules or 'all' in modules: common.logger.setLevel(logging.DEBUG) if 'metadata' in modules or 'all'",
"defaults here, unless a bad idea raw_location = None rollup_location = None try:",
"see --help\") sys.exit(64) # EX_USAGE return rollup.do_rollup( os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location)) def testcli_command(): \"\"\" Confidence",
"if success: if not a.raw_location: # command-line option takes precedence a.raw_location = os.path.expanduser(",
"p.add_argument('testcli_command') p.add_argument('--option', action='store_true') a = p.parse_args() do_debug(a) if a.testcli_command != 'testcli': raise RuntimeError(\"something's",
"data, see --help\") sys.exit(64) # EX_USAGE return rollup.do_rollup( os.path.expanduser(a.rollup_location), os.path.expanduser(a.raw_location)) def testcli_command(): \"\"\"",
"in BSD sysexits, just to pick a standard if __name__ == '__main__': sys.exit(main())",
"/usr/bin/env python import argparse, configparser, sys, os from . import common, metadata, observations,",
"None rollup_location = None try: config_string = open(a.config, 'r').read() except IOError: config_string =",
"takes precedence a.raw_location = os.path.expanduser( os.environ.get('raw_location', c['global'].get('raw_location', raw_location))) if not a.rollup_location: a.rollup_location =",
"os.path.split(sys.argv[0]) p = LocalArgumentParser() if applied_name != direct_name: p.add_argument('rollup_command') a = p.parse_args() do_debug(a)",
"a.raw_location is None: logger.error(\"Need dirs for raw and rollup data, see --help\") sys.exit(64)",
"= LocalArgumentParser() p.add_argument('command') a = p.parse_args_permissive() do_debug(a) if a.command == 'rollup': return rollup_command()",
"in modules or 'all' in modules: logger.setLevel(logging.DEBUG) if 'common' in modules or 'all'",
"testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown command {}\".format(a.command))) return 64 # EX_USAGE in BSD sysexits, just to",
"= LocalArgumentParser() if applied_name != direct_name: p.add_argument('rollup_command') a = p.parse_args() do_debug(a) if a.rollup_location",
"== 'rollup': return rollup_command() if a.command == 'testcli': return testcli_command() sys.stderr.write(argparse.ArgumentError(\"Unknown command {}\".format(a.command)))",
"source=a.config) success = True if success: if not a.raw_location: # command-line option takes",
"configparser, sys, os from . import common, metadata, observations, rollup, w1datapoint import logging",
"+ config_string if not success: c.read_string(config_string, source=a.config) success = True if success: if",
"not a.raw_location: # command-line option takes precedence a.raw_location = os.path.expanduser( os.environ.get('raw_location', c['global'].get('raw_location', raw_location)))",
"CLI is doing the needful \"\"\" p = LocalArgumentParser() p.add_argument('testcli_command') p.add_argument('--option', action='store_true') a",
"open(a.config, 'r').read() except IOError: config_string = None c = configparser.ConfigParser() success = False",
"a.raw_location: # command-line option takes precedence a.raw_location = os.path.expanduser( os.environ.get('raw_location', c['global'].get('raw_location', raw_location))) if",
"return self._resolve(a) def rollup_command(): \"\"\" Maintain a collection of monthly rollup JSON blobs",
"metadata.logger.setLevel(logging.DEBUG) if 'observations' in modules or 'all' in modules: observations.logger.setLevel(logging.DEBUG) if 'rollup' in",
"os.path.expanduser(a.raw_location)) def testcli_command(): \"\"\" Confidence the CLI is doing the needful \"\"\" p",
"raw observation JSON blobs \"\"\" direct_name = \"w1rollup\" _, applied_name = os.path.split(sys.argv[0]) p",
"64 # EX_USAGE in BSD sysexits, just to pick a standard if __name__",
"is None: logger.error(\"Need dirs for raw and rollup data, see --help\") sys.exit(64) #",
"\"\"\" Confidence the CLI is doing the needful \"\"\" p = LocalArgumentParser() p.add_argument('testcli_command')",
"**kwargs): super().__init__(*args, **kwargs) self.add_argument('--debug', '-d', nargs='?', const='all') self.add_argument('--config', '-c', default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location', default=None) self.add_argument('--raw-location',",
"**kwargs) self.add_argument('--debug', '-d', nargs='?', const='all') self.add_argument('--config', '-c', default=os.path.expanduser(\"~/.w1.conf\")) self.add_argument('--rollup-location', default=None) self.add_argument('--raw-location', default=None) def",
"dirs for raw and rollup data, see --help\") sys.exit(64) # EX_USAGE return rollup.do_rollup(",
"applied_name = os.path.split(sys.argv[0]) p = LocalArgumentParser() if applied_name != direct_name: p.add_argument('rollup_command') a =",
"if not a.raw_location: # command-line option takes precedence a.raw_location = os.path.expanduser( os.environ.get('raw_location', c['global'].get('raw_location',",
"'all' in modules: metadata.logger.setLevel(logging.DEBUG) if 'observations' in modules or 'all' in modules: observations.logger.setLevel(logging.DEBUG)",
"command line \"\"\" # Set defaults here, unless a bad idea raw_location =",
"with CLI logic\") print(repr(a)) def do_debug(a): global debug_done if not debug_done: logging.basicConfig(format=\"%(levelname)s:%(filename)s:%(lineno)d:%(message)s\") if",
"if a.debug: modules = set(a.debug.split(',')) if 'commands' in modules or 'all' in modules:"
] |
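A quick way to sanity-check the resolution order (command line over environment variable over config file) is to drive LocalArgumentParser directly. The sketch below is illustrative only: the temp-file contents and the /data, /env, /cli paths are made-up values, and it assumes the class above is importable.

import os, tempfile

# Write a throwaway, headerless config file; _resolve() prepends [global] itself.
with tempfile.NamedTemporaryFile('w', suffix='.conf', delete=False) as f:
    f.write("raw_location = /data/raw\nrollup_location = /data/rollup\n")
    conf_path = f.name

p = LocalArgumentParser()

# 1. The config file alone supplies the locations.
a = p.parse_args(['--config', conf_path])
assert a.raw_location == '/data/raw'

# 2. An environment variable overrides the config file...
os.environ['raw_location'] = '/env/raw'
a = p.parse_args(['--config', conf_path])
assert a.raw_location == '/env/raw'

# 3. ...and an explicit command-line option overrides both.
a = p.parse_args(['--config', conf_path, '--raw-location', '/cli/raw'])
assert a.raw_location == '/cli/raw'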

"""
Tests the instantiation of PyTorch dataloaders over the cleaned images
from the Laboratório Visão Robótica e Imagem charcoal dataset
"""
# Imports library
import os, sys

# used to declare ../charcoal as a folder that can be imported
currentdir = os.path.dirname(os.path.realpath(__file__))
parentdir = os.path.dirname(currentdir)
sys.path.append(parentdir)
from charcoal.generator_dataloaders import generate_dataloaders

# Main scripts
if __name__ == "__main__":
    # Declares the target path for the dataset
    ufpr_dataset_path = "dataset_ufpr/cleaned/"

    print("Imports training, validation, and test dataloaders")
    classes, datasets, dataloaders = generate_dataloaders(ufpr_dataset_path)
    print("DONE")

    print("\nImported Classes")
    print(", ".join(classes))

    print("\nImported datasets")
    print(datasets)

    print("\nImported dataloaders")
    print(dataloaders)
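generate_dataloaders itself is not part of this test, so the script only pins down its signature: it returns (classes, datasets, dataloaders) for a dataset root. A rough sketch of a compatible implementation follows, assuming the cleaned images sit one class per sub-directory (the torchvision ImageFolder convention); the batch size, split ratios, and resize dimensions are illustrative, not the project's actual values.

from torch.utils.data import DataLoader, random_split
from torchvision import datasets as tv_datasets
from torchvision import transforms

def generate_dataloaders(root, batch_size=32, splits=(0.7, 0.15, 0.15)):
    """Hypothetical stand-in returning (classes, datasets, dataloaders)."""
    tfm = transforms.Compose([transforms.Resize((224, 224)),
                              transforms.ToTensor()])
    full = tv_datasets.ImageFolder(root, transform=tfm)
    n_train = int(splits[0] * len(full))
    n_val = int(splits[1] * len(full))
    n_test = len(full) - n_train - n_val
    parts = random_split(full, [n_train, n_val, n_test])
    split_datasets = dict(zip(("train", "val", "test"), parts))
    split_loaders = {name: DataLoader(ds, batch_size=batch_size,
                                      shuffle=(name == "train"))
                     for name, ds in split_datasets.items()}
    return full.classes, split_datasets, split_loaders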

# Copyright 2015 LasLabs Inc.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).

from . import res_users  # noqa
from . import res_company  # noqa
from . import res_users_pass_history  # noqa
from . import res_config_settings  # noqa

# ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------

from marshmallow import fields

from azure.ai.ml._schema.core.schema_meta import PatchedSchemaMeta


class AmlComputeNodeInfoSchema(metaclass=PatchedSchemaMeta):
    node_id = fields.Str()
    private_ip_address = fields.Str()
    public_ip_address = fields.Str()
    port = fields.Str()
    node_state = fields.Str()
    current_job_name = fields.Str()
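Assuming PatchedSchemaMeta produces an ordinary marshmallow.Schema subclass (an Azure ML internal detail, not verified here), the schema round-trips plain dictionaries the usual marshmallow way; every field is an optional string. The node values below are made up for illustration.

# Hypothetical round-trip; field values are illustrative only.
schema = AmlComputeNodeInfoSchema()
raw = {"node_id": "tvmps_0001", "port": "50000", "node_state": "running"}
node_info = schema.load(raw)      # validates/deserializes into a dict
payload = schema.dump(node_info)  # serializes back for transport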
[
"entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000, ) register( id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000, )",
"entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000, ) register( id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000, ) register( id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000, )",
"max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000, ) register( id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000, )",
"id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000, ) register( id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50',",
"id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-10x10-v0',",
"register( id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000, ) register( id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000, ) register( id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100',",
") register( id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000, ) register( id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000, ) register( id='maze-random-60x60-v0',",
"nondeterministic=True, ) register( id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000, )",
"max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000,",
") register( id='maze-random-20x20-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-30x30-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom30x30Plus', max_episode_steps=10000000000000, nondeterministic=True, )",
"register( id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000, ) register( id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000, ) register( id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20',",
") register( id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000, ) register( id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000, ) register( id='maze-warehouse-14x14-v0',",
") register( id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000, ) register( id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000, nondeterministic=True, ) register(",
"id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-11x11-v0',",
"max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000, ) register( id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000, nondeterministic=True,",
"id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000, ) register( id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000, ) register( id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000,",
"nondeterministic=True, ) register( id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000, ) register( id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000, nondeterministic=True, )",
"register( id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000, ) register( id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000, ) register( id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75',",
"register( id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000, ) register( id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11',",
"id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000, ) register( id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000, ) register( id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000,",
"register( id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000, ) register( id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000, ) register( id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60',",
"max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x20-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-30x30-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom30x30Plus', max_episode_steps=10000000000000,",
") register( id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-8x8-v0',",
"id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000, ) register( id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000, ) register( id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000,",
"id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000, ) register( id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3',",
"register( id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000, ) register( id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000, ) register( id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5',",
") register( id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000, ) register( id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000, nondeterministic=True, ) register(",
"nondeterministic=True, ) register( id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000, nondeterministic=True,",
"id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-17x17-v0',",
"nondeterministic=True, ) register( id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000, ) register( id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000, nondeterministic=True, )",
"nondeterministic=True, ) register( id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000, nondeterministic=True,",
"max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000,",
"register( id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000, ) register( id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5',",
"id='maze-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=2000, ) register( id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000, ) register( id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000,",
"entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000, ) register( id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000,",
"max_episode_steps=10000000000000, ) register( id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000, nondeterministic=True,",
"entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000, ) register( id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000, )",
"from gym.envs.registration import register register( id='maze-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=2000, ) register( id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000,",
"entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17',",
") register( id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000, nondeterministic=True, )",
"register( id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000, ) register( id='maze-random-50x50-v0',",
"max_episode_steps=10000000000000, ) register( id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000, ) register( id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000, ) register(",
"register( id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000, ) register( id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000, ) register( id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100',",
"max_episode_steps=10000000000000, ) register( id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000, ) register( id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000, ) register(",
"register( id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000, ) register( id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-25x25-v0',",
"id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000,",
"register( id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register(",
"max_episode_steps=10000000000000, ) register( id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000, ) register( id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000, ) register(",
"register( id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000, nondeterministic=True, ) register(",
"entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000, ) register( id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000,",
"entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000, ) register( id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000,",
") register( id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000, ) register( id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000, ) register( id='maze-warehouse-20x20-v0',",
"id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000, ) register( id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000,",
"id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000, ) register( id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000, ) register( id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000,",
"entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000, ) register( id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000,",
"register( id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000, ) register( id='maze-random-3x3-v0',",
"register( id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x20-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register(",
"id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000, ) register( id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000, ) register( id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000,",
"entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000, ) register( id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000,",
") register( id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000, ) register( id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000, ) register( id='maze-random-100x100-v0',",
"max_episode_steps=10000000000000, ) register( id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000, ) register(",
"entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000, ) register( id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000, ) register( id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000, )",
"<gh_stars>0 from gym.envs.registration import register register( id='maze-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=2000, ) register( id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5',",
") register( id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000, ) register( id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000, nondeterministic=True, ) register(",
") register( id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000, ) register( id='maze-warehouse-11x11-v0',",
"max_episode_steps=10000000000000, ) register( id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000, ) register( id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000, ) register(",
"entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11',",
") register( id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x20-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20Plus', max_episode_steps=10000000000000, nondeterministic=True, )",
"nondeterministic=True, ) register( id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x20-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20Plus', max_episode_steps=10000000000000, nondeterministic=True,",
"max_episode_steps=2000, ) register( id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000, ) register( id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000, ) register(",
"max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x20-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20Plus', max_episode_steps=10000000000000,",
") register( id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000, ) register( id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000, ) register( id='maze-random-5x5-v0',",
"entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000, ) register( id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000, ) register( id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000, nondeterministic=True,",
"entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000, ) register( id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000, ) register( id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000, )",
"id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000, ) register( id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3',",
"max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000,",
"id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000, ) register( id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25',",
"id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x20-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-30x30-plus-v0',",
"register( id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000, ) register( id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-10x10-plus-v0',",
"register( id='maze-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=2000, ) register( id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000, ) register( id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5',",
"register( id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000, ) register( id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000, ) register( id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17',",
"entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000, ) register( id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000, ) register( id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000, )",
"max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000,",
"register( id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8',",
"entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x20-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-30x30-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom30x30Plus',",
"nondeterministic=True, ) register( id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000, ) register( id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000, ) register(",
"entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000, ) register( id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000,",
"max_episode_steps=10000000000000, ) register( id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000, ) register( id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000, ) register(",
"nondeterministic=True, ) register( id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000, nondeterministic=True,",
"entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000, ) register( id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000,",
") register( id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000, nondeterministic=True, )",
") register( id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000, ) register(",
") register( id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000, ) register( id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000, ) register( id='maze-random-75x75-v0',",
"id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000, ) register( id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000, ) register( id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000,",
") register( id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000, nondeterministic=True, )",
"entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000, )",
") register( id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000, ) register( id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000, nondeterministic=True, ) register(",
") register( id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000, nondeterministic=True, )",
"register( id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000, ) register( id='maze-random-10x10-v0',",
"register( id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000, ) register( id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-3x3-v0',",
"register( id='maze-warehouse-8x8-v0', entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000, ) register( id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000, ) register( id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14',",
") register( id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000, ) register( id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-5x5-v0',",
"id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000, ) register( id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000,",
"id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000, ) register( id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000, ) register( id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000,",
") register( id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000, ) register(",
") register( id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000, nondeterministic=True, )",
"id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000, ) register( id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus',",
"entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x20-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20Plus',",
"entry_point='gym_maze.envs:MazeEnvWarehouse8x8', max_episode_steps=10000000000000, ) register( id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000, ) register( id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000, )",
") register( id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000, ) register( id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000, ) register( id='maze-sample-100x100-v0',",
"register( id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000, nondeterministic=True, ) register(",
"entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10',",
"id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-10x10-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x20-plus-v0',",
"register( id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000, nondeterministic=True, ) register(",
"id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000, ) register( id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10',",
"max_episode_steps=10000000000000, ) register( id='maze-warehouse-17x17-v0', entry_point='gym_maze.envs:MazeEnvWarehouse17x17', max_episode_steps=10000000000000, ) register( id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000, ) register(",
"register( id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000, nondeterministic=True, ) register(",
"max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-10x10-v0', entry_point='gym_maze.envs:MazeEnvSample10x10', max_episode_steps=10000000000000, ) register( id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000, nondeterministic=True,",
"entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=2000, ) register( id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000, ) register( id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000, )",
"register( id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000, ) register( id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-8x8-v0',",
"register register( id='maze-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=2000, ) register( id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000, ) register( id='maze-empty-5x5-v0',",
") register( id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000, nondeterministic=True, )",
"id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000, ) register( id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000, ) register( id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000,",
"max_episode_steps=10000000000000, ) register( id='maze-empty-5x5-v0', entry_point='gym_maze.envs:MazeEnvEmpty5x5', max_episode_steps=10000000000000, ) register( id='maze-warehouse-5x5-v0', entry_point='gym_maze.envs:MazeEnvWarehouse5x5', max_episode_steps=10000000000000, ) register(",
"import register register( id='maze-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=2000, ) register( id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000, ) register(",
"max_episode_steps=10000000000000, ) register( id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000, ) register( id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000, nondeterministic=True, )",
") register( id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000, ) register(",
"gym.envs.registration import register register( id='maze-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=2000, ) register( id='maze-sample-5x5-v0', entry_point='gym_maze.envs:MazeEnvSample5x5', max_episode_steps=10000000000000, )",
"id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x120-v0',",
"max_episode_steps=10000000000000, ) register( id='maze-random-75x75-v0', entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000, ) register( id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000, ) register(",
") register( id='maze-warehouse-11x11-v0', entry_point='gym_maze.envs:MazeEnvWarehouse11x11', max_episode_steps=10000000000000, ) register( id='maze-warehouse-14x14-v0', entry_point='gym_maze.envs:MazeEnvWarehouse14x14', max_episode_steps=10000000000000, ) register( id='maze-warehouse-17x17-v0',",
"max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000,",
"max_episode_steps=10000000000000, ) register( id='maze-random-3x3-v0', entry_point='gym_maze.envs:MazeEnvRandom3x3', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-25x25-v0', entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000, )",
"max_episode_steps=10000000000000, ) register( id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000, nondeterministic=True,",
"entry_point='gym_maze.envs:MazeEnvRandom25x25', max_episode_steps=10000000000000, ) register( id='maze-random-50x50-v0', entry_point='gym_maze.envs:MazeEnvRandom50x50', max_episode_steps=10000000000000, ) register( id='maze-random-60x60-v0', entry_point='gym_maze.envs:MazeEnvRandom60x60', max_episode_steps=10000000000000, )",
"id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-14x14-v0',",
"entry_point='gym_maze.envs:MazeEnvRandom75x75', max_episode_steps=10000000000000, ) register( id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000, ) register( id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000, nondeterministic=True,",
"id='maze-warehouse-20x20-v0', entry_point='gym_maze.envs:MazeEnvWarehouse20x20', max_episode_steps=10000000000000, ) register( id='maze-random-5x5-v0', entry_point='gym_maze.envs:MazeEnvRandom5x5', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8',",
"max_episode_steps=10000000000000, ) register( id='maze-random-10x10-v0', entry_point='gym_maze.envs:MazeEnvRandom10x10', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-sample-3x3-v0', entry_point='gym_maze.envs:MazeEnvSample3x3', max_episode_steps=10000000000000, )",
"entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14',",
"max_episode_steps=10000000000000, ) register( id='maze-sample-100x100-v0', entry_point='gym_maze.envs:MazeEnvSample100x100', max_episode_steps=10000000000000, ) register( id='maze-random-100x100-v0', entry_point='gym_maze.envs:MazeEnvRandom100x100', max_episode_steps=10000000000000, nondeterministic=True, )",
"register( id='maze-random-14x14-v0', entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000, nondeterministic=True, ) register(",
"nondeterministic=True, ) register( id='maze-random-20x20-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20Plus', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-30x30-plus-v0', entry_point='gym_maze.envs:MazeEnvRandom30x30Plus', max_episode_steps=10000000000000, nondeterministic=True,",
"nondeterministic=True, ) register( id='maze-random-8x8-v0', entry_point='gym_maze.envs:MazeEnvRandom8x8', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-11x11-v0', entry_point='gym_maze.envs:MazeEnvRandom11x11', max_episode_steps=10000000000000, nondeterministic=True,",
"entry_point='gym_maze.envs:MazeEnvRandom14x14', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-17x17-v0', entry_point='gym_maze.envs:MazeEnvRandom17x17', max_episode_steps=10000000000000, nondeterministic=True, ) register( id='maze-random-20x120-v0', entry_point='gym_maze.envs:MazeEnvRandom20x20',"
]
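The `register()` calls above only make the environment IDs resolvable; they run as a side effect of importing the package. A minimal usage sketch follows, assuming the gym-maze package from the snippet above is installed and that its environments expose the classic (pre-Gymnasium) `gym` step API; the random policy is purely illustrative.

```python
# Minimal usage sketch, assuming the classic gym API (4-tuple step) and an
# installed gym-maze package whose import executes the register() calls above.
import gym
import gym_maze  # noqa: F401  # importing triggers environment registration

env = gym.make('maze-sample-5x5-v0')
state = env.reset()
done = False
total_reward = 0.0
while not done:
    action = env.action_space.sample()  # random policy, for illustration only
    state, reward, done, info = env.step(action)
    total_reward += reward
print('episode return:', total_reward)
env.close()
```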