ngram
listlengths
0
67.8k
[ ". import testcase from . import testing class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase): @testing.coroutine def test_basic_recover_async(self):", "methods \"\"\" import unittest from . import testcase from . import testing class", "basic tests for recover methods \"\"\" import unittest from . import testcase from", "\"\"\" Amqp basic tests for recover methods \"\"\" import unittest from . import", "Amqp basic tests for recover methods \"\"\" import unittest from . import testcase", "class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase): @testing.coroutine def test_basic_recover_async(self): yield from self.channel.basic_recover_async(requeue=True) @testing.coroutine def test_basic_recover_async_no_requeue(self): yield", ". import testing class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase): @testing.coroutine def test_basic_recover_async(self): yield from self.channel.basic_recover_async(requeue=True) @testing.coroutine", "unittest from . import testcase from . import testing class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase): @testing.coroutine", "RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase): @testing.coroutine def test_basic_recover_async(self): yield from self.channel.basic_recover_async(requeue=True) @testing.coroutine def test_basic_recover_async_no_requeue(self): yield from", "testing class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase): @testing.coroutine def test_basic_recover_async(self): yield from self.channel.basic_recover_async(requeue=True) @testing.coroutine def test_basic_recover_async_no_requeue(self):", "def test_basic_recover_async(self): yield from self.channel.basic_recover_async(requeue=True) @testing.coroutine def test_basic_recover_async_no_requeue(self): yield from self.channel.basic_recover_async(requeue=False) @testing.coroutine def", "\"\"\" import unittest from . import testcase from . 
import testing class RecoverTestCase(testcase.RabbitTestCase,", "unittest.TestCase): @testing.coroutine def test_basic_recover_async(self): yield from self.channel.basic_recover_async(requeue=True) @testing.coroutine def test_basic_recover_async_no_requeue(self): yield from self.channel.basic_recover_async(requeue=False)", "from . import testing class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase): @testing.coroutine def test_basic_recover_async(self): yield from self.channel.basic_recover_async(requeue=True)", "import unittest from . import testcase from . import testing class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase):", "recover methods \"\"\" import unittest from . import testcase from . import testing", "yield from self.channel.basic_recover_async(requeue=True) @testing.coroutine def test_basic_recover_async_no_requeue(self): yield from self.channel.basic_recover_async(requeue=False) @testing.coroutine def test_basic_recover(self): result", "from . import testcase from . import testing class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase): @testing.coroutine def", "test_basic_recover_async_no_requeue(self): yield from self.channel.basic_recover_async(requeue=False) @testing.coroutine def test_basic_recover(self): result = yield from self.channel.basic_recover(requeue=True) self.assertTrue(result)", "@testing.coroutine def test_basic_recover_async(self): yield from self.channel.basic_recover_async(requeue=True) @testing.coroutine def test_basic_recover_async_no_requeue(self): yield from self.channel.basic_recover_async(requeue=False) @testing.coroutine", "test_basic_recover_async(self): yield from self.channel.basic_recover_async(requeue=True) @testing.coroutine def test_basic_recover_async_no_requeue(self): yield from self.channel.basic_recover_async(requeue=False) @testing.coroutine def test_basic_recover(self):", "testcase from . 
import testing class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase): @testing.coroutine def test_basic_recover_async(self): yield from", "for recover methods \"\"\" import unittest from . import testcase from . import", "@testing.coroutine def test_basic_recover_async_no_requeue(self): yield from self.channel.basic_recover_async(requeue=False) @testing.coroutine def test_basic_recover(self): result = yield from", "import testcase from . import testing class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase): @testing.coroutine def test_basic_recover_async(self): yield", "tests for recover methods \"\"\" import unittest from . import testcase from .", "self.channel.basic_recover_async(requeue=True) @testing.coroutine def test_basic_recover_async_no_requeue(self): yield from self.channel.basic_recover_async(requeue=False) @testing.coroutine def test_basic_recover(self): result = yield", "import testing class RecoverTestCase(testcase.RabbitTestCase, unittest.TestCase): @testing.coroutine def test_basic_recover_async(self): yield from self.channel.basic_recover_async(requeue=True) @testing.coroutine def", "def test_basic_recover_async_no_requeue(self): yield from self.channel.basic_recover_async(requeue=False) @testing.coroutine def test_basic_recover(self): result = yield from self.channel.basic_recover(requeue=True)", "from self.channel.basic_recover_async(requeue=True) @testing.coroutine def test_basic_recover_async_no_requeue(self): yield from self.channel.basic_recover_async(requeue=False) @testing.coroutine def test_basic_recover(self): result =" ]
[ "Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label def Update_Status(self,client,session_id,key,value): self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time() def Remove_Status_If_Exists(self,client,session_id,key): self.Validate_Session_Id(session_id) try: del", "Backup(self): while 1: time.sleep(self.backup_interval) print \"Backing up...\" try: self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\")) finally: self.mutex.release() import", "CONFIG from pd.common import SOCKET import os import mutex import time import threading", "0,\"user_status\":{}} self.sessions[session_id]=info return info def Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id) del self.sessions[session_id] def Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label", "\"MON_SERVER: Next id starts at %d\"%self.next_id def Client_Connect(self,x): pass def Client_Disconnect(self,x): pass def", "def Create_Session(self,client,username): session_id,directory=None,None session_id=self.next_id self.next_id+=1 info={\"id\":session_id, \"label\": \"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}} self.sessions[session_id]=info return info", "return self.sessions[session_id] def Session_List(self,client): return self.sessions def Create_Session(self,client,username): session_id,directory=None,None session_id=self.next_id self.next_id+=1 info={\"id\":session_id, \"label\":", "\"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}} self.sessions[session_id]=info return info def Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id) del self.sessions[session_id] def 
Label_Session(self,client,session_id,label):", "%d\"%self.next_id def Client_Connect(self,x): pass def Client_Disconnect(self,x): pass def Register_Client(self,client_id,user,host): pass # private def", "self.Validate_Session_Id(session_id) del self.sessions[session_id] def Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label def Update_Status(self,client,session_id,key,value): self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time() def", "info def Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id) del self.sessions[session_id] def Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label def Update_Status(self,client,session_id,key,value): self.Validate_Session_Id(session_id)", "at %d\"%self.next_id def Client_Connect(self,x): pass def Client_Disconnect(self,x): pass def Register_Client(self,client_id,user,host): pass # private", "self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1 except: pass print \"MON_SERVER: Next id starts at %d\"%self.next_id def", "data try: self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1 except: pass print \"MON_SERVER: Next id starts at", "session id\") elif not self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id %d\"%session_id) # PUBLIC ROUTINES", "PUBLIC ROUTINES def Session_Info(self,client,session_id): self.Validate_Session_Id(session_id) return self.sessions[session_id] def Session_List(self,client): return self.sessions def Create_Session(self,client,username):", "import CONFIG from pd.common import SOCKET import os import mutex import time import", "self.backup_thread.run=self.Backup 
self.backup_thread.start() # read in data try: self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1 except: pass print", "self.sessions={} self.next_id=1 # Define RPC interface self.mutex=threading.Lock() self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30 # Save loop self.session_file=CONFIG.pdmon_session_file", "info={\"id\":session_id, \"label\": \"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}} self.sessions[session_id]=info return info def Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id) del self.sessions[session_id]", "self.backup_interval=30 # Save loop self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup self.backup_thread.start() # read in data try:", "#!/usr/bin/python from pd.common import CONFIG from pd.common import SOCKET import os import mutex", "self.Validate_Session_Id(session_id) try: del self.sessions[session_id][\"user_status\"][key] except: pass def Backup(self): while 1: time.sleep(self.backup_interval) print \"Backing", "print \"Backing up...\" try: self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\")) finally: self.mutex.release() import socket if __name__ ==", "self.sessions[session_id][\"user_status\"][key] except: pass def Backup(self): while 1: time.sleep(self.backup_interval) print \"Backing up...\" try: self.mutex.acquire()", "SOCKET.COMMAND_EXCEPTION(\"Invalid session id %d\"%session_id) # PUBLIC ROUTINES def Session_Info(self,client,session_id): self.Validate_Session_Id(session_id) return self.sessions[session_id] def", "try: self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\")) finally: self.mutex.release() import socket if 
__name__ == \"__main__\": server=SERVER() SOCKET.SERVER(socket.gethostbyname(CONFIG.pdmon_server_host),CONFIG.pdmon_server_port,server)", "username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}} self.sessions[session_id]=info return info def Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id) del self.sessions[session_id] def Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id)", "print \"MON_SERVER: Next id starts at %d\"%self.next_id def Client_Connect(self,x): pass def Client_Disconnect(self,x): pass", "time import threading import pickle class SERVER: def __init__(self): self.sessions={} self.next_id=1 # Define", "Next id starts at %d\"%self.next_id def Client_Connect(self,x): pass def Client_Disconnect(self,x): pass def Register_Client(self,client_id,user,host):", "except: pass print \"MON_SERVER: Next id starts at %d\"%self.next_id def Client_Connect(self,x): pass def", "del self.sessions[session_id] def Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label def Update_Status(self,client,session_id,key,value): self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time() def Remove_Status_If_Exists(self,client,session_id,key):", "# Define RPC interface self.mutex=threading.Lock() self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30 # Save loop self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup", "pass def Backup(self): while 1: time.sleep(self.backup_interval) print \"Backing up...\" try: self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\")) finally:", "__init__(self): self.sessions={} self.next_id=1 # 
Define RPC interface self.mutex=threading.Lock() self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30 # Save loop", "raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id %d\"%session_id) # PUBLIC ROUTINES def Session_Info(self,client,session_id): self.Validate_Session_Id(session_id) return self.sessions[session_id]", "import os import mutex import time import threading import pickle class SERVER: def", "def Register_Client(self,client_id,user,host): pass # private def Validate_Session_Id(self,session_id): if type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id\")", "mutex import time import threading import pickle class SERVER: def __init__(self): self.sessions={} self.next_id=1", "not self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id %d\"%session_id) # PUBLIC ROUTINES def Session_Info(self,client,session_id): self.Validate_Session_Id(session_id)", "ROUTINES def Session_Info(self,client,session_id): self.Validate_Session_Id(session_id) return self.sessions[session_id] def Session_List(self,client): return self.sessions def Create_Session(self,client,username): session_id,directory=None,None", "<reponame>schinmayee/nimbus #!/usr/bin/python from pd.common import CONFIG from pd.common import SOCKET import os import", "import pickle class SERVER: def __init__(self): self.sessions={} self.next_id=1 # Define RPC interface self.mutex=threading.Lock()", "self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\")) finally: self.mutex.release() import socket if __name__ == \"__main__\": server=SERVER() SOCKET.SERVER(socket.gethostbyname(CONFIG.pdmon_server_host),CONFIG.pdmon_server_port,server) #", "self.sessions[session_id] def Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label def 
Update_Status(self,client,session_id,key,value): self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time() def Remove_Status_If_Exists(self,client,session_id,key): self.Validate_Session_Id(session_id)", "self.sessions[session_id][\"label\"]=label def Update_Status(self,client,session_id,key,value): self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time() def Remove_Status_If_Exists(self,client,session_id,key): self.Validate_Session_Id(session_id) try: del self.sessions[session_id][\"user_status\"][key] except:", "starts at %d\"%self.next_id def Client_Connect(self,x): pass def Client_Disconnect(self,x): pass def Register_Client(self,client_id,user,host): pass #", "loop self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup self.backup_thread.start() # read in data try: self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1", "pass def Client_Disconnect(self,x): pass def Register_Client(self,client_id,user,host): pass # private def Validate_Session_Id(self,session_id): if type(session_id)!=int:", "pass # private def Validate_Session_Id(self,session_id): if type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id\") elif not", "self.sessions[session_id] def Session_List(self,client): return self.sessions def Create_Session(self,client,username): session_id,directory=None,None session_id=self.next_id self.next_id+=1 info={\"id\":session_id, \"label\": \"<unnamed>\",\"username\":", "if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1 except: pass print \"MON_SERVER: Next id starts at %d\"%self.next_id def Client_Connect(self,x):", "Define RPC interface self.mutex=threading.Lock() 
self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30 # Save loop self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup self.backup_thread.start()", "id starts at %d\"%self.next_id def Client_Connect(self,x): pass def Client_Disconnect(self,x): pass def Register_Client(self,client_id,user,host): pass", "os import mutex import time import threading import pickle class SERVER: def __init__(self):", "self.sessions def Create_Session(self,client,username): session_id,directory=None,None session_id=self.next_id self.next_id+=1 info={\"id\":session_id, \"label\": \"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}} self.sessions[session_id]=info return", "def Session_List(self,client): return self.sessions def Create_Session(self,client,username): session_id,directory=None,None session_id=self.next_id self.next_id+=1 info={\"id\":session_id, \"label\": \"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\":", "class SERVER: def __init__(self): self.sessions={} self.next_id=1 # Define RPC interface self.mutex=threading.Lock() self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30", "Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id) del self.sessions[session_id] def Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label def Update_Status(self,client,session_id,key,value): self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time()", "Save loop 
self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup self.backup_thread.start() # read in data try: self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0):", "read in data try: self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1 except: pass print \"MON_SERVER: Next id", "Session_Info(self,client,session_id): self.Validate_Session_Id(session_id) return self.sessions[session_id] def Session_List(self,client): return self.sessions def Create_Session(self,client,username): session_id,directory=None,None session_id=self.next_id self.next_id+=1", "self.next_id=1 # Define RPC interface self.mutex=threading.Lock() self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30 # Save loop self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread()", "def Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id) del self.sessions[session_id] def Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label def Update_Status(self,client,session_id,key,value): self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value", "# private def Validate_Session_Id(self,session_id): if type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id\") elif not self.sessions.has_key(session_id):", "self.sessions[session_id][\"last_update\"]=time.time() def Remove_Status_If_Exists(self,client,session_id,key): self.Validate_Session_Id(session_id) try: del self.sessions[session_id][\"user_status\"][key] except: pass def Backup(self): while 1:", "return info def Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id) del 
self.sessions[session_id] def Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label def Update_Status(self,client,session_id,key,value):", "self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time() def Remove_Status_If_Exists(self,client,session_id,key): self.Validate_Session_Id(session_id) try: del self.sessions[session_id][\"user_status\"][key] except: pass def Backup(self):", "def Remove_Status_If_Exists(self,client,session_id,key): self.Validate_Session_Id(session_id) try: del self.sessions[session_id][\"user_status\"][key] except: pass def Backup(self): while 1: time.sleep(self.backup_interval)", "self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup self.backup_thread.start() # read in data try: self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1 except: pass", "%d\"%session_id) # PUBLIC ROUTINES def Session_Info(self,client,session_id): self.Validate_Session_Id(session_id) return self.sessions[session_id] def Session_List(self,client): return self.sessions", "session id %d\"%session_id) # PUBLIC ROUTINES def Session_Info(self,client,session_id): self.Validate_Session_Id(session_id) return self.sessions[session_id] def Session_List(self,client):", "from pd.common import SOCKET import os import mutex import time import threading import", "try: self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1 except: pass print \"MON_SERVER: Next id starts at %d\"%self.next_id", "Register_Client(self,client_id,user,host): pass # private def Validate_Session_Id(self,session_id): if type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id\") elif", "if type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id\") 
elif not self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id", "Client_Connect(self,x): pass def Client_Disconnect(self,x): pass def Register_Client(self,client_id,user,host): pass # private def Validate_Session_Id(self,session_id): if", "id\") elif not self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id %d\"%session_id) # PUBLIC ROUTINES def", "private def Validate_Session_Id(self,session_id): if type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id\") elif not self.sessions.has_key(session_id): raise", "pickle class SERVER: def __init__(self): self.sessions={} self.next_id=1 # Define RPC interface self.mutex=threading.Lock() self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"]", "# read in data try: self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1 except: pass print \"MON_SERVER: Next", "pd.common import SOCKET import os import mutex import time import threading import pickle", "try: del self.sessions[session_id][\"user_status\"][key] except: pass def Backup(self): while 1: time.sleep(self.backup_interval) print \"Backing up...\"", "import SOCKET import os import mutex import time import threading import pickle class", "self.next_id+=1 info={\"id\":session_id, \"label\": \"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}} self.sessions[session_id]=info return info def Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id) del", "import time import threading import pickle class SERVER: def __init__(self): self.sessions={} self.next_id=1 #", "type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id\") elif not self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION(\"Invalid 
session id %d\"%session_id)", "elif not self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id %d\"%session_id) # PUBLIC ROUTINES def Session_Info(self,client,session_id):", "RPC interface self.mutex=threading.Lock() self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30 # Save loop self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup self.backup_thread.start() #", "Create_Session(self,client,username): session_id,directory=None,None session_id=self.next_id self.next_id+=1 info={\"id\":session_id, \"label\": \"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}} self.sessions[session_id]=info return info def", "self.mutex=threading.Lock() self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30 # Save loop self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup self.backup_thread.start() # read in", "def Client_Disconnect(self,x): pass def Register_Client(self,client_id,user,host): pass # private def Validate_Session_Id(self,session_id): if type(session_id)!=int: raise", "Client_Disconnect(self,x): pass def Register_Client(self,client_id,user,host): pass # private def Validate_Session_Id(self,session_id): if type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION(\"Invalid", "def Client_Connect(self,x): pass def Client_Disconnect(self,x): pass def Register_Client(self,client_id,user,host): pass # private def Validate_Session_Id(self,session_id):", "from pd.common import CONFIG from pd.common import SOCKET import os import mutex import", "def Update_Status(self,client,session_id,key,value): 
self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time() def Remove_Status_If_Exists(self,client,session_id,key): self.Validate_Session_Id(session_id) try: del self.sessions[session_id][\"user_status\"][key] except: pass", "finally: self.mutex.release() import socket if __name__ == \"__main__\": server=SERVER() SOCKET.SERVER(socket.gethostbyname(CONFIG.pdmon_server_host),CONFIG.pdmon_server_port,server) # SOCKET.SERVER(socket.gethostbyname(CONFIG.pdmon_server_host),CONFIG.pdmon_server_port,server, #", "SERVER: def __init__(self): self.sessions={} self.next_id=1 # Define RPC interface self.mutex=threading.Lock() self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30 #", "SOCKET import os import mutex import time import threading import pickle class SERVER:", "session_id,directory=None,None session_id=self.next_id self.next_id+=1 info={\"id\":session_id, \"label\": \"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}} self.sessions[session_id]=info return info def Delete_Session(self,client,session_id):", "\"label\": \"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}} self.sessions[session_id]=info return info def Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id) del self.sessions[session_id] def", "del self.sessions[session_id][\"user_status\"][key] except: pass def Backup(self): while 1: time.sleep(self.backup_interval) print \"Backing up...\" try:", "import threading import pickle class SERVER: def __init__(self): self.sessions={} self.next_id=1 # Define RPC", "# PUBLIC ROUTINES def Session_Info(self,client,session_id): self.Validate_Session_Id(session_id) return self.sessions[session_id] def 
Session_List(self,client): return self.sessions def", "raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id\") elif not self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id %d\"%session_id) #", "self.mutex.release() import socket if __name__ == \"__main__\": server=SERVER() SOCKET.SERVER(socket.gethostbyname(CONFIG.pdmon_server_host),CONFIG.pdmon_server_port,server) # SOCKET.SERVER(socket.gethostbyname(CONFIG.pdmon_server_host),CONFIG.pdmon_server_port,server, # (CONFIG.server_private_key_file,CONFIG.server_certificate_file,CONFIG.ca_certificate_file))", "return self.sessions def Create_Session(self,client,username): session_id,directory=None,None session_id=self.next_id self.next_id+=1 info={\"id\":session_id, \"label\": \"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}} self.sessions[session_id]=info", "def Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label def Update_Status(self,client,session_id,key,value): self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time() def Remove_Status_If_Exists(self,client,session_id,key): self.Validate_Session_Id(session_id) try:", "interface self.mutex=threading.Lock() self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30 # Save loop self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup self.backup_thread.start() # read", "Update_Status(self,client,session_id,key,value): self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time() def Remove_Status_If_Exists(self,client,session_id,key): 
self.Validate_Session_Id(session_id) try: del self.sessions[session_id][\"user_status\"][key] except: pass def", "def Session_Info(self,client,session_id): self.Validate_Session_Id(session_id) return self.sessions[session_id] def Session_List(self,client): return self.sessions def Create_Session(self,client,username): session_id,directory=None,None session_id=self.next_id", "def Validate_Session_Id(self,session_id): if type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id\") elif not self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION(\"Invalid", "pickle.dump(self.sessions,open(self.session_file,\"w\")) finally: self.mutex.release() import socket if __name__ == \"__main__\": server=SERVER() SOCKET.SERVER(socket.gethostbyname(CONFIG.pdmon_server_host),CONFIG.pdmon_server_port,server) # SOCKET.SERVER(socket.gethostbyname(CONFIG.pdmon_server_host),CONFIG.pdmon_server_port,server,", "in data try: self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1 except: pass print \"MON_SERVER: Next id starts", "self.next_id=max(self.sessions.keys())+1 except: pass print \"MON_SERVER: Next id starts at %d\"%self.next_id def Client_Connect(self,x): pass", "# Save loop self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup self.backup_thread.start() # read in data try: self.sessions=pickle.load(open(self.session_file,\"r\"))", "self.sessions[session_id]=info return info def Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id) del self.sessions[session_id] def Label_Session(self,client,session_id,label): self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label def", "Session_List(self,client): return self.sessions def Create_Session(self,client,username): session_id,directory=None,None session_id=self.next_id self.next_id+=1 info={\"id\":session_id, \"label\": 
\"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}}", "import mutex import time import threading import pickle class SERVER: def __init__(self): self.sessions={}", "\"Backing up...\" try: self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\")) finally: self.mutex.release() import socket if __name__ == \"__main__\":", "1: time.sleep(self.backup_interval) print \"Backing up...\" try: self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\")) finally: self.mutex.release() import socket if", "self.Validate_Session_Id(session_id) return self.sessions[session_id] def Session_List(self,client): return self.sessions def Create_Session(self,client,username): session_id,directory=None,None session_id=self.next_id self.next_id+=1 info={\"id\":session_id,", "self.Validate_Session_Id(session_id) self.sessions[session_id][\"label\"]=label def Update_Status(self,client,session_id,key,value): self.Validate_Session_Id(session_id) self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time() def Remove_Status_If_Exists(self,client,session_id,key): self.Validate_Session_Id(session_id) try: del self.sessions[session_id][\"user_status\"][key]", "def __init__(self): self.sessions={} self.next_id=1 # Define RPC interface self.mutex=threading.Lock() self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30 # Save", "Validate_Session_Id(self,session_id): if type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id\") elif not self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION(\"Invalid session", "session_id=self.next_id self.next_id+=1 info={\"id\":session_id, \"label\": \"<unnamed>\",\"username\": username,\"created_date\":time.time(),\"last_update\": 0,\"user_status\":{}} 
self.sessions[session_id]=info return info def Delete_Session(self,client,session_id): self.Validate_Session_Id(session_id)", "time.sleep(self.backup_interval) print \"Backing up...\" try: self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\")) finally: self.mutex.release() import socket if __name__", "self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id %d\"%session_id) # PUBLIC ROUTINES def Session_Info(self,client,session_id): self.Validate_Session_Id(session_id) return", "self.sessions[session_id][\"user_status\"][key]=value self.sessions[session_id][\"last_update\"]=time.time() def Remove_Status_If_Exists(self,client,session_id,key): self.Validate_Session_Id(session_id) try: del self.sessions[session_id][\"user_status\"][key] except: pass def Backup(self): while", "self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup self.backup_thread.start() # read in data try: self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1 except:", "pd.common import CONFIG from pd.common import SOCKET import os import mutex import time", "Remove_Status_If_Exists(self,client,session_id,key): self.Validate_Session_Id(session_id) try: del self.sessions[session_id][\"user_status\"][key] except: pass def Backup(self): while 1: time.sleep(self.backup_interval) print", "up...\" try: self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\")) finally: self.mutex.release() import socket if __name__ == \"__main__\": server=SERVER()", "threading import pickle class SERVER: def __init__(self): self.sessions={} self.next_id=1 # Define RPC interface", "SOCKET.COMMAND_EXCEPTION(\"Invalid session id\") elif not self.sessions.has_key(session_id): raise SOCKET.COMMAND_EXCEPTION(\"Invalid session id %d\"%session_id) # PUBLIC", "except: pass def Backup(self): while 1: 
time.sleep(self.backup_interval) print \"Backing up...\" try: self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\"))", "while 1: time.sleep(self.backup_interval) print \"Backing up...\" try: self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\")) finally: self.mutex.release() import socket", "id %d\"%session_id) # PUBLIC ROUTINES def Session_Info(self,client,session_id): self.Validate_Session_Id(session_id) return self.sessions[session_id] def Session_List(self,client): return", "self.commands=[\"Register_Client\",\"Session_Info\",\"Session_List\",\"Create_Session\",\"Delete_Session\",\"Label_Session\",\"Update_Status\",\"Remove_Status_If_Exists\"] self.backup_interval=30 # Save loop self.session_file=CONFIG.pdmon_session_file self.backup_thread=threading.Thread() self.backup_thread.run=self.Backup self.backup_thread.start() # read in data", "pass print \"MON_SERVER: Next id starts at %d\"%self.next_id def Client_Connect(self,x): pass def Client_Disconnect(self,x):", "def Backup(self): while 1: time.sleep(self.backup_interval) print \"Backing up...\" try: self.mutex.acquire() pickle.dump(self.sessions,open(self.session_file,\"w\")) finally: self.mutex.release()", "pass def Register_Client(self,client_id,user,host): pass # private def Validate_Session_Id(self,session_id): if type(session_id)!=int: raise SOCKET.COMMAND_EXCEPTION(\"Invalid session", "self.backup_thread.start() # read in data try: self.sessions=pickle.load(open(self.session_file,\"r\")) if(len(self.sessions.keys())>0): self.next_id=max(self.sessions.keys())+1 except: pass print \"MON_SERVER:" ]
[ "69, 72, 76, 73], [1, 1, 4, 2, 1, 1, 0, 0])] for", "List class Solution: def dailyTemperatures(self, T: List[int]) -> List[int]: if not T: return", "<reponame>HearyShen/leetcode-cn import time from typing import List class Solution: def dailyTemperatures(self, T: List[int])", "time from typing import List class Solution: def dailyTemperatures(self, T: List[int]) -> List[int]:", "range(len(T)): # print([(i, T[i]) for i in stack], (i, T[i])) if not stack:", "class Solution: def dailyTemperatures(self, T: List[int]) -> List[int]: if not T: return []", "record and pop all the colder days in stack j = len(stack) -", "ret = Solution().dailyTemperatures(temperatures) toc = time.time() print(f\"{i}: {ret == ans}, return {ret} in", "in range(len(T)): # print([(i, T[i]) for i in stack], (i, T[i])) if not", "[1, 1, 4, 2, 1, 1, 0, 0])] for i, testCase in enumerate(testCases):", "time.time() ret = Solution().dailyTemperatures(temperatures) toc = time.time() print(f\"{i}: {ret == ans}, return {ret}", "T: List[int]) -> List[int]: if not T: return [] deltaDays = [0] *", "if not T: return [] deltaDays = [0] * len(T) stack = []", "T[i]: deltaDays[stack[j]] = i - stack[j] stack.pop() j -= 1 stack.append(i) return deltaDays", "# record and pop all the colder days in stack j = len(stack)", "1 while j >= 0 and T[stack[j]] < T[i]: deltaDays[stack[j]] = i -", "the colder days in stack j = len(stack) - 1 while j >=", "[([73, 74, 75, 71, 69, 72, 76, 73], [1, 1, 4, 2, 1,", "T[i])) if not stack: stack.append(i) continue # record and pop all the colder", "temperatures, ans = testCase tic = time.time() ret = Solution().dailyTemperatures(temperatures) toc = time.time()", "def dailyTemperatures(self, T: List[int]) -> List[int]: if not T: return [] deltaDays =", "tic = time.time() ret = Solution().dailyTemperatures(temperatures) toc = time.time() print(f\"{i}: {ret == ans},", "return deltaDays if __name__ == \"__main__\": testCases = [([73, 74, 75, 71, 69,", "import time from typing import List 
class Solution: def dailyTemperatures(self, T: List[int]) ->", "T[stack[j]] < T[i]: deltaDays[stack[j]] = i - stack[j] stack.pop() j -= 1 stack.append(i)", "i, testCase in enumerate(testCases): temperatures, ans = testCase tic = time.time() ret =", "from typing import List class Solution: def dailyTemperatures(self, T: List[int]) -> List[int]: if", "-= 1 stack.append(i) return deltaDays if __name__ == \"__main__\": testCases = [([73, 74,", "for i, testCase in enumerate(testCases): temperatures, ans = testCase tic = time.time() ret", "testCases = [([73, 74, 75, 71, 69, 72, 76, 73], [1, 1, 4,", "in stack], (i, T[i])) if not stack: stack.append(i) continue # record and pop", "return [] deltaDays = [0] * len(T) stack = [] for i in", "T[i]) for i in stack], (i, T[i])) if not stack: stack.append(i) continue #", "all the colder days in stack j = len(stack) - 1 while j", "= i - stack[j] stack.pop() j -= 1 stack.append(i) return deltaDays if __name__", "Solution: def dailyTemperatures(self, T: List[int]) -> List[int]: if not T: return [] deltaDays", "\"__main__\": testCases = [([73, 74, 75, 71, 69, 72, 76, 73], [1, 1,", "(i, T[i])) if not stack: stack.append(i) continue # record and pop all the", "1, 1, 0, 0])] for i, testCase in enumerate(testCases): temperatures, ans = testCase", "and T[stack[j]] < T[i]: deltaDays[stack[j]] = i - stack[j] stack.pop() j -= 1", "1, 0, 0])] for i, testCase in enumerate(testCases): temperatures, ans = testCase tic", "= [] for i in range(len(T)): # print([(i, T[i]) for i in stack],", "2, 1, 1, 0, 0])] for i, testCase in enumerate(testCases): temperatures, ans =", "* len(T) stack = [] for i in range(len(T)): # print([(i, T[i]) for", "4, 2, 1, 1, 0, 0])] for i, testCase in enumerate(testCases): temperatures, ans", "[] for i in range(len(T)): # print([(i, T[i]) for i in stack], (i,", "typing import List class Solution: def dailyTemperatures(self, T: List[int]) -> List[int]: if not", "import List class Solution: def dailyTemperatures(self, T: 
List[int]) -> List[int]: if not T:", "= testCase tic = time.time() ret = Solution().dailyTemperatures(temperatures) toc = time.time() print(f\"{i}: {ret", "testCase tic = time.time() ret = Solution().dailyTemperatures(temperatures) toc = time.time() print(f\"{i}: {ret ==", "pop all the colder days in stack j = len(stack) - 1 while", "__name__ == \"__main__\": testCases = [([73, 74, 75, 71, 69, 72, 76, 73],", "= len(stack) - 1 while j >= 0 and T[stack[j]] < T[i]: deltaDays[stack[j]]", "0 and T[stack[j]] < T[i]: deltaDays[stack[j]] = i - stack[j] stack.pop() j -=", "73], [1, 1, 4, 2, 1, 1, 0, 0])] for i, testCase in", "stack: stack.append(i) continue # record and pop all the colder days in stack", "75, 71, 69, 72, 76, 73], [1, 1, 4, 2, 1, 1, 0,", "not stack: stack.append(i) continue # record and pop all the colder days in", "-> List[int]: if not T: return [] deltaDays = [0] * len(T) stack", "76, 73], [1, 1, 4, 2, 1, 1, 0, 0])] for i, testCase", "len(T) stack = [] for i in range(len(T)): # print([(i, T[i]) for i", "0, 0])] for i, testCase in enumerate(testCases): temperatures, ans = testCase tic =", "colder days in stack j = len(stack) - 1 while j >= 0", "stack.pop() j -= 1 stack.append(i) return deltaDays if __name__ == \"__main__\": testCases =", "74, 75, 71, 69, 72, 76, 73], [1, 1, 4, 2, 1, 1,", "i - stack[j] stack.pop() j -= 1 stack.append(i) return deltaDays if __name__ ==", "if __name__ == \"__main__\": testCases = [([73, 74, 75, 71, 69, 72, 76,", "print([(i, T[i]) for i in stack], (i, T[i])) if not stack: stack.append(i) continue", "# print([(i, T[i]) for i in stack], (i, T[i])) if not stack: stack.append(i)", "stack.append(i) return deltaDays if __name__ == \"__main__\": testCases = [([73, 74, 75, 71,", "dailyTemperatures(self, T: List[int]) -> List[int]: if not T: return [] deltaDays = [0]", "for i in range(len(T)): # print([(i, T[i]) for i in stack], (i, T[i]))", "= time.time() ret = Solution().dailyTemperatures(temperatures) toc = time.time() print(f\"{i}: 
{ret == ans}, return", "stack = [] for i in range(len(T)): # print([(i, T[i]) for i in", "in enumerate(testCases): temperatures, ans = testCase tic = time.time() ret = Solution().dailyTemperatures(temperatures) toc", "stack], (i, T[i])) if not stack: stack.append(i) continue # record and pop all", "- 1 while j >= 0 and T[stack[j]] < T[i]: deltaDays[stack[j]] = i", "[] deltaDays = [0] * len(T) stack = [] for i in range(len(T)):", "== \"__main__\": testCases = [([73, 74, 75, 71, 69, 72, 76, 73], [1,", "= Solution().dailyTemperatures(temperatures) toc = time.time() print(f\"{i}: {ret == ans}, return {ret} in {toc-tic:.3f}s.\")", "not T: return [] deltaDays = [0] * len(T) stack = [] for", "testCase in enumerate(testCases): temperatures, ans = testCase tic = time.time() ret = Solution().dailyTemperatures(temperatures)", "continue # record and pop all the colder days in stack j =", "< T[i]: deltaDays[stack[j]] = i - stack[j] stack.pop() j -= 1 stack.append(i) return", "stack j = len(stack) - 1 while j >= 0 and T[stack[j]] <", ">= 0 and T[stack[j]] < T[i]: deltaDays[stack[j]] = i - stack[j] stack.pop() j", "0])] for i, testCase in enumerate(testCases): temperatures, ans = testCase tic = time.time()", "enumerate(testCases): temperatures, ans = testCase tic = time.time() ret = Solution().dailyTemperatures(temperatures) toc =", "i in range(len(T)): # print([(i, T[i]) for i in stack], (i, T[i])) if", "j >= 0 and T[stack[j]] < T[i]: deltaDays[stack[j]] = i - stack[j] stack.pop()", "1 stack.append(i) return deltaDays if __name__ == \"__main__\": testCases = [([73, 74, 75,", "= [([73, 74, 75, 71, 69, 72, 76, 73], [1, 1, 4, 2,", "deltaDays[stack[j]] = i - stack[j] stack.pop() j -= 1 stack.append(i) return deltaDays if", "if not stack: stack.append(i) continue # record and pop all the colder days", "- stack[j] stack.pop() j -= 1 stack.append(i) return deltaDays if __name__ == \"__main__\":", "j = len(stack) - 1 while j >= 0 and T[stack[j]] < T[i]:", "ans = testCase tic = 
time.time() ret = Solution().dailyTemperatures(temperatures) toc = time.time() print(f\"{i}:", "while j >= 0 and T[stack[j]] < T[i]: deltaDays[stack[j]] = i - stack[j]", "72, 76, 73], [1, 1, 4, 2, 1, 1, 0, 0])] for i,", "j -= 1 stack.append(i) return deltaDays if __name__ == \"__main__\": testCases = [([73,", "71, 69, 72, 76, 73], [1, 1, 4, 2, 1, 1, 0, 0])]", "len(stack) - 1 while j >= 0 and T[stack[j]] < T[i]: deltaDays[stack[j]] =", "deltaDays = [0] * len(T) stack = [] for i in range(len(T)): #", "[0] * len(T) stack = [] for i in range(len(T)): # print([(i, T[i])", "in stack j = len(stack) - 1 while j >= 0 and T[stack[j]]", "stack[j] stack.pop() j -= 1 stack.append(i) return deltaDays if __name__ == \"__main__\": testCases", "stack.append(i) continue # record and pop all the colder days in stack j", "days in stack j = len(stack) - 1 while j >= 0 and", "deltaDays if __name__ == \"__main__\": testCases = [([73, 74, 75, 71, 69, 72,", "for i in stack], (i, T[i])) if not stack: stack.append(i) continue # record", "i in stack], (i, T[i])) if not stack: stack.append(i) continue # record and", "T: return [] deltaDays = [0] * len(T) stack = [] for i", "and pop all the colder days in stack j = len(stack) - 1", "List[int]) -> List[int]: if not T: return [] deltaDays = [0] * len(T)", "1, 4, 2, 1, 1, 0, 0])] for i, testCase in enumerate(testCases): temperatures,", "= [0] * len(T) stack = [] for i in range(len(T)): # print([(i,", "List[int]: if not T: return [] deltaDays = [0] * len(T) stack =" ]
[ "in obj.garbagetype_set.all()] class Meta: model = Location fields = ( 'id', 'address', 'open_time',", "TransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() datetime = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() collector_id = serializers.SerializerMethodField()", "obj.collector else -1 class Meta: model = Transaction fields = ( 'id', 'datetime',", "{'required': False}, 'location': {'required': False}, 'user': {'required': False} } class TransactionSerializer(serializers.ModelSerializer): id =", "get_location_id(obj): return obj.location.id if obj.location else -1 @staticmethod def get_user_id(obj): return obj.user.id if", "get_user_id(obj): return obj.user.id if obj.user else -1 class Meta: model = CommercialRequest fields", "= CommercialRequest fields = ( 'id', 'address', 'date', 'garbage_type', 'mass', 'status', 'location', 'user'", "gt in obj.garbagetype_set.all()] class Meta: model = Location fields = ( 'id', 'address',", "from recycle.validators import IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast class LocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() open_time =", "= ( IsGarbageCollectorValidator('owner'), ) extra_kwargs = { 'address': {'required': False}, 'open_time': {'required': False},", "'' @staticmethod def get_location_id(obj): return obj.location.id if obj.location else -1 @staticmethod def get_user_id(obj):", "def get_garbage_types(obj): return [{ 'short': gt.garbage_type, 'long': gt.get_garbage_type_display() } for gt in obj.garbagetype_set.all()]", "class CreateLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta: model = Location", ") validators = ( IsGarbageCollectorValidator('owner'), ) class EditLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types =", "= { 'address': {'required': 
False}, 'open_time': {'required': False}, 'close_time': {'required': False}, 'price_per_kg': {'required':", "class EditCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = CommercialRequest fields = (", "CommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() email = serializers.SerializerMethodField() location_id = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField()", "'mass', 'status', 'location', 'user' ) validators = ( IsCommercialValidator('user'), ) extra_kwargs = {", "} class TransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() datetime = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() collector_id", "= Location fields = ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner_id' )", "= serializers.SerializerMethodField() collector_id = serializers.SerializerMethodField() @staticmethod def get_datetime(obj): return obj.datetime.strftime('%b %d, %Y at", "'status', 'location', 'user' ) validators = ( IsCommercialValidator('user'), ) extra_kwargs = { 'address':", "'garbage_type': {'required': False}, 'mass': {'required': False}, 'status': {'required': False}, 'location': {'required': False}, 'user':", "model = Transaction fields = ( 'id', 'garbage_type', 'points', 'mass', 'user', 'collector' )", "'id', 'address', 'email', 'date', 'garbage_type', 'mass', 'status', 'location_id', 'user_id' ) class CreateCommercialOrderSerializer(serializers.ModelSerializer): id", "recycle.models import Location, CommercialRequest, Transaction from recycle.validators import IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast class LocationSerializer(serializers.ModelSerializer):", "{'required': False} } class CommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() email = serializers.SerializerMethodField() location_id =", 
"Meta: model = Transaction fields = ( 'id', 'datetime', 'garbage_type', 'mass', 'points', 'user_id',", "serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() collector_id = serializers.SerializerMethodField() @staticmethod def get_datetime(obj): return obj.datetime.strftime('%b %d,", "model = CommercialRequest fields = ( 'id', 'address', 'email', 'date', 'garbage_type', 'mass', 'status',", "'user': {'required': False} } class TransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() datetime = serializers.SerializerMethodField() user_id", "id = serializers.ReadOnlyField() class Meta: model = Transaction fields = ( 'id', 'garbage_type',", "CreateCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = CommercialRequest fields = ( 'id',", "serializers.ReadOnlyField() class Meta: model = CommercialRequest fields = ( 'id', 'address', 'date', 'garbage_type',", ") class EditCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = CommercialRequest fields =", "if obj.user else -1 class Meta: model = CommercialRequest fields = ( 'id',", "'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) class EditLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField()", "= serializers.SerializerMethodField() @staticmethod def get_open_time(obj): return obj.open_time.strftime('%H:%M') @staticmethod def get_close_time(obj): return obj.close_time.strftime('%H:%M') @staticmethod", "'collector_id' ) class CreateTransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = Transaction fields", "get_open_time(obj): return obj.open_time.strftime('%H:%M') @staticmethod def get_close_time(obj): return obj.close_time.strftime('%H:%M') @staticmethod def get_owner_id(obj): return obj.owner.id", "%H:%M') @staticmethod def get_user_id(obj): return 
obj.user.id if obj.user else -1 @staticmethod def get_collector_id(obj):", "= serializers.ReadOnlyField() open_time = serializers.SerializerMethodField() close_time = serializers.SerializerMethodField() owner_id = serializers.SerializerMethodField() garbage_types =", "IsCommercialValidator('user'), DateIsNotPast('date') ) class EditCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = CommercialRequest", "'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) class", "= ( IsCommercialValidator('user'), DateIsNotPast('date') ) class EditCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model", "if obj.location else -1 @staticmethod def get_user_id(obj): return obj.user.id if obj.user else -1", "serializers.SerializerMethodField() owner_id = serializers.SerializerMethodField() garbage_types = serializers.SerializerMethodField() @staticmethod def get_open_time(obj): return obj.open_time.strftime('%H:%M') @staticmethod", "= serializers.SerializerMethodField() owner_id = serializers.SerializerMethodField() garbage_types = serializers.SerializerMethodField() @staticmethod def get_open_time(obj): return obj.open_time.strftime('%H:%M')", "'short': gt.garbage_type, 'long': gt.get_garbage_type_display() } for gt in obj.garbagetype_set.all()] class Meta: model =", "open_time = serializers.SerializerMethodField() close_time = serializers.SerializerMethodField() owner_id = serializers.SerializerMethodField() garbage_types = serializers.SerializerMethodField() @staticmethod", "= { 'address': {'required': False}, 'date': {'required': False}, 'garbage_type': {'required': False}, 'mass': {'required':", "id = serializers.ReadOnlyField() open_time = serializers.SerializerMethodField() close_time = serializers.SerializerMethodField() owner_id = serializers.SerializerMethodField() garbage_types", 
"'datetime', 'garbage_type', 'mass', 'points', 'user_id', 'collector_id' ) class CreateTransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class", "'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner_id' ) class CreateLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types =", "obj.location else -1 @staticmethod def get_user_id(obj): return obj.user.id if obj.user else -1 class", "def get_user_id(obj): return obj.user.id if obj.user else -1 @staticmethod def get_collector_id(obj): return obj.collector.id", "= ( 'id', 'datetime', 'garbage_type', 'mass', 'points', 'user_id', 'collector_id' ) class CreateTransactionSerializer(serializers.ModelSerializer): id", "'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner_id' ) class CreateLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types", "'address', 'date', 'garbage_type', 'mass', 'status', 'location', 'user' ) validators = ( IsCommercialValidator('user'), DateIsNotPast('date')", "= ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner_id' ) class CreateLocationSerializer(serializers.ModelSerializer): id", "( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'),", "for gt in obj.garbagetype_set.all()] class Meta: model = Location fields = ( 'id',", "serializers.SerializerMethodField() close_time = serializers.SerializerMethodField() owner_id = serializers.SerializerMethodField() garbage_types = serializers.SerializerMethodField() @staticmethod def get_open_time(obj):", "IsGarbageCollectorValidator('owner'), ) extra_kwargs = { 'address': {'required': False}, 'open_time': {'required': False}, 'close_time': {'required':", "} class CommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() email = 
serializers.SerializerMethodField() location_id = serializers.SerializerMethodField() user_id", "get_datetime(obj): return obj.datetime.strftime('%b %d, %Y at %H:%M') @staticmethod def get_user_id(obj): return obj.user.id if", "def get_location_id(obj): return obj.location.id if obj.location else -1 @staticmethod def get_user_id(obj): return obj.user.id", "'garbage_types': {'required': False}, 'owner': {'required': False} } class CommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() email", "= serializers.ReadOnlyField() datetime = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() collector_id = serializers.SerializerMethodField() @staticmethod def", "class Meta: model = Location fields = ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg',", "{'required': False}, 'price_per_kg': {'required': False}, 'garbage_types': {'required': False}, 'owner': {'required': False} } class", "( IsCommercialValidator('user'), ) extra_kwargs = { 'address': {'required': False}, 'date': {'required': False}, 'garbage_type':", "'price_per_kg', 'garbage_types', 'owner_id' ) class CreateLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class", "DateIsNotPast('date') ) class EditCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = CommercialRequest fields", "'id', 'datetime', 'garbage_type', 'mass', 'points', 'user_id', 'collector_id' ) class CreateTransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField()", "id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta: model = Location fields =", "fields = ( 'id', 'address', 'date', 'garbage_type', 'mass', 'status', 'location', 'user' ) validators", "= Transaction fields = ( 'id', 'datetime', 'garbage_type', 'mass', 'points', 'user_id', 'collector_id' )", "return 
obj.location.id if obj.location else -1 @staticmethod def get_user_id(obj): return obj.user.id if obj.user", "obj.user else -1 class Meta: model = CommercialRequest fields = ( 'id', 'address',", "def get_open_time(obj): return obj.open_time.strftime('%H:%M') @staticmethod def get_close_time(obj): return obj.close_time.strftime('%H:%M') @staticmethod def get_owner_id(obj): return", "'status', 'location_id', 'user_id' ) class CreateCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model =", "'garbage_types', 'owner_id' ) class CreateLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta:", "-1 @staticmethod def get_user_id(obj): return obj.user.id if obj.user else -1 class Meta: model", "'garbage_type', 'mass', 'status', 'location', 'user' ) validators = ( IsCommercialValidator('user'), ) extra_kwargs =", "extra_kwargs = { 'address': {'required': False}, 'open_time': {'required': False}, 'close_time': {'required': False}, 'price_per_kg':", "'id', 'address', 'date', 'garbage_type', 'mass', 'status', 'location', 'user' ) validators = ( IsCommercialValidator('user'),", "'date': {'required': False}, 'garbage_type': {'required': False}, 'mass': {'required': False}, 'status': {'required': False}, 'location':", "model = Location fields = ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner'", "serializers.ReadOnlyField() class Meta: model = Transaction fields = ( 'id', 'garbage_type', 'points', 'mass',", "class CommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() email = serializers.SerializerMethodField() location_id = serializers.SerializerMethodField() user_id =", "{'required': False} } class TransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() datetime = serializers.SerializerMethodField() user_id =", "@staticmethod def 
get_user_id(obj): return obj.user.id if obj.user else -1 class Meta: model =", "obj.datetime.strftime('%b %d, %Y at %H:%M') @staticmethod def get_user_id(obj): return obj.user.id if obj.user else", "'location', 'user' ) validators = ( IsCommercialValidator('user'), ) extra_kwargs = { 'address': {'required':", "'date', 'garbage_type', 'mass', 'status', 'location', 'user' ) validators = ( IsCommercialValidator('user'), ) extra_kwargs", "Transaction fields = ( 'id', 'datetime', 'garbage_type', 'mass', 'points', 'user_id', 'collector_id' ) class", "Meta: model = Transaction fields = ( 'id', 'garbage_type', 'points', 'mass', 'user', 'collector'", "LocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() open_time = serializers.SerializerMethodField() close_time = serializers.SerializerMethodField() owner_id = serializers.SerializerMethodField()", "False} } class TransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() datetime = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField()", "{'required': False}, 'user': {'required': False} } class TransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() datetime =", "'address', 'email', 'date', 'garbage_type', 'mass', 'status', 'location_id', 'user_id' ) class CreateCommercialOrderSerializer(serializers.ModelSerializer): id =", "{'required': False}, 'open_time': {'required': False}, 'close_time': {'required': False}, 'price_per_kg': {'required': False}, 'garbage_types': {'required':", "'location', 'user' ) validators = ( IsCommercialValidator('user'), DateIsNotPast('date') ) class EditCommercialOrderSerializer(serializers.ModelSerializer): id =", "'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'), )", "model = CommercialRequest fields = ( 'id', 'address', 'date', 'garbage_type', 'mass', 'status', 'location',", "import serializers 
from recycle.models import Location, CommercialRequest, Transaction from recycle.validators import IsGarbageCollectorValidator, IsCommercialValidator,", "gt.get_garbage_type_display() } for gt in obj.garbagetype_set.all()] class Meta: model = Location fields =", "} for gt in obj.garbagetype_set.all()] class Meta: model = Location fields = (", "id = serializers.ReadOnlyField() email = serializers.SerializerMethodField() location_id = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() @staticmethod", "False}, 'owner': {'required': False} } class CommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() email = serializers.SerializerMethodField()", "= serializers.SerializerMethodField() @staticmethod def get_email(obj): return obj.user.email if obj.user else '' @staticmethod def", "serializers.SerializerMethodField() @staticmethod def get_email(obj): return obj.user.email if obj.user else '' @staticmethod def get_location_id(obj):", "else -1 @staticmethod def get_user_id(obj): return obj.user.id if obj.user else -1 class Meta:", "@staticmethod def get_owner_id(obj): return obj.owner.id @staticmethod def get_garbage_types(obj): return [{ 'short': gt.garbage_type, 'long':", ") validators = ( IsCommercialValidator('user'), ) extra_kwargs = { 'address': {'required': False}, 'date':", "validators = ( IsCommercialValidator('user'), ) extra_kwargs = { 'address': {'required': False}, 'date': {'required':", "'user_id' ) class CreateCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = CommercialRequest fields", "def get_user_id(obj): return obj.user.id if obj.user else -1 class Meta: model = CommercialRequest", ") extra_kwargs = { 'address': {'required': False}, 'open_time': {'required': False}, 'close_time': {'required': False},", "( IsGarbageCollectorValidator('owner'), ) extra_kwargs = { 'address': {'required': False}, 'open_time': {'required': False}, 
'close_time':", "( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner_id' ) class CreateLocationSerializer(serializers.ModelSerializer): id =", "def get_email(obj): return obj.user.email if obj.user else '' @staticmethod def get_location_id(obj): return obj.location.id", "class CreateCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = CommercialRequest fields = (", "get_owner_id(obj): return obj.owner.id @staticmethod def get_garbage_types(obj): return [{ 'short': gt.garbage_type, 'long': gt.get_garbage_type_display() }", "'garbage_type', 'mass', 'status', 'location_id', 'user_id' ) class CreateCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta:", ") extra_kwargs = { 'address': {'required': False}, 'date': {'required': False}, 'garbage_type': {'required': False},", "class EditLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta: model = Location", "= ( 'id', 'address', 'date', 'garbage_type', 'mass', 'status', 'location', 'user' ) validators =", ") class EditLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta: model =", "validators = ( IsGarbageCollectorValidator('owner'), ) extra_kwargs = { 'address': {'required': False}, 'open_time': {'required':", "'email', 'date', 'garbage_type', 'mass', 'status', 'location_id', 'user_id' ) class CreateCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField()", "False}, 'price_per_kg': {'required': False}, 'garbage_types': {'required': False}, 'owner': {'required': False} } class CommercialOrderSerializer(serializers.ModelSerializer):", "'owner': {'required': False} } class CommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() email = 
serializers.SerializerMethodField() location_id", "'price_per_kg', 'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) class EditLocationSerializer(serializers.ModelSerializer): id =", "{ 'address': {'required': False}, 'date': {'required': False}, 'garbage_type': {'required': False}, 'mass': {'required': False},", "= serializers.ReadOnlyField() email = serializers.SerializerMethodField() location_id = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() @staticmethod def", "from rest_framework import serializers from recycle.models import Location, CommercialRequest, Transaction from recycle.validators import", "IsCommercialValidator, DateIsNotPast class LocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() open_time = serializers.SerializerMethodField() close_time = serializers.SerializerMethodField()", "EditCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = CommercialRequest fields = ( 'id',", "obj.user.id if obj.user else -1 @staticmethod def get_collector_id(obj): return obj.collector.id if obj.collector else", "fields = ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner_id' ) class CreateLocationSerializer(serializers.ModelSerializer):", "serializers.ReadOnlyField() open_time = serializers.SerializerMethodField() close_time = serializers.SerializerMethodField() owner_id = serializers.SerializerMethodField() garbage_types = serializers.SerializerMethodField()", "else -1 class Meta: model = Transaction fields = ( 'id', 'datetime', 'garbage_type',", "gt.garbage_type, 'long': gt.get_garbage_type_display() } for gt in obj.garbagetype_set.all()] class Meta: model = Location", "email = serializers.SerializerMethodField() location_id = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() @staticmethod def get_email(obj): return", "'open_time', 'close_time', 
'price_per_kg', 'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) extra_kwargs =", "else -1 class Meta: model = CommercialRequest fields = ( 'id', 'address', 'email',", "user_id = serializers.SerializerMethodField() collector_id = serializers.SerializerMethodField() @staticmethod def get_datetime(obj): return obj.datetime.strftime('%b %d, %Y", "False}, 'close_time': {'required': False}, 'price_per_kg': {'required': False}, 'garbage_types': {'required': False}, 'owner': {'required': False}", "'address', 'date', 'garbage_type', 'mass', 'status', 'location', 'user' ) validators = ( IsCommercialValidator('user'), )", "def get_collector_id(obj): return obj.collector.id if obj.collector else -1 class Meta: model = Transaction", "= ( IsGarbageCollectorValidator('owner'), ) class EditLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class", "CreateTransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = Transaction fields = ( 'id',", "validators = ( IsGarbageCollectorValidator('owner'), ) class EditLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False)", "garbage_types = serializers.ListField(required=False) class Meta: model = Location fields = ( 'id', 'address',", "serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta: model = Location fields = ( 'id',", "recycle.validators import IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast class LocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() open_time = serializers.SerializerMethodField()", "'price_per_kg', 'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) extra_kwargs = { 'address':", ") validators = ( IsGarbageCollectorValidator('owner'), ) extra_kwargs = { 
'address': {'required': False}, 'open_time':", "obj.user else '' @staticmethod def get_location_id(obj): return obj.location.id if obj.location else -1 @staticmethod", "= ( 'id', 'address', 'email', 'date', 'garbage_type', 'mass', 'status', 'location_id', 'user_id' ) class", "IsGarbageCollectorValidator('owner'), ) class EditLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta: model", "'status', 'location', 'user' ) validators = ( IsCommercialValidator('user'), DateIsNotPast('date') ) class EditCommercialOrderSerializer(serializers.ModelSerializer): id", "= serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() @staticmethod def get_email(obj): return obj.user.email if obj.user else", "def get_close_time(obj): return obj.close_time.strftime('%H:%M') @staticmethod def get_owner_id(obj): return obj.owner.id @staticmethod def get_garbage_types(obj): return", "from recycle.models import Location, CommercialRequest, Transaction from recycle.validators import IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast class", "serializers.SerializerMethodField() collector_id = serializers.SerializerMethodField() @staticmethod def get_datetime(obj): return obj.datetime.strftime('%b %d, %Y at %H:%M')", "'location': {'required': False}, 'user': {'required': False} } class TransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() datetime", "return obj.owner.id @staticmethod def get_garbage_types(obj): return [{ 'short': gt.garbage_type, 'long': gt.get_garbage_type_display() } for", "( IsCommercialValidator('user'), DateIsNotPast('date') ) class EditCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model =", "serializers.SerializerMethodField() location_id = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() @staticmethod def get_email(obj): 
return obj.user.email if", "CommercialRequest fields = ( 'id', 'address', 'date', 'garbage_type', 'mass', 'status', 'location', 'user' )", "def get_datetime(obj): return obj.datetime.strftime('%b %d, %Y at %H:%M') @staticmethod def get_user_id(obj): return obj.user.id", "@staticmethod def get_close_time(obj): return obj.close_time.strftime('%H:%M') @staticmethod def get_owner_id(obj): return obj.owner.id @staticmethod def get_garbage_types(obj):", "serializers.SerializerMethodField() @staticmethod def get_datetime(obj): return obj.datetime.strftime('%b %d, %Y at %H:%M') @staticmethod def get_user_id(obj):", "{'required': False}, 'owner': {'required': False} } class CommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() email =", "( IsGarbageCollectorValidator('owner'), ) class EditLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta:", "CommercialRequest, Transaction from recycle.validators import IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast class LocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField()", "'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) extra_kwargs = { 'address': {'required':", "get_user_id(obj): return obj.user.id if obj.user else -1 @staticmethod def get_collector_id(obj): return obj.collector.id if", "'date', 'garbage_type', 'mass', 'status', 'location', 'user' ) validators = ( IsCommercialValidator('user'), DateIsNotPast('date') )", "serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() @staticmethod def get_email(obj): return obj.user.email if obj.user else ''", "Meta: model = Location fields = ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types',", "'close_time', 'price_per_kg', 'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) class 
EditLocationSerializer(serializers.ModelSerializer): id", "= Location fields = ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner' )", "'user_id', 'collector_id' ) class CreateTransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = Transaction", "get_collector_id(obj): return obj.collector.id if obj.collector else -1 class Meta: model = Transaction fields", "'points', 'user_id', 'collector_id' ) class CreateTransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model =", "= serializers.ReadOnlyField() class Meta: model = Transaction fields = ( 'id', 'garbage_type', 'points',", "'mass', 'points', 'user_id', 'collector_id' ) class CreateTransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model", "owner_id = serializers.SerializerMethodField() garbage_types = serializers.SerializerMethodField() @staticmethod def get_open_time(obj): return obj.open_time.strftime('%H:%M') @staticmethod def", "class Meta: model = CommercialRequest fields = ( 'id', 'address', 'date', 'garbage_type', 'mass',", "'owner_id' ) class CreateLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta: model", "= serializers.SerializerMethodField() garbage_types = serializers.SerializerMethodField() @staticmethod def get_open_time(obj): return obj.open_time.strftime('%H:%M') @staticmethod def get_close_time(obj):", "obj.user.email if obj.user else '' @staticmethod def get_location_id(obj): return obj.location.id if obj.location else", "obj.close_time.strftime('%H:%M') @staticmethod def get_owner_id(obj): return obj.owner.id @staticmethod def get_garbage_types(obj): return [{ 'short': gt.garbage_type,", "datetime = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() collector_id = 
serializers.SerializerMethodField() @staticmethod def get_datetime(obj): return", "model = Transaction fields = ( 'id', 'datetime', 'garbage_type', 'mass', 'points', 'user_id', 'collector_id'", "serializers.ListField(required=False) class Meta: model = Location fields = ( 'id', 'address', 'open_time', 'close_time',", ") validators = ( IsCommercialValidator('user'), DateIsNotPast('date') ) class EditCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class", "False}, 'date': {'required': False}, 'garbage_type': {'required': False}, 'mass': {'required': False}, 'status': {'required': False},", "class LocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() open_time = serializers.SerializerMethodField() close_time = serializers.SerializerMethodField() owner_id =", "id = serializers.ReadOnlyField() datetime = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() collector_id = serializers.SerializerMethodField() @staticmethod", "-1 class Meta: model = CommercialRequest fields = ( 'id', 'address', 'email', 'date',", "return obj.close_time.strftime('%H:%M') @staticmethod def get_owner_id(obj): return obj.owner.id @staticmethod def get_garbage_types(obj): return [{ 'short':", "Transaction from recycle.validators import IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast class LocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() open_time", "'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) extra_kwargs = { 'address': {'required': False},", "def get_owner_id(obj): return obj.owner.id @staticmethod def get_garbage_types(obj): return [{ 'short': gt.garbage_type, 'long': gt.get_garbage_type_display()", "location_id = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() @staticmethod def get_email(obj): return obj.user.email if obj.user", "fields = ( 'id', 'address', 'email', 'date', 'garbage_type', 
'mass', 'status', 'location_id', 'user_id' )", "DateIsNotPast class LocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() open_time = serializers.SerializerMethodField() close_time = serializers.SerializerMethodField() owner_id", "return obj.user.email if obj.user else '' @staticmethod def get_location_id(obj): return obj.location.id if obj.location", "return obj.datetime.strftime('%b %d, %Y at %H:%M') @staticmethod def get_user_id(obj): return obj.user.id if obj.user", "obj.user else -1 @staticmethod def get_collector_id(obj): return obj.collector.id if obj.collector else -1 class", "'garbage_type', 'mass', 'points', 'user_id', 'collector_id' ) class CreateTransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta:", "@staticmethod def get_user_id(obj): return obj.user.id if obj.user else -1 @staticmethod def get_collector_id(obj): return", "'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner_id' ) class CreateLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField()", "else -1 @staticmethod def get_collector_id(obj): return obj.collector.id if obj.collector else -1 class Meta:", "'mass', 'status', 'location', 'user' ) validators = ( IsCommercialValidator('user'), DateIsNotPast('date') ) class EditCommercialOrderSerializer(serializers.ModelSerializer):", "obj.location.id if obj.location else -1 @staticmethod def get_user_id(obj): return obj.user.id if obj.user else", "= serializers.ListField(required=False) class Meta: model = Location fields = ( 'id', 'address', 'open_time',", "[{ 'short': gt.garbage_type, 'long': gt.get_garbage_type_display() } for gt in obj.garbagetype_set.all()] class Meta: model", "return obj.user.id if obj.user else -1 class Meta: model = CommercialRequest fields =", "= serializers.SerializerMethodField() @staticmethod def get_datetime(obj): return obj.datetime.strftime('%b %d, %Y at %H:%M') @staticmethod def", 
"'garbage_type', 'mass', 'status', 'location', 'user' ) validators = ( IsCommercialValidator('user'), DateIsNotPast('date') ) class", "{'required': False}, 'garbage_type': {'required': False}, 'mass': {'required': False}, 'status': {'required': False}, 'location': {'required':", "get_close_time(obj): return obj.close_time.strftime('%H:%M') @staticmethod def get_owner_id(obj): return obj.owner.id @staticmethod def get_garbage_types(obj): return [{", "False} } class CommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() email = serializers.SerializerMethodField() location_id = serializers.SerializerMethodField()", "'user' ) validators = ( IsCommercialValidator('user'), DateIsNotPast('date') ) class EditCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField()", "at %H:%M') @staticmethod def get_user_id(obj): return obj.user.id if obj.user else -1 @staticmethod def", "extra_kwargs = { 'address': {'required': False}, 'date': {'required': False}, 'garbage_type': {'required': False}, 'mass':", "'open_time': {'required': False}, 'close_time': {'required': False}, 'price_per_kg': {'required': False}, 'garbage_types': {'required': False}, 'owner':", "{'required': False}, 'close_time': {'required': False}, 'price_per_kg': {'required': False}, 'garbage_types': {'required': False}, 'owner': {'required':", "if obj.user else -1 @staticmethod def get_collector_id(obj): return obj.collector.id if obj.collector else -1", "Location fields = ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner' ) validators", "False}, 'open_time': {'required': False}, 'close_time': {'required': False}, 'price_per_kg': {'required': False}, 'garbage_types': {'required': False},", "collector_id = serializers.SerializerMethodField() @staticmethod def get_datetime(obj): return obj.datetime.strftime('%b %d, %Y at %H:%M') @staticmethod", "Location, CommercialRequest, Transaction from recycle.validators import 
IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast class LocationSerializer(serializers.ModelSerializer): id =", "rest_framework import serializers from recycle.models import Location, CommercialRequest, Transaction from recycle.validators import IsGarbageCollectorValidator,", "'close_time', 'price_per_kg', 'garbage_types', 'owner_id' ) class CreateLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False)", "@staticmethod def get_email(obj): return obj.user.email if obj.user else '' @staticmethod def get_location_id(obj): return", "'location_id', 'user_id' ) class CreateCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = CommercialRequest", "get_garbage_types(obj): return [{ 'short': gt.garbage_type, 'long': gt.get_garbage_type_display() } for gt in obj.garbagetype_set.all()] class", "%Y at %H:%M') @staticmethod def get_user_id(obj): return obj.user.id if obj.user else -1 @staticmethod", "get_email(obj): return obj.user.email if obj.user else '' @staticmethod def get_location_id(obj): return obj.location.id if", "'mass': {'required': False}, 'status': {'required': False}, 'location': {'required': False}, 'user': {'required': False} }", "'user' ) validators = ( IsCommercialValidator('user'), ) extra_kwargs = { 'address': {'required': False},", "CommercialRequest fields = ( 'id', 'address', 'email', 'date', 'garbage_type', 'mass', 'status', 'location_id', 'user_id'", "IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast class LocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() open_time = serializers.SerializerMethodField() close_time =", "return [{ 'short': gt.garbage_type, 'long': gt.get_garbage_type_display() } for gt in obj.garbagetype_set.all()] class Meta:", ") class CreateCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: 
model = CommercialRequest fields =", "'price_per_kg': {'required': False}, 'garbage_types': {'required': False}, 'owner': {'required': False} } class CommercialOrderSerializer(serializers.ModelSerializer): id", "False}, 'mass': {'required': False}, 'status': {'required': False}, 'location': {'required': False}, 'user': {'required': False}", "False}, 'garbage_type': {'required': False}, 'mass': {'required': False}, 'status': {'required': False}, 'location': {'required': False},", "'close_time', 'price_per_kg', 'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) extra_kwargs = {", "= serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() collector_id = serializers.SerializerMethodField() @staticmethod def get_datetime(obj): return obj.datetime.strftime('%b", "'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) extra_kwargs", "import Location, CommercialRequest, Transaction from recycle.validators import IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast class LocationSerializer(serializers.ModelSerializer): id", "serializers.ReadOnlyField() datetime = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() collector_id = serializers.SerializerMethodField() @staticmethod def get_datetime(obj):", "-1 @staticmethod def get_collector_id(obj): return obj.collector.id if obj.collector else -1 class Meta: model", "serializers.SerializerMethodField() @staticmethod def get_open_time(obj): return obj.open_time.strftime('%H:%M') @staticmethod def get_close_time(obj): return obj.close_time.strftime('%H:%M') @staticmethod def", "return obj.user.id if obj.user else -1 @staticmethod def get_collector_id(obj): return obj.collector.id if obj.collector", "'mass', 'status', 'location_id', 'user_id' ) class CreateCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class 
Meta: model", "class Meta: model = CommercialRequest fields = ( 'id', 'address', 'email', 'date', 'garbage_type',", "{'required': False}, 'mass': {'required': False}, 'status': {'required': False}, 'location': {'required': False}, 'user': {'required':", "class Meta: model = Transaction fields = ( 'id', 'garbage_type', 'points', 'mass', 'user',", "'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) class EditLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types", "model = Location fields = ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner_id'", "id = serializers.ReadOnlyField() class Meta: model = CommercialRequest fields = ( 'id', 'address',", "False}, 'garbage_types': {'required': False}, 'owner': {'required': False} } class CommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField()", "class TransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() datetime = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() collector_id =", "= serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta: model = Location fields = (", "obj.garbagetype_set.all()] class Meta: model = Location fields = ( 'id', 'address', 'open_time', 'close_time',", "{'required': False}, 'date': {'required': False}, 'garbage_type': {'required': False}, 'mass': {'required': False}, 'status': {'required':", "serializers.SerializerMethodField() garbage_types = serializers.SerializerMethodField() @staticmethod def get_open_time(obj): return obj.open_time.strftime('%H:%M') @staticmethod def get_close_time(obj): return", "fields = ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner' ) validators =", "= serializers.SerializerMethodField() location_id = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() @staticmethod def get_email(obj): 
return obj.user.email", "'long': gt.get_garbage_type_display() } for gt in obj.garbagetype_set.all()] class Meta: model = Location fields", "False}, 'location': {'required': False}, 'user': {'required': False} } class TransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField()", "EditLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta: model = Location fields", "False}, 'status': {'required': False}, 'location': {'required': False}, 'user': {'required': False} } class TransactionSerializer(serializers.ModelSerializer):", "'address': {'required': False}, 'date': {'required': False}, 'garbage_type': {'required': False}, 'mass': {'required': False}, 'status':", "@staticmethod def get_collector_id(obj): return obj.collector.id if obj.collector else -1 class Meta: model =", "serializers from recycle.models import Location, CommercialRequest, Transaction from recycle.validators import IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast", "obj.user.id if obj.user else -1 class Meta: model = CommercialRequest fields = (", "'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner' ) validators = ( IsGarbageCollectorValidator('owner'), ) class EditLocationSerializer(serializers.ModelSerializer):", "'address': {'required': False}, 'open_time': {'required': False}, 'close_time': {'required': False}, 'price_per_kg': {'required': False}, 'garbage_types':", "= CommercialRequest fields = ( 'id', 'address', 'email', 'date', 'garbage_type', 'mass', 'status', 'location_id',", "'close_time': {'required': False}, 'price_per_kg': {'required': False}, 'garbage_types': {'required': False}, 'owner': {'required': False} }", "'date', 'garbage_type', 'mass', 'status', 'location_id', 'user_id' ) class CreateCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class", "{'required': False}, 'garbage_types': 
{'required': False}, 'owner': {'required': False} } class CommercialOrderSerializer(serializers.ModelSerializer): id =", "@staticmethod def get_datetime(obj): return obj.datetime.strftime('%b %d, %Y at %H:%M') @staticmethod def get_user_id(obj): return", "class CreateTransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = Transaction fields = (", "import IsGarbageCollectorValidator, IsCommercialValidator, DateIsNotPast class LocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() open_time = serializers.SerializerMethodField() close_time", ") class CreateTransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta: model = Transaction fields =", "Location fields = ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner_id' ) class", "close_time = serializers.SerializerMethodField() owner_id = serializers.SerializerMethodField() garbage_types = serializers.SerializerMethodField() @staticmethod def get_open_time(obj): return", "= ( IsCommercialValidator('user'), ) extra_kwargs = { 'address': {'required': False}, 'date': {'required': False},", "if obj.user else '' @staticmethod def get_location_id(obj): return obj.location.id if obj.location else -1", "CreateLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta: model = Location fields", "serializers.ReadOnlyField() email = serializers.SerializerMethodField() location_id = serializers.SerializerMethodField() user_id = serializers.SerializerMethodField() @staticmethod def get_email(obj):", "validators = ( IsCommercialValidator('user'), DateIsNotPast('date') ) class EditCommercialOrderSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() class Meta:", "obj.owner.id @staticmethod def get_garbage_types(obj): return [{ 'short': gt.garbage_type, 'long': 
gt.get_garbage_type_display() } for gt", "@staticmethod def get_garbage_types(obj): return [{ 'short': gt.garbage_type, 'long': gt.get_garbage_type_display() } for gt in", "IsCommercialValidator('user'), ) extra_kwargs = { 'address': {'required': False}, 'date': {'required': False}, 'garbage_type': {'required':", "return obj.collector.id if obj.collector else -1 class Meta: model = Transaction fields =", "{'required': False}, 'status': {'required': False}, 'location': {'required': False}, 'user': {'required': False} } class", "-1 class Meta: model = Transaction fields = ( 'id', 'datetime', 'garbage_type', 'mass',", "False}, 'user': {'required': False} } class TransactionSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() datetime = serializers.SerializerMethodField()", "( 'id', 'address', 'date', 'garbage_type', 'mass', 'status', 'location', 'user' ) validators = (", "return obj.open_time.strftime('%H:%M') @staticmethod def get_close_time(obj): return obj.close_time.strftime('%H:%M') @staticmethod def get_owner_id(obj): return obj.owner.id @staticmethod", "else '' @staticmethod def get_location_id(obj): return obj.location.id if obj.location else -1 @staticmethod def", "user_id = serializers.SerializerMethodField() @staticmethod def get_email(obj): return obj.user.email if obj.user else '' @staticmethod", "= serializers.ReadOnlyField() class Meta: model = CommercialRequest fields = ( 'id', 'address', 'date',", "%d, %Y at %H:%M') @staticmethod def get_user_id(obj): return obj.user.id if obj.user else -1", "= ( 'id', 'address', 'open_time', 'close_time', 'price_per_kg', 'garbage_types', 'owner' ) validators = (", "garbage_types = serializers.SerializerMethodField() @staticmethod def get_open_time(obj): return obj.open_time.strftime('%H:%M') @staticmethod def get_close_time(obj): return obj.close_time.strftime('%H:%M')", "fields = ( 'id', 'datetime', 'garbage_type', 'mass', 'points', 'user_id', 'collector_id' ) class 
CreateTransactionSerializer(serializers.ModelSerializer):", "if obj.collector else -1 class Meta: model = Transaction fields = ( 'id',", "'status': {'required': False}, 'location': {'required': False}, 'user': {'required': False} } class TransactionSerializer(serializers.ModelSerializer): id", "class Meta: model = Transaction fields = ( 'id', 'datetime', 'garbage_type', 'mass', 'points',", "( 'id', 'address', 'email', 'date', 'garbage_type', 'mass', 'status', 'location_id', 'user_id' ) class CreateCommercialOrderSerializer(serializers.ModelSerializer):", "obj.open_time.strftime('%H:%M') @staticmethod def get_close_time(obj): return obj.close_time.strftime('%H:%M') @staticmethod def get_owner_id(obj): return obj.owner.id @staticmethod def", "Meta: model = CommercialRequest fields = ( 'id', 'address', 'email', 'date', 'garbage_type', 'mass',", ") class CreateLocationSerializer(serializers.ModelSerializer): id = serializers.ReadOnlyField() garbage_types = serializers.ListField(required=False) class Meta: model =", "{ 'address': {'required': False}, 'open_time': {'required': False}, 'close_time': {'required': False}, 'price_per_kg': {'required': False},", "obj.collector.id if obj.collector else -1 class Meta: model = Transaction fields = (", "@staticmethod def get_open_time(obj): return obj.open_time.strftime('%H:%M') @staticmethod def get_close_time(obj): return obj.close_time.strftime('%H:%M') @staticmethod def get_owner_id(obj):", "= serializers.SerializerMethodField() close_time = serializers.SerializerMethodField() owner_id = serializers.SerializerMethodField() garbage_types = serializers.SerializerMethodField() @staticmethod def", "@staticmethod def get_location_id(obj): return obj.location.id if obj.location else -1 @staticmethod def get_user_id(obj): return", "Meta: model = CommercialRequest fields = ( 'id', 'address', 'date', 'garbage_type', 'mass', 'status',", "( 'id', 'datetime', 'garbage_type', 'mass', 'points', 'user_id', 'collector_id' ) class 
CreateTransactionSerializer(serializers.ModelSerializer): id =" ]
[ "# def __init__(self, val=0, next=None): # self.val = val # self.next = next", "= cur.next cur.next = head m = n - k % n i", "k): \"\"\" :type head: ListNode :type k: int :rtype: ListNode \"\"\" if not", "1 cur = head while cur.next: n+=1 cur = cur.next cur.next = head", "head: return None n = 1 cur = head while cur.next: n+=1 cur", "while cur.next: n+=1 cur = cur.next cur.next = head m = n -", "self.val = val # self.next = next class Solution(object): def rotateRight(self, head, k):", ":type k: int :rtype: ListNode \"\"\" if not head: return None n =", "singly-linked list. # class ListNode(object): # def __init__(self, val=0, next=None): # self.val =", "i = 0 while i < m: cur = cur.next i += 1", "for singly-linked list. # class ListNode(object): # def __init__(self, val=0, next=None): # self.val", "cur.next: n+=1 cur = cur.next cur.next = head m = n - k", "0 while i < m: cur = cur.next i += 1 res =", "# self.val = val # self.next = next class Solution(object): def rotateRight(self, head,", "cur = cur.next cur.next = head m = n - k % n", "while i < m: cur = cur.next i += 1 res = cur.next", "k: int :rtype: ListNode \"\"\" if not head: return None n = 1", "\"\"\" :type head: ListNode :type k: int :rtype: ListNode \"\"\" if not head:", "list. # class ListNode(object): # def __init__(self, val=0, next=None): # self.val = val", "def __init__(self, val=0, next=None): # self.val = val # self.next = next class", "i < m: cur = cur.next i += 1 res = cur.next cur.next", "cur = cur.next i += 1 res = cur.next cur.next = None return", "# Definition for singly-linked list. 
# class ListNode(object): # def __init__(self, val=0, next=None):", "cur = head while cur.next: n+=1 cur = cur.next cur.next = head m", "not head: return None n = 1 cur = head while cur.next: n+=1", "ListNode(object): # def __init__(self, val=0, next=None): # self.val = val # self.next =", ":rtype: ListNode \"\"\" if not head: return None n = 1 cur =", "k % n i = 0 while i < m: cur = cur.next", "n = 1 cur = head while cur.next: n+=1 cur = cur.next cur.next", "next class Solution(object): def rotateRight(self, head, k): \"\"\" :type head: ListNode :type k:", "if not head: return None n = 1 cur = head while cur.next:", "self.next = next class Solution(object): def rotateRight(self, head, k): \"\"\" :type head: ListNode", "None n = 1 cur = head while cur.next: n+=1 cur = cur.next", "= next class Solution(object): def rotateRight(self, head, k): \"\"\" :type head: ListNode :type", "Solution(object): def rotateRight(self, head, k): \"\"\" :type head: ListNode :type k: int :rtype:", "return None n = 1 cur = head while cur.next: n+=1 cur =", "# self.next = next class Solution(object): def rotateRight(self, head, k): \"\"\" :type head:", "= head while cur.next: n+=1 cur = cur.next cur.next = head m =", "val # self.next = next class Solution(object): def rotateRight(self, head, k): \"\"\" :type", "cur.next cur.next = head m = n - k % n i =", "__init__(self, val=0, next=None): # self.val = val # self.next = next class Solution(object):", "class Solution(object): def rotateRight(self, head, k): \"\"\" :type head: ListNode :type k: int", "n - k % n i = 0 while i < m: cur", "% n i = 0 while i < m: cur = cur.next i", "n i = 0 while i < m: cur = cur.next i +=", "class ListNode(object): # def __init__(self, val=0, next=None): # self.val = val # self.next", "rotateRight(self, head, k): \"\"\" :type head: ListNode :type k: int :rtype: ListNode \"\"\"", "= head m = n - k % n i = 0 while", "n+=1 cur = cur.next cur.next = head m = n - k %", "def rotateRight(self, head, k): \"\"\" :type 
head: ListNode :type k: int :rtype: ListNode", "= val # self.next = next class Solution(object): def rotateRight(self, head, k): \"\"\"", "m = n - k % n i = 0 while i <", "= n - k % n i = 0 while i < m:", "- k % n i = 0 while i < m: cur =", "m: cur = cur.next i += 1 res = cur.next cur.next = None", "= cur.next i += 1 res = cur.next cur.next = None return res", "Definition for singly-linked list. # class ListNode(object): # def __init__(self, val=0, next=None): #", "ListNode :type k: int :rtype: ListNode \"\"\" if not head: return None n", "= 0 while i < m: cur = cur.next i += 1 res", "# class ListNode(object): # def __init__(self, val=0, next=None): # self.val = val #", "head, k): \"\"\" :type head: ListNode :type k: int :rtype: ListNode \"\"\" if", "int :rtype: ListNode \"\"\" if not head: return None n = 1 cur", "cur.next = head m = n - k % n i = 0", "ListNode \"\"\" if not head: return None n = 1 cur = head", "head: ListNode :type k: int :rtype: ListNode \"\"\" if not head: return None", "= 1 cur = head while cur.next: n+=1 cur = cur.next cur.next =", "< m: cur = cur.next i += 1 res = cur.next cur.next =", "\"\"\" if not head: return None n = 1 cur = head while", ":type head: ListNode :type k: int :rtype: ListNode \"\"\" if not head: return", "val=0, next=None): # self.val = val # self.next = next class Solution(object): def", "head while cur.next: n+=1 cur = cur.next cur.next = head m = n", "head m = n - k % n i = 0 while i", "next=None): # self.val = val # self.next = next class Solution(object): def rotateRight(self," ]
[ "and Contributors # See license.txt # import frappe import unittest class TestStore(unittest.TestCase): pass", "(c) 2021, mds and Contributors # See license.txt # import frappe import unittest", "2021, mds and Contributors # See license.txt # import frappe import unittest class", "mds and Contributors # See license.txt # import frappe import unittest class TestStore(unittest.TestCase):", "# Copyright (c) 2021, mds and Contributors # See license.txt # import frappe", "<reponame>keithyang77/ofstest # Copyright (c) 2021, mds and Contributors # See license.txt # import", "Copyright (c) 2021, mds and Contributors # See license.txt # import frappe import" ]
[ "validate_config(config) # initialize the DatabaseClient database = oci.database.DatabaseClient(config) db_system_ids = [\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if not", "<filename>steps/oci-database-step-instances-terminate/step.py #!/usr/bin/env python import oci config = oci.config.from_file() from oci.config import validate_config validate_config(config)", "not db_system_ids: print(\"No instance IDs found\") exit(0) print('Terminateing instances: {}'.format(db_system_ids)) for db_system_id in", "print(\"No instance IDs found\") exit(0) print('Terminateing instances: {}'.format(db_system_ids)) for db_system_id in db_system_ids: database.terminate_db_system(db_system_ids)", "DatabaseClient database = oci.database.DatabaseClient(config) db_system_ids = [\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if not db_system_ids: print(\"No instance IDs", "python import oci config = oci.config.from_file() from oci.config import validate_config validate_config(config) # initialize", "validate_config validate_config(config) # initialize the DatabaseClient database = oci.database.DatabaseClient(config) db_system_ids = [\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if", "oci.config.from_file() from oci.config import validate_config validate_config(config) # initialize the DatabaseClient database = oci.database.DatabaseClient(config)", "db_system_ids: print(\"No instance IDs found\") exit(0) print('Terminateing instances: {}'.format(db_system_ids)) for db_system_id in db_system_ids:", "db_system_ids = [\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if not db_system_ids: print(\"No instance IDs found\") exit(0) print('Terminateing instances:", "oci config = oci.config.from_file() from oci.config import validate_config validate_config(config) # initialize the DatabaseClient", "# initialize the DatabaseClient database = oci.database.DatabaseClient(config) db_system_ids = [\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if not db_system_ids:", "= oci.config.from_file() from oci.config import validate_config validate_config(config) # initialize the 
DatabaseClient database =", "oci.config import validate_config validate_config(config) # initialize the DatabaseClient database = oci.database.DatabaseClient(config) db_system_ids =", "database = oci.database.DatabaseClient(config) db_system_ids = [\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if not db_system_ids: print(\"No instance IDs found\")", "= [\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if not db_system_ids: print(\"No instance IDs found\") exit(0) print('Terminateing instances: {}'.format(db_system_ids))", "import validate_config validate_config(config) # initialize the DatabaseClient database = oci.database.DatabaseClient(config) db_system_ids = [\"dfsdfgsfdsdf\",\"fsdxfgsd\"]", "#!/usr/bin/env python import oci config = oci.config.from_file() from oci.config import validate_config validate_config(config) #", "initialize the DatabaseClient database = oci.database.DatabaseClient(config) db_system_ids = [\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if not db_system_ids: print(\"No", "= oci.database.DatabaseClient(config) db_system_ids = [\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if not db_system_ids: print(\"No instance IDs found\") exit(0)", "the DatabaseClient database = oci.database.DatabaseClient(config) db_system_ids = [\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if not db_system_ids: print(\"No instance", "from oci.config import validate_config validate_config(config) # initialize the DatabaseClient database = oci.database.DatabaseClient(config) db_system_ids", "oci.database.DatabaseClient(config) db_system_ids = [\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if not db_system_ids: print(\"No instance IDs found\") exit(0) print('Terminateing", "config = oci.config.from_file() from oci.config import validate_config validate_config(config) # initialize the DatabaseClient database", "if not db_system_ids: print(\"No instance IDs found\") exit(0) print('Terminateing instances: {}'.format(db_system_ids)) for db_system_id", "import oci config = oci.config.from_file() from oci.config import validate_config validate_config(config) # 
initialize the", "[\"dfsdfgsfdsdf\",\"fsdxfgsd\"] if not db_system_ids: print(\"No instance IDs found\") exit(0) print('Terminateing instances: {}'.format(db_system_ids)) for" ]
[ "* 50) print \"dir info: {}\".format(dir(obj)) for attr in dir(obj): try: if attr.startswith(\"__\")", "else: print \"--- attribute {}: {}\".format(attr, attr_value) except Exception, e: print \"*** read", "ms] {}\".format( _id[0], time.strftime(\"%Y-%m-%d %H:%M:%S\"), (end_time - start_time) * 1000, {\"func_name\": func.func_name, \"file_name\":", "full=False): print \"\\nStart {} {}\".format(obj, \"=\" * 50) print \"dir info: {}\".format(dir(obj)) for", "{}\".format(obj, \"=\" * 50) print \"dir info: {}\".format(dir(obj)) for attr in dir(obj): try:", "50) print \"dir info: {}\".format(dir(obj)) for attr in dir(obj): try: if attr.startswith(\"__\") and", "(end_time - start_time) * 1000, {\"func_name\": func.func_name, \"file_name\": func.func_code.co_filename, \"file_lineno\": func.func_code.co_firstlineno } )", "attr.startswith(\"__\") and not full: continue if attr.startswith(\"_\") and not inner: continue attr_value =", "[{:.3f} ms] {}\".format( _id[0], time.strftime(\"%Y-%m-%d %H:%M:%S\"), (end_time - start_time) * 1000, {\"func_name\": func.func_name,", "inner=True, full=False): print \"\\nStart {} {}\".format(obj, \"=\" * 50) print \"dir info: {}\".format(dir(obj))", "print \"{} [{}] [{:.3f} ms] {}\".format( _id[0], time.strftime(\"%Y-%m-%d %H:%M:%S\"), (end_time - start_time) *", "inner: continue attr_value = getattr(obj, attr) if \"method\" in str(type(attr_value)): print \"### method", "\"file_name\": func.func_code.co_filename, \"file_lineno\": func.func_code.co_firstlineno } ) return result return _wrapper def super_test(): print_obj(str,", "func.func_code.co_filename, \"file_lineno\": func.func_code.co_firstlineno } ) return result return _wrapper def super_test(): print_obj(str, full=True)", "print_obj(obj, inner=True, full=False): print \"\\nStart {} {}\".format(obj, \"=\" * 50) print \"dir info:", "for attr in dir(obj): try: if attr.startswith(\"__\") and not full: continue if attr.startswith(\"_\")", "''' import time def print_obj(obj, 
inner=True, full=False): print \"\\nStart {} {}\".format(obj, \"=\" *", "{}: {}\".format(attr, attr_value) except Exception, e: print \"*** read error {}: {}\".format(attr, e)", "start_time) * 1000, {\"func_name\": func.func_name, \"file_name\": func.func_code.co_filename, \"file_lineno\": func.func_code.co_firstlineno } ) return result", "in str(type(attr_value)): print \"### method {}\".format(attr) elif \"wrapper\" in str(type(attr_value)): print \"=== wrapper", "print \"--- attribute {}: {}\".format(attr, attr_value) except Exception, e: print \"*** read error", "= [0] def _wrapper(*args,**kwargs): start_time = time.time() result = func(*args,**kwargs) end_time = time.time()", "cshanxiao @date: 2016-07-18 ''' import time def print_obj(obj, inner=True, full=False): print \"\\nStart {}", "attr) if \"method\" in str(type(attr_value)): print \"### method {}\".format(attr) elif \"wrapper\" in str(type(attr_value)):", "* 50) def func_time(func): _id = [0] def _wrapper(*args,**kwargs): start_time = time.time() result", "str(type(attr_value)): print \"### method {}\".format(attr) elif \"wrapper\" in str(type(attr_value)): print \"=== wrapper {}\".format(attr)", "{}\".format(attr, e) print (\"End {} {}\").format(obj, \"=\" * 50) def func_time(func): _id =", "\"method\" in str(type(attr_value)): print \"### method {}\".format(attr) elif \"wrapper\" in str(type(attr_value)): print \"===", "time def print_obj(obj, inner=True, full=False): print \"\\nStart {} {}\".format(obj, \"=\" * 50) print", "attr_value = getattr(obj, attr) if \"method\" in str(type(attr_value)): print \"### method {}\".format(attr) elif", "print \"=== wrapper {}\".format(attr) else: print \"--- attribute {}: {}\".format(attr, attr_value) except Exception,", "end_time = time.time() _id[0] += 1 print \"{} [{}] [{:.3f} ms] {}\".format( _id[0],", "read error {}: {}\".format(attr, e) print (\"End {} {}\").format(obj, \"=\" * 50) def", "50) def func_time(func): _id = [0] def _wrapper(*args,**kwargs): 
start_time = time.time() result =", "_wrapper(*args,**kwargs): start_time = time.time() result = func(*args,**kwargs) end_time = time.time() _id[0] += 1", "and not full: continue if attr.startswith(\"_\") and not inner: continue attr_value = getattr(obj,", "in str(type(attr_value)): print \"=== wrapper {}\".format(attr) else: print \"--- attribute {}: {}\".format(attr, attr_value)", "def print_obj(obj, inner=True, full=False): print \"\\nStart {} {}\".format(obj, \"=\" * 50) print \"dir", "\"\\nStart {} {}\".format(obj, \"=\" * 50) print \"dir info: {}\".format(dir(obj)) for attr in", "attr in dir(obj): try: if attr.startswith(\"__\") and not full: continue if attr.startswith(\"_\") and", "time.strftime(\"%Y-%m-%d %H:%M:%S\"), (end_time - start_time) * 1000, {\"func_name\": func.func_name, \"file_name\": func.func_code.co_filename, \"file_lineno\": func.func_code.co_firstlineno", "= time.time() _id[0] += 1 print \"{} [{}] [{:.3f} ms] {}\".format( _id[0], time.strftime(\"%Y-%m-%d", "} ) return result return _wrapper def super_test(): print_obj(str, full=True) print_obj(lambda x: x,", "{} {}\".format(obj, \"=\" * 50) print \"dir info: {}\".format(dir(obj)) for attr in dir(obj):", "result return _wrapper def super_test(): print_obj(str, full=True) print_obj(lambda x: x, inner=True) if __name__", "e: print \"*** read error {}: {}\".format(attr, e) print (\"End {} {}\").format(obj, \"=\"", "return _wrapper def super_test(): print_obj(str, full=True) print_obj(lambda x: x, inner=True) if __name__ ==", "%H:%M:%S\"), (end_time - start_time) * 1000, {\"func_name\": func.func_name, \"file_name\": func.func_code.co_filename, \"file_lineno\": func.func_code.co_firstlineno }", "wrapper {}\".format(attr) else: print \"--- attribute {}: {}\".format(attr, attr_value) except Exception, e: print", "{}\".format(attr) else: print \"--- attribute {}: {}\".format(attr, attr_value) except Exception, e: print \"***", "info: {}\".format(dir(obj)) for attr in dir(obj): try: if 
attr.startswith(\"__\") and not full: continue", "1000, {\"func_name\": func.func_name, \"file_name\": func.func_code.co_filename, \"file_lineno\": func.func_code.co_firstlineno } ) return result return _wrapper", "\"wrapper\" in str(type(attr_value)): print \"=== wrapper {}\".format(attr) else: print \"--- attribute {}: {}\".format(attr,", "\"*** read error {}: {}\".format(attr, e) print (\"End {} {}\").format(obj, \"=\" * 50)", "[{}] [{:.3f} ms] {}\".format( _id[0], time.strftime(\"%Y-%m-%d %H:%M:%S\"), (end_time - start_time) * 1000, {\"func_name\":", "try: if attr.startswith(\"__\") and not full: continue if attr.startswith(\"_\") and not inner: continue", "\"=\" * 50) def func_time(func): _id = [0] def _wrapper(*args,**kwargs): start_time = time.time()", "if attr.startswith(\"__\") and not full: continue if attr.startswith(\"_\") and not inner: continue attr_value", "{\"func_name\": func.func_name, \"file_name\": func.func_code.co_filename, \"file_lineno\": func.func_code.co_firstlineno } ) return result return _wrapper def", "in dir(obj): try: if attr.startswith(\"__\") and not full: continue if attr.startswith(\"_\") and not", "_id = [0] def _wrapper(*args,**kwargs): start_time = time.time() result = func(*args,**kwargs) end_time =", "@author: cshanxiao @date: 2016-07-18 ''' import time def print_obj(obj, inner=True, full=False): print \"\\nStart", "_wrapper def super_test(): print_obj(str, full=True) print_obj(lambda x: x, inner=True) if __name__ == '__main__':", "= time.time() result = func(*args,**kwargs) end_time = time.time() _id[0] += 1 print \"{}", "-*- u''' @summary: @author: cshanxiao @date: 2016-07-18 ''' import time def print_obj(obj, inner=True,", "e) print (\"End {} {}\").format(obj, \"=\" * 50) def func_time(func): _id = [0]", "func.func_name, \"file_name\": func.func_code.co_filename, \"file_lineno\": func.func_code.co_firstlineno } ) return result return _wrapper def super_test():", "def _wrapper(*args,**kwargs): start_time = time.time() 
result = func(*args,**kwargs) end_time = time.time() _id[0] +=", "(\"End {} {}\").format(obj, \"=\" * 50) def func_time(func): _id = [0] def _wrapper(*args,**kwargs):", "- start_time) * 1000, {\"func_name\": func.func_name, \"file_name\": func.func_code.co_filename, \"file_lineno\": func.func_code.co_firstlineno } ) return", "\"### method {}\".format(attr) elif \"wrapper\" in str(type(attr_value)): print \"=== wrapper {}\".format(attr) else: print", "{}\".format(attr) elif \"wrapper\" in str(type(attr_value)): print \"=== wrapper {}\".format(attr) else: print \"--- attribute", "except Exception, e: print \"*** read error {}: {}\".format(attr, e) print (\"End {}", "print \"\\nStart {} {}\".format(obj, \"=\" * 50) print \"dir info: {}\".format(dir(obj)) for attr", "if attr.startswith(\"_\") and not inner: continue attr_value = getattr(obj, attr) if \"method\" in", "elif \"wrapper\" in str(type(attr_value)): print \"=== wrapper {}\".format(attr) else: print \"--- attribute {}:", "start_time = time.time() result = func(*args,**kwargs) end_time = time.time() _id[0] += 1 print", "Exception, e: print \"*** read error {}: {}\".format(attr, e) print (\"End {} {}\").format(obj,", "@summary: @author: cshanxiao @date: 2016-07-18 ''' import time def print_obj(obj, inner=True, full=False): print", "= getattr(obj, attr) if \"method\" in str(type(attr_value)): print \"### method {}\".format(attr) elif \"wrapper\"", "1 print \"{} [{}] [{:.3f} ms] {}\".format( _id[0], time.strftime(\"%Y-%m-%d %H:%M:%S\"), (end_time - start_time)", "print \"### method {}\".format(attr) elif \"wrapper\" in str(type(attr_value)): print \"=== wrapper {}\".format(attr) else:", "func_time(func): _id = [0] def _wrapper(*args,**kwargs): start_time = time.time() result = func(*args,**kwargs) end_time", "import time def print_obj(obj, inner=True, full=False): print \"\\nStart {} {}\".format(obj, \"=\" * 50)", "time.time() result = func(*args,**kwargs) end_time = time.time() _id[0] += 1 print \"{} [{}]", 
"{}\".format( _id[0], time.strftime(\"%Y-%m-%d %H:%M:%S\"), (end_time - start_time) * 1000, {\"func_name\": func.func_name, \"file_name\": func.func_code.co_filename,", "if \"method\" in str(type(attr_value)): print \"### method {}\".format(attr) elif \"wrapper\" in str(type(attr_value)): print", "error {}: {}\".format(attr, e) print (\"End {} {}\").format(obj, \"=\" * 50) def func_time(func):", "full: continue if attr.startswith(\"_\") and not inner: continue attr_value = getattr(obj, attr) if", "u''' @summary: @author: cshanxiao @date: 2016-07-18 ''' import time def print_obj(obj, inner=True, full=False):", "+= 1 print \"{} [{}] [{:.3f} ms] {}\".format( _id[0], time.strftime(\"%Y-%m-%d %H:%M:%S\"), (end_time -", "method {}\".format(attr) elif \"wrapper\" in str(type(attr_value)): print \"=== wrapper {}\".format(attr) else: print \"---", "continue if attr.startswith(\"_\") and not inner: continue attr_value = getattr(obj, attr) if \"method\"", "{}: {}\".format(attr, e) print (\"End {} {}\").format(obj, \"=\" * 50) def func_time(func): _id", "\"=== wrapper {}\".format(attr) else: print \"--- attribute {}: {}\".format(attr, attr_value) except Exception, e:", "print \"*** read error {}: {}\".format(attr, e) print (\"End {} {}\").format(obj, \"=\" *", "dir(obj): try: if attr.startswith(\"__\") and not full: continue if attr.startswith(\"_\") and not inner:", "\"=\" * 50) print \"dir info: {}\".format(dir(obj)) for attr in dir(obj): try: if", "{} {}\").format(obj, \"=\" * 50) def func_time(func): _id = [0] def _wrapper(*args,**kwargs): start_time", "continue attr_value = getattr(obj, attr) if \"method\" in str(type(attr_value)): print \"### method {}\".format(attr)", "_id[0], time.strftime(\"%Y-%m-%d %H:%M:%S\"), (end_time - start_time) * 1000, {\"func_name\": func.func_name, \"file_name\": func.func_code.co_filename, \"file_lineno\":", "print \"dir info: {}\".format(dir(obj)) for attr in dir(obj): try: if attr.startswith(\"__\") and not", "str(type(attr_value)): print 
\"=== wrapper {}\".format(attr) else: print \"--- attribute {}: {}\".format(attr, attr_value) except", "<reponame>cshanxiao/superutils<filename>superutils/utils.py # -*- coding: utf-8 -*- u''' @summary: @author: cshanxiao @date: 2016-07-18 '''", "and not inner: continue attr_value = getattr(obj, attr) if \"method\" in str(type(attr_value)): print", "2016-07-18 ''' import time def print_obj(obj, inner=True, full=False): print \"\\nStart {} {}\".format(obj, \"=\"", "attr_value) except Exception, e: print \"*** read error {}: {}\".format(attr, e) print (\"End", ") return result return _wrapper def super_test(): print_obj(str, full=True) print_obj(lambda x: x, inner=True)", "return result return _wrapper def super_test(): print_obj(str, full=True) print_obj(lambda x: x, inner=True) if", "= func(*args,**kwargs) end_time = time.time() _id[0] += 1 print \"{} [{}] [{:.3f} ms]", "@date: 2016-07-18 ''' import time def print_obj(obj, inner=True, full=False): print \"\\nStart {} {}\".format(obj,", "{}\").format(obj, \"=\" * 50) def func_time(func): _id = [0] def _wrapper(*args,**kwargs): start_time =", "print (\"End {} {}\").format(obj, \"=\" * 50) def func_time(func): _id = [0] def", "not inner: continue attr_value = getattr(obj, attr) if \"method\" in str(type(attr_value)): print \"###", "[0] def _wrapper(*args,**kwargs): start_time = time.time() result = func(*args,**kwargs) end_time = time.time() _id[0]", "result = func(*args,**kwargs) end_time = time.time() _id[0] += 1 print \"{} [{}] [{:.3f}", "\"{} [{}] [{:.3f} ms] {}\".format( _id[0], time.strftime(\"%Y-%m-%d %H:%M:%S\"), (end_time - start_time) * 1000,", "\"file_lineno\": func.func_code.co_firstlineno } ) return result return _wrapper def super_test(): print_obj(str, full=True) print_obj(lambda", "def func_time(func): _id = [0] def _wrapper(*args,**kwargs): start_time = time.time() result = func(*args,**kwargs)", "\"--- attribute {}: {}\".format(attr, attr_value) except Exception, e: print \"*** read error {}:", 
"attribute {}: {}\".format(attr, attr_value) except Exception, e: print \"*** read error {}: {}\".format(attr,", "utf-8 -*- u''' @summary: @author: cshanxiao @date: 2016-07-18 ''' import time def print_obj(obj,", "def super_test(): print_obj(str, full=True) print_obj(lambda x: x, inner=True) if __name__ == '__main__': super_test()", "* 1000, {\"func_name\": func.func_name, \"file_name\": func.func_code.co_filename, \"file_lineno\": func.func_code.co_firstlineno } ) return result return", "func(*args,**kwargs) end_time = time.time() _id[0] += 1 print \"{} [{}] [{:.3f} ms] {}\".format(", "# -*- coding: utf-8 -*- u''' @summary: @author: cshanxiao @date: 2016-07-18 ''' import", "coding: utf-8 -*- u''' @summary: @author: cshanxiao @date: 2016-07-18 ''' import time def", "not full: continue if attr.startswith(\"_\") and not inner: continue attr_value = getattr(obj, attr)", "{}\".format(attr, attr_value) except Exception, e: print \"*** read error {}: {}\".format(attr, e) print", "{}\".format(dir(obj)) for attr in dir(obj): try: if attr.startswith(\"__\") and not full: continue if", "func.func_code.co_firstlineno } ) return result return _wrapper def super_test(): print_obj(str, full=True) print_obj(lambda x:", "time.time() _id[0] += 1 print \"{} [{}] [{:.3f} ms] {}\".format( _id[0], time.strftime(\"%Y-%m-%d %H:%M:%S\"),", "-*- coding: utf-8 -*- u''' @summary: @author: cshanxiao @date: 2016-07-18 ''' import time", "attr.startswith(\"_\") and not inner: continue attr_value = getattr(obj, attr) if \"method\" in str(type(attr_value)):", "\"dir info: {}\".format(dir(obj)) for attr in dir(obj): try: if attr.startswith(\"__\") and not full:", "_id[0] += 1 print \"{} [{}] [{:.3f} ms] {}\".format( _id[0], time.strftime(\"%Y-%m-%d %H:%M:%S\"), (end_time", "getattr(obj, attr) if \"method\" in str(type(attr_value)): print \"### method {}\".format(attr) elif \"wrapper\" in" ]
[ "use, modify and redistribute this software as you please, as long as you", "[] # Verify size if len(button_text_row) != len(callback_row): raise ValueError(\"Buttons and callbacks size", "e: self.logger.log(\"RuntimeError when sending message\") self.logger.log(e) except Exception as e: self.logger.log(\"Unhandled error when", "texts for m in range(len(button_text_row)): text = button_text_row[m] callback = callback_row[m] # Create", "button_text_row[m] callback = callback_row[m] # Create button kb_button = InlineKeyboardButton( text=text, callback_data=callback) #", "size doesn't match\") # Iterate over button texts for m in range(len(button_text_row)): text", "\"\"\" bot.sendChatAction(chat_id, ChatAction.TYPING) def forward_message(self, message, user_id): return message.forward(user_id) def create_inline_keyboard(self, button_texts, callbacks):", "you are using this software! It will surely make my day. \"\"\" from", "to button row button_row.append(kb_button) # Add row to keyboard kb_buttons.append(button_row) keyboard = InlineKeyboardMarkup(kb_buttons)", "been developed by github user fndh (http://github.com/fndh) You are free to use, modify", "when sending message to {user}:\") self.logger.log(f\"\\t{e} - Try #{tried}/3\") if e == 'Timed", "def send_text(self, bot, user, message, tried=0, reply=None, reply_markup=None, parse_mode=None): \"\"\" Send messages with", "= callback_row[m] # Create button kb_button = InlineKeyboardButton( text=text, callback_data=callback) # Add to", "None or callbacks is None: return None if len(button_texts) != len(callbacks): raise ValueError(\"Buttons", "= [] # Iterate over information rows for n in range(len(button_texts)): # Extract", "this software as you please, as long as you follow the conditions listed", "Verify size if len(button_text_row) != len(callback_row): raise ValueError(\"Buttons and callbacks size doesn't match\")", "Exception as e: self.logger.log(\"Unhandled error when sending message\") self.logger.log(e) 
def send_typing(self, bot, chat_id):", "for m in range(len(button_text_row)): text = button_text_row[m] callback = callback_row[m] # Create button", "and callbacks size doesn't match\") kb_buttons = [] # Iterate over information rows", "bot.sendChatAction(chat_id, ChatAction.TYPING) def forward_message(self, message, user_id): return message.forward(user_id) def create_inline_keyboard(self, button_texts, callbacks): \"\"\"Generate", "tried=0, reply=None, reply_markup=None, parse_mode=None): \"\"\" Send messages with markdown, markup or replies. Returns", "Retry up to 3 times return self.send_text( bot, user, message, tried=tried+1, reply=reply, reply_markup=reply_markup,", "doesn't match\") # Iterate over button texts for m in range(len(button_text_row)): text =", "= button_text_row[m] callback = callback_row[m] # Create button kb_button = InlineKeyboardButton( text=text, callback_data=callback)", "3: # Retry up to 3 times return self.send_text( bot, user, message, tried=tried+1,", "bot, user, message, tried=tried+1, reply=reply, reply_markup=reply_markup, parse_mode=parse_mode) except RuntimeError as e: self.logger.log(\"RuntimeError when", "e == 'Timed out' and tried < 3: # Retry up to 3", "the errors self.logger.log( f\"TelegramError when sending message to {user}:\") self.logger.log(f\"\\t{e} - Try #{tried}/3\")", "ChatAction, InlineKeyboardMarkup, InlineKeyboardButton) class MessageSender: def __init__(self, logger): self.logger = logger def send_text(self,", "my day. \"\"\" from telegram import (TelegramError, ChatAction, InlineKeyboardMarkup, InlineKeyboardButton) class MessageSender: def", "developed by github user fndh (http://github.com/fndh) You are free to use, modify and", "when sending message\") self.logger.log(e) except Exception as e: self.logger.log(\"Unhandled error when sending message\")", "if you are using this software! It will surely make my day. \"\"\"", "this software! It will surely make my day. 
\"\"\" from telegram import (TelegramError,", "message\") self.logger.log(e) except Exception as e: self.logger.log(\"Unhandled error when sending message\") self.logger.log(e) def", "parse_mode=parse_mode) except TelegramError as e: # Log the errors self.logger.log( f\"TelegramError when sending", "send_typing(self, bot, chat_id): \"\"\" Send \"Bot is typing...\" action to chat \"\"\" bot.sendChatAction(chat_id,", "user_id): return message.forward(user_id) def create_inline_keyboard(self, button_texts, callbacks): \"\"\"Generate a keyboard with the options", "Iterate over information rows for n in range(len(button_texts)): # Extract display text and", "or callbacks is None: return None if len(button_texts) != len(callbacks): raise ValueError(\"Buttons and", "are free to use, modify and redistribute this software as you please, as", "import (TelegramError, ChatAction, InlineKeyboardMarkup, InlineKeyboardButton) class MessageSender: def __init__(self, logger): self.logger = logger", "send_text(self, bot, user, message, tried=0, reply=None, reply_markup=None, parse_mode=None): \"\"\" Send messages with markdown,", "the options specified. Make sure bot handles callback methods before creating a keyboard.", "using this software! It will surely make my day. \"\"\" from telegram import", "sending message\") self.logger.log(e) def send_typing(self, bot, chat_id): \"\"\" Send \"Bot is typing...\" action", "== 'Timed out' and tried < 3: # Retry up to 3 times", "the github repository indicated. I want to thank you for reading this small", "the LICENSE file of the github repository indicated. I want to thank you", "surely make my day. 
\"\"\" from telegram import (TelegramError, ChatAction, InlineKeyboardMarkup, InlineKeyboardButton) class", "times return self.send_text( bot, user, message, tried=tried+1, reply=reply, reply_markup=reply_markup, parse_mode=parse_mode) except RuntimeError as", "Add to button row button_row.append(kb_button) # Add row to keyboard kb_buttons.append(button_row) keyboard =", "!= len(callbacks): raise ValueError(\"Buttons and callbacks size doesn't match\") kb_buttons = [] #", "bot, user, message, tried=0, reply=None, reply_markup=None, parse_mode=None): \"\"\" Send messages with markdown, markup", "len(button_texts) != len(callbacks): raise ValueError(\"Buttons and callbacks size doesn't match\") kb_buttons = []", "Returns the sent message. \"\"\" try: return bot.sendMessage( str(user), message, reply_to_message_id=reply, reply_markup=reply_markup, parse_mode=parse_mode)", "to use, modify and redistribute this software as you please, as long as", "\"\"\"Generate a keyboard with the options specified. Make sure bot handles callback methods", "\"Bot is typing...\" action to chat \"\"\" bot.sendChatAction(chat_id, ChatAction.TYPING) def forward_message(self, message, user_id):", "'Timed out' and tried < 3: # Retry up to 3 times return", "text=text, callback_data=callback) # Add to button row button_row.append(kb_button) # Add row to keyboard", "button_text_row = button_texts[n] callback_row = callbacks[n] button_row = [] # Verify size if", "are using this software! It will surely make my day. \"\"\" from telegram", "consider sending me a message if you are using this software! It will", "as e: self.logger.log(\"Unhandled error when sending message\") self.logger.log(e) def send_typing(self, bot, chat_id): \"\"\"", "match\") kb_buttons = [] # Iterate over information rows for n in range(len(button_texts)):", "!= len(callback_row): raise ValueError(\"Buttons and callbacks size doesn't match\") # Iterate over button", "will surely make my day. 
\"\"\" from telegram import (TelegramError, ChatAction, InlineKeyboardMarkup, InlineKeyboardButton)", "reply_markup=reply_markup, parse_mode=parse_mode) except TelegramError as e: # Log the errors self.logger.log( f\"TelegramError when", "(http://github.com/fndh) You are free to use, modify and redistribute this software as you", "= InlineKeyboardButton( text=text, callback_data=callback) # Add to button row button_row.append(kb_button) # Add row", "= logger def send_text(self, bot, user, message, tried=0, reply=None, reply_markup=None, parse_mode=None): \"\"\" Send", "over button texts for m in range(len(button_text_row)): text = button_text_row[m] callback = callback_row[m]", "= [] # Verify size if len(button_text_row) != len(callback_row): raise ValueError(\"Buttons and callbacks", "text and callback function button_text_row = button_texts[n] callback_row = callbacks[n] button_row = []", "parse_mode=None): \"\"\" Send messages with markdown, markup or replies. Returns the sent message.", "messages with markdown, markup or replies. Returns the sent message. 
\"\"\" try: return", "tried < 3: # Retry up to 3 times return self.send_text( bot, user,", "self.send_text( bot, user, message, tried=tried+1, reply=reply, reply_markup=reply_markup, parse_mode=parse_mode) except RuntimeError as e: self.logger.log(\"RuntimeError", "f\"TelegramError when sending message to {user}:\") self.logger.log(f\"\\t{e} - Try #{tried}/3\") if e ==", "callbacks is None: return None if len(button_texts) != len(callbacks): raise ValueError(\"Buttons and callbacks", "InlineKeyboardButton) class MessageSender: def __init__(self, logger): self.logger = logger def send_text(self, bot, user,", "forward_message(self, message, user_id): return message.forward(user_id) def create_inline_keyboard(self, button_texts, callbacks): \"\"\"Generate a keyboard with", "message, tried=0, reply=None, reply_markup=None, parse_mode=None): \"\"\" Send messages with markdown, markup or replies.", "text = button_text_row[m] callback = callback_row[m] # Create button kb_button = InlineKeyboardButton( text=text,", "\"\"\" try: return bot.sendMessage( str(user), message, reply_to_message_id=reply, reply_markup=reply_markup, parse_mode=parse_mode) except TelegramError as e:", "Iterate over button texts for m in range(len(button_text_row)): text = button_text_row[m] callback =", "sent message. \"\"\" try: return bot.sendMessage( str(user), message, reply_to_message_id=reply, reply_markup=reply_markup, parse_mode=parse_mode) except TelegramError", "# Iterate over button texts for m in range(len(button_text_row)): text = button_text_row[m] callback", "class MessageSender: def __init__(self, logger): self.logger = logger def send_text(self, bot, user, message,", "str(user), message, reply_to_message_id=reply, reply_markup=reply_markup, parse_mode=parse_mode) except TelegramError as e: # Log the errors", "bot handles callback methods before creating a keyboard. 
\"\"\" if button_texts is None", "len(callbacks): raise ValueError(\"Buttons and callbacks size doesn't match\") kb_buttons = [] # Iterate", "as you please, as long as you follow the conditions listed in the", "None if len(button_texts) != len(callbacks): raise ValueError(\"Buttons and callbacks size doesn't match\") kb_buttons", "MessageSender: def __init__(self, logger): self.logger = logger def send_text(self, bot, user, message, tried=0,", "range(len(button_texts)): # Extract display text and callback function button_text_row = button_texts[n] callback_row =", "methods before creating a keyboard. \"\"\" if button_texts is None or callbacks is", "callbacks[n] button_row = [] # Verify size if len(button_text_row) != len(callback_row): raise ValueError(\"Buttons", "I want to thank you for reading this small paragraph, and please consider", "thank you for reading this small paragraph, and please consider sending me a", "telegram import (TelegramError, ChatAction, InlineKeyboardMarkup, InlineKeyboardButton) class MessageSender: def __init__(self, logger): self.logger =", "logger def send_text(self, bot, user, message, tried=0, reply=None, reply_markup=None, parse_mode=None): \"\"\" Send messages", "self.logger = logger def send_text(self, bot, user, message, tried=0, reply=None, reply_markup=None, parse_mode=None): \"\"\"", "def send_typing(self, bot, chat_id): \"\"\" Send \"Bot is typing...\" action to chat \"\"\"", "make my day. \"\"\" from telegram import (TelegramError, ChatAction, InlineKeyboardMarkup, InlineKeyboardButton) class MessageSender:", "\"\"\" Send \"Bot is typing...\" action to chat \"\"\" bot.sendChatAction(chat_id, ChatAction.TYPING) def forward_message(self,", "a keyboard with the options specified. 
Make sure bot handles callback methods before", "when sending message\") self.logger.log(e) def send_typing(self, bot, chat_id): \"\"\" Send \"Bot is typing...\"", "user, message, tried=tried+1, reply=reply, reply_markup=reply_markup, parse_mode=parse_mode) except RuntimeError as e: self.logger.log(\"RuntimeError when sending", "if e == 'Timed out' and tried < 3: # Retry up to", "and callbacks size doesn't match\") # Iterate over button texts for m in", "message.forward(user_id) def create_inline_keyboard(self, button_texts, callbacks): \"\"\"Generate a keyboard with the options specified. Make", "as e: # Log the errors self.logger.log( f\"TelegramError when sending message to {user}:\")", "#{tried}/3\") if e == 'Timed out' and tried < 3: # Retry up", "small paragraph, and please consider sending me a message if you are using", "github user fndh (http://github.com/fndh) You are free to use, modify and redistribute this", "Log the errors self.logger.log( f\"TelegramError when sending message to {user}:\") self.logger.log(f\"\\t{e} - Try", "for n in range(len(button_texts)): # Extract display text and callback function button_text_row =", "len(button_text_row) != len(callback_row): raise ValueError(\"Buttons and callbacks size doesn't match\") # Iterate over", "sending me a message if you are using this software! It will surely", "e: # Log the errors self.logger.log( f\"TelegramError when sending message to {user}:\") self.logger.log(f\"\\t{e}", "redistribute this software as you please, as long as you follow the conditions", "github repository indicated. 
I want to thank you for reading this small paragraph,", "errors self.logger.log( f\"TelegramError when sending message to {user}:\") self.logger.log(f\"\\t{e} - Try #{tried}/3\") if", "software has been developed by github user fndh (http://github.com/fndh) You are free to", "def __init__(self, logger): self.logger = logger def send_text(self, bot, user, message, tried=0, reply=None,", "# Log the errors self.logger.log( f\"TelegramError when sending message to {user}:\") self.logger.log(f\"\\t{e} -", "LICENSE file of the github repository indicated. I want to thank you for", "return None if len(button_texts) != len(callbacks): raise ValueError(\"Buttons and callbacks size doesn't match\")", "in range(len(button_text_row)): text = button_text_row[m] callback = callback_row[m] # Create button kb_button =", "sending message\") self.logger.log(e) except Exception as e: self.logger.log(\"Unhandled error when sending message\") self.logger.log(e)", "callback function button_text_row = button_texts[n] callback_row = callbacks[n] button_row = [] # Verify", "callback_data=callback) # Add to button row button_row.append(kb_button) # Add row to keyboard kb_buttons.append(button_row)", "Send \"Bot is typing...\" action to chat \"\"\" bot.sendChatAction(chat_id, ChatAction.TYPING) def forward_message(self, message,", "this small paragraph, and please consider sending me a message if you are", "match\") # Iterate over button texts for m in range(len(button_text_row)): text = button_text_row[m]", "message, reply_to_message_id=reply, reply_markup=reply_markup, parse_mode=parse_mode) except TelegramError as e: # Log the errors self.logger.log(", "\"\"\" This software has been developed by github user fndh (http://github.com/fndh) You are", "= button_texts[n] callback_row = callbacks[n] button_row = [] # Verify size if len(button_text_row)", "as long as you follow the conditions listed in the LICENSE file of", "to {user}:\") self.logger.log(f\"\\t{e} - Try #{tried}/3\") if e == 
'Timed out' and tried", "out' and tried < 3: # Retry up to 3 times return self.send_text(", "in range(len(button_texts)): # Extract display text and callback function button_text_row = button_texts[n] callback_row", "[] # Iterate over information rows for n in range(len(button_texts)): # Extract display", "callback_row = callbacks[n] button_row = [] # Verify size if len(button_text_row) != len(callback_row):", "if button_texts is None or callbacks is None: return None if len(button_texts) !=", "reply_markup=reply_markup, parse_mode=parse_mode) except RuntimeError as e: self.logger.log(\"RuntimeError when sending message\") self.logger.log(e) except Exception", "\"\"\" if button_texts is None or callbacks is None: return None if len(button_texts)", "keyboard. \"\"\" if button_texts is None or callbacks is None: return None if", "specified. Make sure bot handles callback methods before creating a keyboard. \"\"\" if", "callback_row[m] # Create button kb_button = InlineKeyboardButton( text=text, callback_data=callback) # Add to button", "except Exception as e: self.logger.log(\"Unhandled error when sending message\") self.logger.log(e) def send_typing(self, bot,", "and callback function button_text_row = button_texts[n] callback_row = callbacks[n] button_row = [] #", "# Retry up to 3 times return self.send_text( bot, user, message, tried=tried+1, reply=reply,", "# Add to button row button_row.append(kb_button) # Add row to keyboard kb_buttons.append(button_row) keyboard", "user fndh (http://github.com/fndh) You are free to use, modify and redistribute this software", "raise ValueError(\"Buttons and callbacks size doesn't match\") kb_buttons = [] # Iterate over", "n in range(len(button_texts)): # Extract display text and callback function button_text_row = button_texts[n]", "m in range(len(button_text_row)): text = button_text_row[m] callback = callback_row[m] # Create button kb_button", "__init__(self, logger): self.logger = logger def send_text(self, bot, user, 
message, tried=0, reply=None, reply_markup=None,", "the conditions listed in the LICENSE file of the github repository indicated. I", "is typing...\" action to chat \"\"\" bot.sendChatAction(chat_id, ChatAction.TYPING) def forward_message(self, message, user_id): return", "self.logger.log(\"RuntimeError when sending message\") self.logger.log(e) except Exception as e: self.logger.log(\"Unhandled error when sending", "replies. Returns the sent message. \"\"\" try: return bot.sendMessage( str(user), message, reply_to_message_id=reply, reply_markup=reply_markup,", "except TelegramError as e: # Log the errors self.logger.log( f\"TelegramError when sending message", "message\") self.logger.log(e) def send_typing(self, bot, chat_id): \"\"\" Send \"Bot is typing...\" action to", "It will surely make my day. \"\"\" from telegram import (TelegramError, ChatAction, InlineKeyboardMarkup,", "Make sure bot handles callback methods before creating a keyboard. \"\"\" if button_texts", "parse_mode=parse_mode) except RuntimeError as e: self.logger.log(\"RuntimeError when sending message\") self.logger.log(e) except Exception as", "software! It will surely make my day. \"\"\" from telegram import (TelegramError, ChatAction,", "markdown, markup or replies. Returns the sent message. \"\"\" try: return bot.sendMessage( str(user),", "is None or callbacks is None: return None if len(button_texts) != len(callbacks): raise", "keyboard with the options specified. Make sure bot handles callback methods before creating", "reply=None, reply_markup=None, parse_mode=None): \"\"\" Send messages with markdown, markup or replies. 
Returns the", "reply_to_message_id=reply, reply_markup=reply_markup, parse_mode=parse_mode) except TelegramError as e: # Log the errors self.logger.log( f\"TelegramError", "as e: self.logger.log(\"RuntimeError when sending message\") self.logger.log(e) except Exception as e: self.logger.log(\"Unhandled error", "message, user_id): return message.forward(user_id) def create_inline_keyboard(self, button_texts, callbacks): \"\"\"Generate a keyboard with the", "Send messages with markdown, markup or replies. Returns the sent message. \"\"\" try:", "\"\"\" from telegram import (TelegramError, ChatAction, InlineKeyboardMarkup, InlineKeyboardButton) class MessageSender: def __init__(self, logger):", "to thank you for reading this small paragraph, and please consider sending me", "self.logger.log(e) def send_typing(self, bot, chat_id): \"\"\" Send \"Bot is typing...\" action to chat", "(TelegramError, ChatAction, InlineKeyboardMarkup, InlineKeyboardButton) class MessageSender: def __init__(self, logger): self.logger = logger def", "ValueError(\"Buttons and callbacks size doesn't match\") kb_buttons = [] # Iterate over information", "reply=reply, reply_markup=reply_markup, parse_mode=parse_mode) except RuntimeError as e: self.logger.log(\"RuntimeError when sending message\") self.logger.log(e) except", "software as you please, as long as you follow the conditions listed in", "rows for n in range(len(button_texts)): # Extract display text and callback function button_text_row", "listed in the LICENSE file of the github repository indicated. I want to", "= callbacks[n] button_row = [] # Verify size if len(button_text_row) != len(callback_row): raise", "button_texts is None or callbacks is None: return None if len(button_texts) != len(callbacks):", "return message.forward(user_id) def create_inline_keyboard(self, button_texts, callbacks): \"\"\"Generate a keyboard with the options specified.", "options specified. 
Make sure bot handles callback methods before creating a keyboard. \"\"\"", "and redistribute this software as you please, as long as you follow the", "action to chat \"\"\" bot.sendChatAction(chat_id, ChatAction.TYPING) def forward_message(self, message, user_id): return message.forward(user_id) def", "want to thank you for reading this small paragraph, and please consider sending", "display text and callback function button_text_row = button_texts[n] callback_row = callbacks[n] button_row =", "follow the conditions listed in the LICENSE file of the github repository indicated.", "with markdown, markup or replies. Returns the sent message. \"\"\" try: return bot.sendMessage(", "and tried < 3: # Retry up to 3 times return self.send_text( bot,", "callbacks size doesn't match\") kb_buttons = [] # Iterate over information rows for", "with the options specified. Make sure bot handles callback methods before creating a", "indicated. I want to thank you for reading this small paragraph, and please", "sending message to {user}:\") self.logger.log(f\"\\t{e} - Try #{tried}/3\") if e == 'Timed out'", "error when sending message\") self.logger.log(e) def send_typing(self, bot, chat_id): \"\"\" Send \"Bot is", "callback methods before creating a keyboard. \"\"\" if button_texts is None or callbacks", "day. \"\"\" from telegram import (TelegramError, ChatAction, InlineKeyboardMarkup, InlineKeyboardButton) class MessageSender: def __init__(self,", "- Try #{tried}/3\") if e == 'Timed out' and tried < 3: #", "in the LICENSE file of the github repository indicated. 
I want to thank", "return bot.sendMessage( str(user), message, reply_to_message_id=reply, reply_markup=reply_markup, parse_mode=parse_mode) except TelegramError as e: # Log", "by github user fndh (http://github.com/fndh) You are free to use, modify and redistribute", "typing...\" action to chat \"\"\" bot.sendChatAction(chat_id, ChatAction.TYPING) def forward_message(self, message, user_id): return message.forward(user_id)", "long as you follow the conditions listed in the LICENSE file of the", "button row button_row.append(kb_button) # Add row to keyboard kb_buttons.append(button_row) keyboard = InlineKeyboardMarkup(kb_buttons) return", "you follow the conditions listed in the LICENSE file of the github repository", "# Extract display text and callback function button_text_row = button_texts[n] callback_row = callbacks[n]", "message to {user}:\") self.logger.log(f\"\\t{e} - Try #{tried}/3\") if e == 'Timed out' and", "bot, chat_id): \"\"\" Send \"Bot is typing...\" action to chat \"\"\" bot.sendChatAction(chat_id, ChatAction.TYPING)", "None: return None if len(button_texts) != len(callbacks): raise ValueError(\"Buttons and callbacks size doesn't", "Try #{tried}/3\") if e == 'Timed out' and tried < 3: # Retry", "chat_id): \"\"\" Send \"Bot is typing...\" action to chat \"\"\" bot.sendChatAction(chat_id, ChatAction.TYPING) def", "free to use, modify and redistribute this software as you please, as long", "if len(button_text_row) != len(callback_row): raise ValueError(\"Buttons and callbacks size doesn't match\") # Iterate", "the sent message. \"\"\" try: return bot.sendMessage( str(user), message, reply_to_message_id=reply, reply_markup=reply_markup, parse_mode=parse_mode) except", "a message if you are using this software! It will surely make my", "me a message if you are using this software! 
It will surely make", "for reading this small paragraph, and please consider sending me a message if", "button_texts, callbacks): \"\"\"Generate a keyboard with the options specified. Make sure bot handles", "please consider sending me a message if you are using this software! It", "file of the github repository indicated. I want to thank you for reading", "try: return bot.sendMessage( str(user), message, reply_to_message_id=reply, reply_markup=reply_markup, parse_mode=parse_mode) except TelegramError as e: #", "as you follow the conditions listed in the LICENSE file of the github", "self.logger.log( f\"TelegramError when sending message to {user}:\") self.logger.log(f\"\\t{e} - Try #{tried}/3\") if e", "is None: return None if len(button_texts) != len(callbacks): raise ValueError(\"Buttons and callbacks size", "handles callback methods before creating a keyboard. \"\"\" if button_texts is None or", "doesn't match\") kb_buttons = [] # Iterate over information rows for n in", "Create button kb_button = InlineKeyboardButton( text=text, callback_data=callback) # Add to button row button_row.append(kb_button)", "paragraph, and please consider sending me a message if you are using this", "markup or replies. Returns the sent message. 
\"\"\" try: return bot.sendMessage( str(user), message,", "# Verify size if len(button_text_row) != len(callback_row): raise ValueError(\"Buttons and callbacks size doesn't", "button texts for m in range(len(button_text_row)): text = button_text_row[m] callback = callback_row[m] #", "function button_text_row = button_texts[n] callback_row = callbacks[n] button_row = [] # Verify size", "def forward_message(self, message, user_id): return message.forward(user_id) def create_inline_keyboard(self, button_texts, callbacks): \"\"\"Generate a keyboard", "raise ValueError(\"Buttons and callbacks size doesn't match\") # Iterate over button texts for", "kb_button = InlineKeyboardButton( text=text, callback_data=callback) # Add to button row button_row.append(kb_button) # Add", "and please consider sending me a message if you are using this software!", "self.logger.log(\"Unhandled error when sending message\") self.logger.log(e) def send_typing(self, bot, chat_id): \"\"\" Send \"Bot", "InlineKeyboardMarkup, InlineKeyboardButton) class MessageSender: def __init__(self, logger): self.logger = logger def send_text(self, bot,", "You are free to use, modify and redistribute this software as you please,", "over information rows for n in range(len(button_texts)): # Extract display text and callback", "Extract display text and callback function button_text_row = button_texts[n] callback_row = callbacks[n] button_row", "reading this small paragraph, and please consider sending me a message if you", "size if len(button_text_row) != len(callback_row): raise ValueError(\"Buttons and callbacks size doesn't match\") #", "modify and redistribute this software as you please, as long as you follow", "create_inline_keyboard(self, button_texts, callbacks): \"\"\"Generate a keyboard with the options specified. Make sure bot", "a keyboard. 
\"\"\" if button_texts is None or callbacks is None: return None", "e: self.logger.log(\"Unhandled error when sending message\") self.logger.log(e) def send_typing(self, bot, chat_id): \"\"\" Send", "\"\"\" Send messages with markdown, markup or replies. Returns the sent message. \"\"\"", "tried=tried+1, reply=reply, reply_markup=reply_markup, parse_mode=parse_mode) except RuntimeError as e: self.logger.log(\"RuntimeError when sending message\") self.logger.log(e)", "please, as long as you follow the conditions listed in the LICENSE file", "kb_buttons = [] # Iterate over information rows for n in range(len(button_texts)): #", "to 3 times return self.send_text( bot, user, message, tried=tried+1, reply=reply, reply_markup=reply_markup, parse_mode=parse_mode) except", "to chat \"\"\" bot.sendChatAction(chat_id, ChatAction.TYPING) def forward_message(self, message, user_id): return message.forward(user_id) def create_inline_keyboard(self,", "len(callback_row): raise ValueError(\"Buttons and callbacks size doesn't match\") # Iterate over button texts", "information rows for n in range(len(button_texts)): # Extract display text and callback function", "from telegram import (TelegramError, ChatAction, InlineKeyboardMarkup, InlineKeyboardButton) class MessageSender: def __init__(self, logger): self.logger", "return self.send_text( bot, user, message, tried=tried+1, reply=reply, reply_markup=reply_markup, parse_mode=parse_mode) except RuntimeError as e:", "callback = callback_row[m] # Create button kb_button = InlineKeyboardButton( text=text, callback_data=callback) # Add", "button_row = [] # Verify size if len(button_text_row) != len(callback_row): raise ValueError(\"Buttons and", "has been developed by github user fndh (http://github.com/fndh) You are free to use,", "message. 
\"\"\" try: return bot.sendMessage( str(user), message, reply_to_message_id=reply, reply_markup=reply_markup, parse_mode=parse_mode) except TelegramError as", "up to 3 times return self.send_text( bot, user, message, tried=tried+1, reply=reply, reply_markup=reply_markup, parse_mode=parse_mode)", "< 3: # Retry up to 3 times return self.send_text( bot, user, message,", "self.logger.log(f\"\\t{e} - Try #{tried}/3\") if e == 'Timed out' and tried < 3:", "This software has been developed by github user fndh (http://github.com/fndh) You are free", "ValueError(\"Buttons and callbacks size doesn't match\") # Iterate over button texts for m", "message if you are using this software! It will surely make my day.", "<reponame>sharry008/anonymise \"\"\" This software has been developed by github user fndh (http://github.com/fndh) You", "RuntimeError as e: self.logger.log(\"RuntimeError when sending message\") self.logger.log(e) except Exception as e: self.logger.log(\"Unhandled", "button_texts[n] callback_row = callbacks[n] button_row = [] # Verify size if len(button_text_row) !=", "you for reading this small paragraph, and please consider sending me a message", "callbacks size doesn't match\") # Iterate over button texts for m in range(len(button_text_row)):", "row button_row.append(kb_button) # Add row to keyboard kb_buttons.append(button_row) keyboard = InlineKeyboardMarkup(kb_buttons) return keyboard", "creating a keyboard. \"\"\" if button_texts is None or callbacks is None: return", "message, tried=tried+1, reply=reply, reply_markup=reply_markup, parse_mode=parse_mode) except RuntimeError as e: self.logger.log(\"RuntimeError when sending message\")", "sure bot handles callback methods before creating a keyboard. \"\"\" if button_texts is", "bot.sendMessage( str(user), message, reply_to_message_id=reply, reply_markup=reply_markup, parse_mode=parse_mode) except TelegramError as e: # Log the", "or replies. Returns the sent message. 
\"\"\" try: return bot.sendMessage( str(user), message, reply_to_message_id=reply,", "before creating a keyboard. \"\"\" if button_texts is None or callbacks is None:", "fndh (http://github.com/fndh) You are free to use, modify and redistribute this software as", "TelegramError as e: # Log the errors self.logger.log( f\"TelegramError when sending message to", "if len(button_texts) != len(callbacks): raise ValueError(\"Buttons and callbacks size doesn't match\") kb_buttons =", "logger): self.logger = logger def send_text(self, bot, user, message, tried=0, reply=None, reply_markup=None, parse_mode=None):", "range(len(button_text_row)): text = button_text_row[m] callback = callback_row[m] # Create button kb_button = InlineKeyboardButton(", "repository indicated. I want to thank you for reading this small paragraph, and", "user, message, tried=0, reply=None, reply_markup=None, parse_mode=None): \"\"\" Send messages with markdown, markup or", "conditions listed in the LICENSE file of the github repository indicated. I want", "you please, as long as you follow the conditions listed in the LICENSE", "InlineKeyboardButton( text=text, callback_data=callback) # Add to button row button_row.append(kb_button) # Add row to", "def create_inline_keyboard(self, button_texts, callbacks): \"\"\"Generate a keyboard with the options specified. Make sure", "size doesn't match\") kb_buttons = [] # Iterate over information rows for n", "{user}:\") self.logger.log(f\"\\t{e} - Try #{tried}/3\") if e == 'Timed out' and tried <", "# Iterate over information rows for n in range(len(button_texts)): # Extract display text", "# Create button kb_button = InlineKeyboardButton( text=text, callback_data=callback) # Add to button row", "except RuntimeError as e: self.logger.log(\"RuntimeError when sending message\") self.logger.log(e) except Exception as e:", "of the github repository indicated. 
I want to thank you for reading this", "self.logger.log(e) except Exception as e: self.logger.log(\"Unhandled error when sending message\") self.logger.log(e) def send_typing(self,", "ChatAction.TYPING) def forward_message(self, message, user_id): return message.forward(user_id) def create_inline_keyboard(self, button_texts, callbacks): \"\"\"Generate a", "button kb_button = InlineKeyboardButton( text=text, callback_data=callback) # Add to button row button_row.append(kb_button) #", "reply_markup=None, parse_mode=None): \"\"\" Send messages with markdown, markup or replies. Returns the sent", "3 times return self.send_text( bot, user, message, tried=tried+1, reply=reply, reply_markup=reply_markup, parse_mode=parse_mode) except RuntimeError", "callbacks): \"\"\"Generate a keyboard with the options specified. Make sure bot handles callback", "chat \"\"\" bot.sendChatAction(chat_id, ChatAction.TYPING) def forward_message(self, message, user_id): return message.forward(user_id) def create_inline_keyboard(self, button_texts," ]
[ "License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "writing, software # distributed under the License is distributed on an \"AS IS\"", "@swagger.operation(nickname='post') def post(self): \"\"\" @description: 生成图片校验码 @rtype: L{ImageResp} @raise 400: Invalid Input @raise", "from image_verify import generate_verify_image @swagger.model() class ImageResp: def __init__(self, errCode, errMsg, code, imgUrl):", "\"\"\" logging.info(\"POST %r\", self.request.uri) _id = generate_uuid_str() timestamp = current_timestamp() _datehour = timestamp_to_datehour(timestamp)", "ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post') def post(self): \"\"\" @description: 生成图片校验码 @rtype: L{ImageResp} @raise 400: Invalid Input", "Unless required by applicable law or agreed to in writing, software # distributed", "See the # License for the specific language governing permissions and limitations #", "class ImageResp: def __init__(self, errCode, errMsg, code, imgUrl): self.errCode = errCode self.errMsg =", "_datehour = timestamp_to_datehour(timestamp) path = cur_file_dir() logging.debug(\"got path %r\", path) if not os.path.exists(path", "+ _datehour + \"/\" + _id + '.gif' mstream, _code = generate_verify_image(save_img=True, filepath=filepath)", "self.imgUrl = imgUrl # /api/image-verify class ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post') def post(self): \"\"\" @description: 生成图片校验码", "\"License\"); you may # not use this file except in compliance with the", "os.path.exists(path + \"/static/image-verify/\" + _datehour): os.makedirs(path + \"/static/image-verify/\" + _datehour) # To save", "# Copyright 2016-2017 <EMAIL> # <EMAIL> # # Licensed under the Apache License,", "= generate_uuid_str() timestamp = current_timestamp() _datehour = timestamp_to_datehour(timestamp) path = cur_file_dir() logging.debug(\"got path", "Apache License, Version 2.0 (the \"License\"); you may # not use this file", 
"tornado.httpclient import * from tornado.httputil import url_concat from tornado_swagger import swagger from image_verify", "the License. You may obtain # a copy of the License at #", "+ '.gif' mstream, _code = generate_verify_image(save_img=True, filepath=filepath) img_url = self.request.protocol + \"://\" +", "_id = generate_uuid_str() timestamp = current_timestamp() _datehour = timestamp_to_datehour(timestamp) path = cur_file_dir() logging.debug(\"got", "生成图片校验码 @rtype: L{ImageResp} @raise 400: Invalid Input @raise 500: Internal Server Error \"\"\"", "law or agreed to in writing, software # distributed under the License is", "@description: 生成图片校验码 @rtype: L{ImageResp} @raise 400: Invalid Input @raise 500: Internal Server Error", "may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the Apache License, Version 2.0 (the \"License\"); you may # not use this", "the specific language governing permissions and limitations # under the License. import tornado.web", "errMsg, code, imgUrl): self.errCode = errCode self.errMsg = errMsg self.code = code self.imgUrl", "def post(self): \"\"\" @description: 生成图片校验码 @rtype: L{ImageResp} @raise 400: Invalid Input @raise 500:", "'.gif' mstream, _code = generate_verify_image(save_img=True, filepath=filepath) img_url = self.request.protocol + \"://\" + self.request.host", "\"://\" + self.request.host img_url = img_url + '/static/image-verify/' + _datehour + \"/\" +", "\"/\" + _id + '.gif' logging.info(\"Success[200]: generate image-verify code=[%r] img_url=[%r]\", _code, img_url) self.set_status(200)", "errMsg self.code = code self.imgUrl = imgUrl # /api/image-verify class ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post') def", "os import json as JSON # 启用别名,不会跟方法里的局部变量混淆 from comm import * from global_const", "= path + \"/static/image-verify/\" + _datehour + \"/\" + _id + '.gif' mstream,", "express or implied. 
See the # License for the specific language governing permissions", "generate_uuid_str() timestamp = current_timestamp() _datehour = timestamp_to_datehour(timestamp) path = cur_file_dir() logging.debug(\"got path %r\",", "filepath=filepath) img_url = self.request.protocol + \"://\" + self.request.host img_url = img_url + '/static/image-verify/'", "tornado.httputil import url_concat from tornado_swagger import swagger from image_verify import generate_verify_image @swagger.model() class", "self.request.host img_url = img_url + '/static/image-verify/' + _datehour + \"/\" + _id +", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "CONDITIONS OF ANY KIND, either express or implied. See the # License for", "not use this file except in compliance with the License. You may obtain", "the License. import tornado.web import logging import time import sys import os import", "it filepath = path + \"/static/image-verify/\" + _datehour + \"/\" + _id +", "img_url = img_url + '/static/image-verify/' + _datehour + \"/\" + _id + '.gif'", "= cur_file_dir() logging.debug(\"got path %r\", path) if not os.path.exists(path + \"/static/image-verify/\" + _datehour):", "path) if not os.path.exists(path + \"/static/image-verify/\" + _datehour): os.makedirs(path + \"/static/image-verify/\" + _datehour)", "self.errCode = errCode self.errMsg = errMsg self.code = code self.imgUrl = imgUrl #", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "with the License. You may obtain # a copy of the License at", "= errCode self.errMsg = errMsg self.code = code self.imgUrl = imgUrl # /api/image-verify", "for the specific language governing permissions and limitations # under the License. 
import", "logging.debug(\"got path %r\", path) if not os.path.exists(path + \"/static/image-verify/\" + _datehour): os.makedirs(path +", "+ \"/\" + _id + '.gif' mstream, _code = generate_verify_image(save_img=True, filepath=filepath) img_url =", "= self.request.protocol + \"://\" + self.request.host img_url = img_url + '/static/image-verify/' + _datehour", "from tornado.httpclient import * from tornado.httputil import url_concat from tornado_swagger import swagger from", "json_encode, json_decode from tornado.httpclient import * from tornado.httputil import url_concat from tornado_swagger import", "* from base_handler import * from tornado.escape import json_encode, json_decode from tornado.httpclient import", "import os import json as JSON # 启用别名,不会跟方法里的局部变量混淆 from comm import * from", "# To save it filepath = path + \"/static/image-verify/\" + _datehour + \"/\"", "Licensed under the Apache License, Version 2.0 (the \"License\"); you may # not", "filepath = path + \"/static/image-verify/\" + _datehour + \"/\" + _id + '.gif'", "+ \"://\" + self.request.host img_url = img_url + '/static/image-verify/' + _datehour + \"/\"", "self.request.uri) _id = generate_uuid_str() timestamp = current_timestamp() _datehour = timestamp_to_datehour(timestamp) path = cur_file_dir()", "License for the specific language governing permissions and limitations # under the License.", "import json_encode, json_decode from tornado.httpclient import * from tornado.httputil import url_concat from tornado_swagger", "def __init__(self, errCode, errMsg, code, imgUrl): self.errCode = errCode self.errMsg = errMsg self.code", "utf-8_*_ # # Copyright 2016-2017 <EMAIL> # <EMAIL> # # Licensed under the", "2.0 (the \"License\"); you may # not use this file except in compliance", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "sys import os import json as JSON # 启用别名,不会跟方法里的局部变量混淆 from comm import *", "= imgUrl # /api/image-verify class 
ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post') def post(self): \"\"\" @description: 生成图片校验码 @rtype:", "import tornado.web import logging import time import sys import os import json as", "@rtype: L{ImageResp} @raise 400: Invalid Input @raise 500: Internal Server Error \"\"\" logging.info(\"POST", "+ _id + '.gif' mstream, _code = generate_verify_image(save_img=True, filepath=filepath) img_url = self.request.protocol +", "import json as JSON # 启用别名,不会跟方法里的局部变量混淆 from comm import * from global_const import", "Internal Server Error \"\"\" logging.info(\"POST %r\", self.request.uri) _id = generate_uuid_str() timestamp = current_timestamp()", "swagger from image_verify import generate_verify_image @swagger.model() class ImageResp: def __init__(self, errCode, errMsg, code,", "img_url = self.request.protocol + \"://\" + self.request.host img_url = img_url + '/static/image-verify/' +", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "\"/static/image-verify/\" + _datehour): os.makedirs(path + \"/static/image-verify/\" + _datehour) # To save it filepath", "generate_verify_image @swagger.model() class ImageResp: def __init__(self, errCode, errMsg, code, imgUrl): self.errCode = errCode", "<EMAIL> # <EMAIL> # # Licensed under the Apache License, Version 2.0 (the", "= timestamp_to_datehour(timestamp) path = cur_file_dir() logging.debug(\"got path %r\", path) if not os.path.exists(path +", "import url_concat from tornado_swagger import swagger from image_verify import generate_verify_image @swagger.model() class ImageResp:", "use this file except in compliance with the License. 
You may obtain #", "+ \"/static/image-verify/\" + _datehour) # To save it filepath = path + \"/static/image-verify/\"", "# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT", "+ \"/static/image-verify/\" + _datehour + \"/\" + _id + '.gif' mstream, _code =", "path %r\", path) if not os.path.exists(path + \"/static/image-verify/\" + _datehour): os.makedirs(path + \"/static/image-verify/\"", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the #", "compliance with the License. You may obtain # a copy of the License", "code self.imgUrl = imgUrl # /api/image-verify class ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post') def post(self): \"\"\" @description:", "License, Version 2.0 (the \"License\"); you may # not use this file except", "tornado.web import logging import time import sys import os import json as JSON", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "cur_file_dir() logging.debug(\"got path %r\", path) if not os.path.exists(path + \"/static/image-verify/\" + _datehour): os.makedirs(path", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "L{ImageResp} @raise 400: Invalid Input @raise 500: Internal Server Error \"\"\" logging.info(\"POST %r\",", "* from global_const import * from base_handler import * from tornado.escape import json_encode,", "_code = generate_verify_image(save_img=True, filepath=filepath) img_url = self.request.protocol + \"://\" + self.request.host img_url =", "timestamp = current_timestamp() _datehour = timestamp_to_datehour(timestamp) path = cur_file_dir() logging.debug(\"got path %r\", path)", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "implied. 
See the # License for the specific language governing permissions and limitations", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "OF ANY KIND, either express or implied. See the # License for the", "500: Internal Server Error \"\"\" logging.info(\"POST %r\", self.request.uri) _id = generate_uuid_str() timestamp =", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "from comm import * from global_const import * from base_handler import * from", "_datehour): os.makedirs(path + \"/static/image-verify/\" + _datehour) # To save it filepath = path", "+ _datehour + \"/\" + _id + '.gif' logging.info(\"Success[200]: generate image-verify code=[%r] img_url=[%r]\",", "Copyright 2016-2017 <EMAIL> # <EMAIL> # # Licensed under the Apache License, Version", "import sys import os import json as JSON # 启用别名,不会跟方法里的局部变量混淆 from comm import", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the", "= code self.imgUrl = imgUrl # /api/image-verify class ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post') def post(self): \"\"\"", "%r\", path) if not os.path.exists(path + \"/static/image-verify/\" + _datehour): os.makedirs(path + \"/static/image-verify/\" +", "+ \"/static/image-verify/\" + _datehour): os.makedirs(path + \"/static/image-verify/\" + _datehour) # To save it", "logging.info(\"POST %r\", self.request.uri) _id = generate_uuid_str() timestamp = current_timestamp() _datehour = timestamp_to_datehour(timestamp) path", "current_timestamp() _datehour = timestamp_to_datehour(timestamp) path = cur_file_dir() logging.debug(\"got path %r\", path) if not", "you may # not use this file except in compliance with the License.", "%r\", self.request.uri) _id = generate_uuid_str() timestamp = current_timestamp() _datehour = timestamp_to_datehour(timestamp) path =", "agreed to in writing, software # distributed under the License is distributed on", "Input @raise 500: Internal Server Error \"\"\" logging.info(\"POST %r\", self.request.uri) _id = generate_uuid_str()", "\"\"\" @description: 生成图片校验码 @rtype: L{ImageResp} @raise 400: Invalid Input @raise 500: Internal Server", "__init__(self, errCode, errMsg, code, imgUrl): self.errCode = errCode self.errMsg = errMsg self.code =", "+ '/static/image-verify/' + _datehour + \"/\" + _id + '.gif' logging.info(\"Success[200]: generate image-verify", "(the \"License\"); you may # not use this file except in compliance with", "self.code = code self.imgUrl = imgUrl # /api/image-verify class ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post') def post(self):", "= current_timestamp() _datehour = timestamp_to_datehour(timestamp) path = cur_file_dir() logging.debug(\"got path %r\", path) if", "may # not use this file except in compliance with the License. You", "KIND, either express or implied. 
See the # License for the specific language", "time import sys import os import json as JSON # 启用别名,不会跟方法里的局部变量混淆 from comm", "image_verify import generate_verify_image @swagger.model() class ImageResp: def __init__(self, errCode, errMsg, code, imgUrl): self.errCode", "@swagger.model() class ImageResp: def __init__(self, errCode, errMsg, code, imgUrl): self.errCode = errCode self.errMsg", "# /api/image-verify class ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post') def post(self): \"\"\" @description: 生成图片校验码 @rtype: L{ImageResp} @raise", "'/static/image-verify/' + _datehour + \"/\" + _id + '.gif' logging.info(\"Success[200]: generate image-verify code=[%r]", "_datehour + \"/\" + _id + '.gif' logging.info(\"Success[200]: generate image-verify code=[%r] img_url=[%r]\", _code,", "limitations # under the License. import tornado.web import logging import time import sys", "either express or implied. See the # License for the specific language governing", "To save it filepath = path + \"/static/image-verify/\" + _datehour + \"/\" +", "# # Unless required by applicable law or agreed to in writing, software", "/api/image-verify class ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post') def post(self): \"\"\" @description: 生成图片校验码 @rtype: L{ImageResp} @raise 400:", "file except in compliance with the License. You may obtain # a copy", "logging import time import sys import os import json as JSON # 启用别名,不会跟方法里的局部变量混淆", "# # Copyright 2016-2017 <EMAIL> # <EMAIL> # # Licensed under the Apache", "this file except in compliance with the License. 
You may obtain # a", "# Unless required by applicable law or agreed to in writing, software #", "JSON # 启用别名,不会跟方法里的局部变量混淆 from comm import * from global_const import * from base_handler", "from tornado.httputil import url_concat from tornado_swagger import swagger from image_verify import generate_verify_image @swagger.model()", "by applicable law or agreed to in writing, software # distributed under the", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "and limitations # under the License. import tornado.web import logging import time import", "from base_handler import * from tornado.escape import json_encode, json_decode from tornado.httpclient import *", "under the License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "or implied. See the # License for the specific language governing permissions and", "tornado.escape import json_encode, json_decode from tornado.httpclient import * from tornado.httputil import url_concat from", "not os.path.exists(path + \"/static/image-verify/\" + _datehour): os.makedirs(path + \"/static/image-verify/\" + _datehour) # To", "2016-2017 <EMAIL> # <EMAIL> # # Licensed under the Apache License, Version 2.0", "_id + '.gif' mstream, _code = generate_verify_image(save_img=True, filepath=filepath) img_url = self.request.protocol + \"://\"", "_id + '.gif' logging.info(\"Success[200]: generate image-verify code=[%r] img_url=[%r]\", _code, img_url) self.set_status(200) # Success", "under the License. 
import tornado.web import logging import time import sys import os", "from tornado_swagger import swagger from image_verify import generate_verify_image @swagger.model() class ImageResp: def __init__(self,", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "comm import * from global_const import * from base_handler import * from tornado.escape", "+ \"/\" + _id + '.gif' logging.info(\"Success[200]: generate image-verify code=[%r] img_url=[%r]\", _code, img_url)", "code, imgUrl): self.errCode = errCode self.errMsg = errMsg self.code = code self.imgUrl =", "Invalid Input @raise 500: Internal Server Error \"\"\" logging.info(\"POST %r\", self.request.uri) _id =", "language governing permissions and limitations # under the License. import tornado.web import logging", "if not os.path.exists(path + \"/static/image-verify/\" + _datehour): os.makedirs(path + \"/static/image-verify/\" + _datehour) #", "base_handler import * from tornado.escape import json_encode, json_decode from tornado.httpclient import * from", "License. 
You may obtain # a copy of the License at # #", "@raise 400: Invalid Input @raise 500: Internal Server Error \"\"\" logging.info(\"POST %r\", self.request.uri)", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "= img_url + '/static/image-verify/' + _datehour + \"/\" + _id + '.gif' logging.info(\"Success[200]:", "the License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "import * from tornado.escape import json_encode, json_decode from tornado.httpclient import * from tornado.httputil", "json as JSON # 启用别名,不会跟方法里的局部变量混淆 from comm import * from global_const import *", "errCode self.errMsg = errMsg self.code = code self.imgUrl = imgUrl # /api/image-verify class", "mstream, _code = generate_verify_image(save_img=True, filepath=filepath) img_url = self.request.protocol + \"://\" + self.request.host img_url", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "path = cur_file_dir() logging.debug(\"got path %r\", path) if not os.path.exists(path + \"/static/image-verify/\" +", "class ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post') def post(self): \"\"\" @description: 生成图片校验码 @rtype: L{ImageResp} @raise 400: Invalid", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); you may", "os.makedirs(path + \"/static/image-verify/\" + _datehour) # To save it filepath = path +", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "import * from global_const import * from base_handler import * from tornado.escape import", "import * from tornado.httputil import url_concat from tornado_swagger import swagger from image_verify import", "self.errMsg = errMsg self.code = code self.imgUrl = imgUrl # /api/image-verify class ApiImageVerifyXHR(tornado.web.RequestHandler):", "ANY KIND, either express or implied. 
See the # License for the specific", "the # License for the specific language governing permissions and limitations # under", "except in compliance with the License. You may obtain # a copy of", "tornado_swagger import swagger from image_verify import generate_verify_image @swagger.model() class ImageResp: def __init__(self, errCode,", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "coding: utf-8_*_ # # Copyright 2016-2017 <EMAIL> # <EMAIL> # # Licensed under", "+ _id + '.gif' logging.info(\"Success[200]: generate image-verify code=[%r] img_url=[%r]\", _code, img_url) self.set_status(200) #", "timestamp_to_datehour(timestamp) path = cur_file_dir() logging.debug(\"got path %r\", path) if not os.path.exists(path + \"/static/image-verify/\"", "# <EMAIL> # # Licensed under the Apache License, Version 2.0 (the \"License\");", "import logging import time import sys import os import json as JSON #", "from global_const import * from base_handler import * from tornado.escape import json_encode, json_decode", "= generate_verify_image(save_img=True, filepath=filepath) img_url = self.request.protocol + \"://\" + self.request.host img_url = img_url", "* from tornado.httputil import url_concat from tornado_swagger import swagger from image_verify import generate_verify_image", "to in writing, software # distributed under the License is distributed on an", "Error \"\"\" logging.info(\"POST %r\", self.request.uri) _id = generate_uuid_str() timestamp = current_timestamp() _datehour =", "You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "+ self.request.host img_url = img_url + '/static/image-verify/' + _datehour + \"/\" + _id", "governing permissions and limitations # under the License. 
import tornado.web import logging import", "# _*_ coding: utf-8_*_ # # Copyright 2016-2017 <EMAIL> # <EMAIL> # #", "url_concat from tornado_swagger import swagger from image_verify import generate_verify_image @swagger.model() class ImageResp: def", "global_const import * from base_handler import * from tornado.escape import json_encode, json_decode from", "# under the License. import tornado.web import logging import time import sys import", "required by applicable law or agreed to in writing, software # distributed under", "+ _datehour) # To save it filepath = path + \"/static/image-verify/\" + _datehour", "post(self): \"\"\" @description: 生成图片校验码 @rtype: L{ImageResp} @raise 400: Invalid Input @raise 500: Internal", "applicable law or agreed to in writing, software # distributed under the License", "import time import sys import os import json as JSON # 启用别名,不会跟方法里的局部变量混淆 from", "Server Error \"\"\" logging.info(\"POST %r\", self.request.uri) _id = generate_uuid_str() timestamp = current_timestamp() _datehour", "* from tornado.escape import json_encode, json_decode from tornado.httpclient import * from tornado.httputil import", "distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT #", "as JSON # 启用别名,不会跟方法里的局部变量混淆 from comm import * from global_const import * from", "OR CONDITIONS OF ANY KIND, either express or implied. See the # License", "License. 
import tornado.web import logging import time import sys import os import json", "json_decode from tornado.httpclient import * from tornado.httputil import url_concat from tornado_swagger import swagger", "obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "= errMsg self.code = code self.imgUrl = imgUrl # /api/image-verify class ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post')", "400: Invalid Input @raise 500: Internal Server Error \"\"\" logging.info(\"POST %r\", self.request.uri) _id", "\"/\" + _id + '.gif' mstream, _code = generate_verify_image(save_img=True, filepath=filepath) img_url = self.request.protocol", "specific language governing permissions and limitations # under the License. import tornado.web import", "self.request.protocol + \"://\" + self.request.host img_url = img_url + '/static/image-verify/' + _datehour +", "<EMAIL> # # Licensed under the Apache License, Version 2.0 (the \"License\"); you", "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may #", "in compliance with the License. You may obtain # a copy of the", "@raise 500: Internal Server Error \"\"\" logging.info(\"POST %r\", self.request.uri) _id = generate_uuid_str() timestamp", "# not use this file except in compliance with the License. 
You may", "+ '.gif' logging.info(\"Success[200]: generate image-verify code=[%r] img_url=[%r]\", _code, img_url) self.set_status(200) # Success self.write(JSON.dumps({\"errCode\":200,\"errMsg\":\"Success\",\"code\":_code,\"imageUrl\":img_url}))", "#!/usr/bin/env python # _*_ coding: utf-8_*_ # # Copyright 2016-2017 <EMAIL> # <EMAIL>", "or agreed to in writing, software # distributed under the License is distributed", "启用别名,不会跟方法里的局部变量混淆 from comm import * from global_const import * from base_handler import *", "import * from base_handler import * from tornado.escape import json_encode, json_decode from tornado.httpclient", "import generate_verify_image @swagger.model() class ImageResp: def __init__(self, errCode, errMsg, code, imgUrl): self.errCode =", "\"/static/image-verify/\" + _datehour) # To save it filepath = path + \"/static/image-verify/\" +", "\"/static/image-verify/\" + _datehour + \"/\" + _id + '.gif' mstream, _code = generate_verify_image(save_img=True,", "from tornado.escape import json_encode, json_decode from tornado.httpclient import * from tornado.httputil import url_concat", "generate_verify_image(save_img=True, filepath=filepath) img_url = self.request.protocol + \"://\" + self.request.host img_url = img_url +", "# License for the specific language governing permissions and limitations # under the", "save it filepath = path + \"/static/image-verify/\" + _datehour + \"/\" + _id", "imgUrl): self.errCode = errCode self.errMsg = errMsg self.code = code self.imgUrl = imgUrl", "+ _datehour): os.makedirs(path + \"/static/image-verify/\" + _datehour) # To save it filepath =", "errCode, errMsg, code, imgUrl): self.errCode = errCode self.errMsg = errMsg self.code = code", "imgUrl # /api/image-verify class ApiImageVerifyXHR(tornado.web.RequestHandler): @swagger.operation(nickname='post') def post(self): \"\"\" @description: 生成图片校验码 @rtype: L{ImageResp}", "'.gif' logging.info(\"Success[200]: generate image-verify code=[%r] img_url=[%r]\", _code, img_url) 
self.set_status(200) # Success self.write(JSON.dumps({\"errCode\":200,\"errMsg\":\"Success\",\"code\":_code,\"imageUrl\":img_url})) self.finish()", "under the Apache License, Version 2.0 (the \"License\"); you may # not use", "permissions and limitations # under the License. import tornado.web import logging import time", "import swagger from image_verify import generate_verify_image @swagger.model() class ImageResp: def __init__(self, errCode, errMsg,", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See", "ImageResp: def __init__(self, errCode, errMsg, code, imgUrl): self.errCode = errCode self.errMsg = errMsg", "path + \"/static/image-verify/\" + _datehour + \"/\" + _id + '.gif' mstream, _code", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "python # _*_ coding: utf-8_*_ # # Copyright 2016-2017 <EMAIL> # <EMAIL> #", "# 启用别名,不会跟方法里的局部变量混淆 from comm import * from global_const import * from base_handler import", "in writing, software # distributed under the License is distributed on an \"AS", "_*_ coding: utf-8_*_ # # Copyright 2016-2017 <EMAIL> # <EMAIL> # # Licensed", "Version 2.0 (the \"License\"); you may # not use this file except in", "_datehour) # To save it filepath = path + \"/static/image-verify/\" + _datehour +", "_datehour + \"/\" + _id + '.gif' mstream, _code = generate_verify_image(save_img=True, filepath=filepath) img_url", "img_url + '/static/image-verify/' + _datehour + \"/\" + _id + '.gif' logging.info(\"Success[200]: generate" ]
[]
[ "ind, movie in enumerate(ds['movieId'].unique())} processed = pd.DataFrame({'user': ds['userId'].apply(lambda x: u2i[x]), 'item': ds['movieId'].apply(lambda x:", "handle: pickle.dump(u2i, handle) with open(path.join(output_dir, 'x2i.pickle'), 'wb') as handle: pickle.dump(x2i, handle) if __name__", "path from scripts.process_raw import keep_positive_ratings, count_filter from scripts.config import params def process_raw(input_dir, output_dir,", "as pd import os from os import path from scripts.process_raw import keep_positive_ratings, count_filter", "u2i[x]), 'item': ds['movieId'].apply(lambda x: x2i[x])}) if not path.exists(output_dir): os.makedirs(output_dir) processed.to_csv(path.join(output_dir, 'ds.csv'), index=False) with", "as handle: pickle.dump(x2i, handle) if __name__ == '__main__': common_params = params['ml']['common'] proc_params =", "print('Overall records:', ds.shape[0]) print('Overall users:', len(ds['userId'].unique())) print('Overall movies:', len(ds['movieId'].unique())) ds = keep_positive_ratings(ds, 'userId',", "'item': ds['movieId'].apply(lambda x: x2i[x])}) if not path.exists(output_dir): os.makedirs(output_dir) processed.to_csv(path.join(output_dir, 'ds.csv'), index=False) with open(path.join(output_dir,", "params def process_raw(input_dir, output_dir, movie_users_threshold, user_movies_threshold): ds = pd.read_csv(path.join(input_dir, 'ratings.csv')) print('Overall records:', ds.shape[0])", "print('Left users:', len(ds['userId'].unique())) print('Left movies:', len(ds['movieId'].unique())) u2i = {user: ind for ind, user", "movie in enumerate(ds['movieId'].unique())} processed = pd.DataFrame({'user': ds['userId'].apply(lambda x: u2i[x]), 'item': ds['movieId'].apply(lambda x: x2i[x])})", "ind, user in enumerate(ds['userId'].unique())} x2i = {movie: ind for ind, movie in enumerate(ds['movieId'].unique())}", "'ratings.csv')) print('Overall records:', ds.shape[0]) print('Overall users:', len(ds['userId'].unique())) print('Overall movies:', 
len(ds['movieId'].unique())) ds = keep_positive_ratings(ds,", "movie_users_threshold, 'movieId', 'userId') ds = count_filter(ds, user_movies_threshold, 'userId', 'movieId') print('Left records:', ds.shape[0]) print('Left", "import keep_positive_ratings, count_filter from scripts.config import params def process_raw(input_dir, output_dir, movie_users_threshold, user_movies_threshold): ds", "user_movies_threshold): ds = pd.read_csv(path.join(input_dir, 'ratings.csv')) print('Overall records:', ds.shape[0]) print('Overall users:', len(ds['userId'].unique())) print('Overall movies:',", "'ds.csv'), index=False) with open(path.join(output_dir, 'u2i.pickle'), 'wb') as handle: pickle.dump(u2i, handle) with open(path.join(output_dir, 'x2i.pickle'),", "x: u2i[x]), 'item': ds['movieId'].apply(lambda x: x2i[x])}) if not path.exists(output_dir): os.makedirs(output_dir) processed.to_csv(path.join(output_dir, 'ds.csv'), index=False)", "pandas as pd import os from os import path from scripts.process_raw import keep_positive_ratings,", "processed.to_csv(path.join(output_dir, 'ds.csv'), index=False) with open(path.join(output_dir, 'u2i.pickle'), 'wb') as handle: pickle.dump(u2i, handle) with open(path.join(output_dir,", "users:', len(ds['userId'].unique())) print('Left movies:', len(ds['movieId'].unique())) u2i = {user: ind for ind, user in", "print('Overall users:', len(ds['userId'].unique())) print('Overall movies:', len(ds['movieId'].unique())) ds = keep_positive_ratings(ds, 'userId', 'movieId', 'rating') ds", "def process_raw(input_dir, output_dir, movie_users_threshold, user_movies_threshold): ds = pd.read_csv(path.join(input_dir, 'ratings.csv')) print('Overall records:', ds.shape[0]) print('Overall", "'rating') ds = count_filter(ds, movie_users_threshold, 'movieId', 'userId') ds = count_filter(ds, user_movies_threshold, 'userId', 'movieId')", "os.makedirs(output_dir) processed.to_csv(path.join(output_dir, 'ds.csv'), index=False) with open(path.join(output_dir, 'u2i.pickle'), 
'wb') as handle: pickle.dump(u2i, handle) with", "pickle.dump(u2i, handle) with open(path.join(output_dir, 'x2i.pickle'), 'wb') as handle: pickle.dump(x2i, handle) if __name__ ==", "'movieId', 'rating') ds = count_filter(ds, movie_users_threshold, 'movieId', 'userId') ds = count_filter(ds, user_movies_threshold, 'userId',", "import pandas as pd import os from os import path from scripts.process_raw import", "pickle.dump(x2i, handle) if __name__ == '__main__': common_params = params['ml']['common'] proc_params = params['ml']['process_raw'] process_raw(common_params['raw_dir'],", "user in enumerate(ds['userId'].unique())} x2i = {movie: ind for ind, movie in enumerate(ds['movieId'].unique())} processed", "os from os import path from scripts.process_raw import keep_positive_ratings, count_filter from scripts.config import", "print('Overall movies:', len(ds['movieId'].unique())) ds = keep_positive_ratings(ds, 'userId', 'movieId', 'rating') ds = count_filter(ds, movie_users_threshold,", "{movie: ind for ind, movie in enumerate(ds['movieId'].unique())} processed = pd.DataFrame({'user': ds['userId'].apply(lambda x: u2i[x]),", "in enumerate(ds['movieId'].unique())} processed = pd.DataFrame({'user': ds['userId'].apply(lambda x: u2i[x]), 'item': ds['movieId'].apply(lambda x: x2i[x])}) if", "scripts.config import params def process_raw(input_dir, output_dir, movie_users_threshold, user_movies_threshold): ds = pd.read_csv(path.join(input_dir, 'ratings.csv')) print('Overall", "with open(path.join(output_dir, 'x2i.pickle'), 'wb') as handle: pickle.dump(x2i, handle) if __name__ == '__main__': common_params", "'userId') ds = count_filter(ds, user_movies_threshold, 'userId', 'movieId') print('Left records:', ds.shape[0]) print('Left users:', len(ds['userId'].unique()))", "= count_filter(ds, movie_users_threshold, 'movieId', 'userId') ds = count_filter(ds, user_movies_threshold, 'userId', 'movieId') print('Left records:',", "= pd.read_csv(path.join(input_dir, 'ratings.csv')) 
print('Overall records:', ds.shape[0]) print('Overall users:', len(ds['userId'].unique())) print('Overall movies:', len(ds['movieId'].unique())) ds", "pd import os from os import path from scripts.process_raw import keep_positive_ratings, count_filter from", "output_dir, movie_users_threshold, user_movies_threshold): ds = pd.read_csv(path.join(input_dir, 'ratings.csv')) print('Overall records:', ds.shape[0]) print('Overall users:', len(ds['userId'].unique()))", "'userId', 'movieId') print('Left records:', ds.shape[0]) print('Left users:', len(ds['userId'].unique())) print('Left movies:', len(ds['movieId'].unique())) u2i =", "'movieId') print('Left records:', ds.shape[0]) print('Left users:', len(ds['userId'].unique())) print('Left movies:', len(ds['movieId'].unique())) u2i = {user:", "scripts.process_raw import keep_positive_ratings, count_filter from scripts.config import params def process_raw(input_dir, output_dir, movie_users_threshold, user_movies_threshold):", "len(ds['userId'].unique())) print('Left movies:', len(ds['movieId'].unique())) u2i = {user: ind for ind, user in enumerate(ds['userId'].unique())}", "ind for ind, movie in enumerate(ds['movieId'].unique())} processed = pd.DataFrame({'user': ds['userId'].apply(lambda x: u2i[x]), 'item':", "x: x2i[x])}) if not path.exists(output_dir): os.makedirs(output_dir) processed.to_csv(path.join(output_dir, 'ds.csv'), index=False) with open(path.join(output_dir, 'u2i.pickle'), 'wb')", "= pd.DataFrame({'user': ds['userId'].apply(lambda x: u2i[x]), 'item': ds['movieId'].apply(lambda x: x2i[x])}) if not path.exists(output_dir): os.makedirs(output_dir)", "open(path.join(output_dir, 'x2i.pickle'), 'wb') as handle: pickle.dump(x2i, handle) if __name__ == '__main__': common_params =", "movie_users_threshold, user_movies_threshold): ds = pd.read_csv(path.join(input_dir, 'ratings.csv')) print('Overall records:', ds.shape[0]) print('Overall users:', len(ds['userId'].unique())) print('Overall", "= {user: ind for ind, user in 
enumerate(ds['userId'].unique())} x2i = {movie: ind for", "count_filter from scripts.config import params def process_raw(input_dir, output_dir, movie_users_threshold, user_movies_threshold): ds = pd.read_csv(path.join(input_dir,", "not path.exists(output_dir): os.makedirs(output_dir) processed.to_csv(path.join(output_dir, 'ds.csv'), index=False) with open(path.join(output_dir, 'u2i.pickle'), 'wb') as handle: pickle.dump(u2i,", "process_raw(input_dir, output_dir, movie_users_threshold, user_movies_threshold): ds = pd.read_csv(path.join(input_dir, 'ratings.csv')) print('Overall records:', ds.shape[0]) print('Overall users:',", "ds = count_filter(ds, user_movies_threshold, 'userId', 'movieId') print('Left records:', ds.shape[0]) print('Left users:', len(ds['userId'].unique())) print('Left", "for ind, user in enumerate(ds['userId'].unique())} x2i = {movie: ind for ind, movie in", "enumerate(ds['movieId'].unique())} processed = pd.DataFrame({'user': ds['userId'].apply(lambda x: u2i[x]), 'item': ds['movieId'].apply(lambda x: x2i[x])}) if not", "import path from scripts.process_raw import keep_positive_ratings, count_filter from scripts.config import params def process_raw(input_dir,", "records:', ds.shape[0]) print('Left users:', len(ds['userId'].unique())) print('Left movies:', len(ds['movieId'].unique())) u2i = {user: ind for", "from scripts.process_raw import keep_positive_ratings, count_filter from scripts.config import params def process_raw(input_dir, output_dir, movie_users_threshold,", "for ind, movie in enumerate(ds['movieId'].unique())} processed = pd.DataFrame({'user': ds['userId'].apply(lambda x: u2i[x]), 'item': ds['movieId'].apply(lambda", "pd.DataFrame({'user': ds['userId'].apply(lambda x: u2i[x]), 'item': ds['movieId'].apply(lambda x: x2i[x])}) if not path.exists(output_dir): os.makedirs(output_dir) processed.to_csv(path.join(output_dir,", "print('Left movies:', len(ds['movieId'].unique())) u2i = {user: ind for ind, user in 
enumerate(ds['userId'].unique())} x2i", "print('Left records:', ds.shape[0]) print('Left users:', len(ds['userId'].unique())) print('Left movies:', len(ds['movieId'].unique())) u2i = {user: ind", "enumerate(ds['userId'].unique())} x2i = {movie: ind for ind, movie in enumerate(ds['movieId'].unique())} processed = pd.DataFrame({'user':", "index=False) with open(path.join(output_dir, 'u2i.pickle'), 'wb') as handle: pickle.dump(u2i, handle) with open(path.join(output_dir, 'x2i.pickle'), 'wb')", "ds['movieId'].apply(lambda x: x2i[x])}) if not path.exists(output_dir): os.makedirs(output_dir) processed.to_csv(path.join(output_dir, 'ds.csv'), index=False) with open(path.join(output_dir, 'u2i.pickle'),", "'userId', 'movieId', 'rating') ds = count_filter(ds, movie_users_threshold, 'movieId', 'userId') ds = count_filter(ds, user_movies_threshold,", "keep_positive_ratings, count_filter from scripts.config import params def process_raw(input_dir, output_dir, movie_users_threshold, user_movies_threshold): ds =", "len(ds['userId'].unique())) print('Overall movies:', len(ds['movieId'].unique())) ds = keep_positive_ratings(ds, 'userId', 'movieId', 'rating') ds = count_filter(ds,", "= keep_positive_ratings(ds, 'userId', 'movieId', 'rating') ds = count_filter(ds, movie_users_threshold, 'movieId', 'userId') ds =", "if not path.exists(output_dir): os.makedirs(output_dir) processed.to_csv(path.join(output_dir, 'ds.csv'), index=False) with open(path.join(output_dir, 'u2i.pickle'), 'wb') as handle:", "count_filter(ds, user_movies_threshold, 'userId', 'movieId') print('Left records:', ds.shape[0]) print('Left users:', len(ds['userId'].unique())) print('Left movies:', len(ds['movieId'].unique()))", "'x2i.pickle'), 'wb') as handle: pickle.dump(x2i, handle) if __name__ == '__main__': common_params = params['ml']['common']", "'wb') as handle: pickle.dump(u2i, handle) with open(path.join(output_dir, 'x2i.pickle'), 'wb') as handle: pickle.dump(x2i, handle)", "path.exists(output_dir): 
os.makedirs(output_dir) processed.to_csv(path.join(output_dir, 'ds.csv'), index=False) with open(path.join(output_dir, 'u2i.pickle'), 'wb') as handle: pickle.dump(u2i, handle)", "len(ds['movieId'].unique())) u2i = {user: ind for ind, user in enumerate(ds['userId'].unique())} x2i = {movie:", "ds.shape[0]) print('Left users:', len(ds['userId'].unique())) print('Left movies:', len(ds['movieId'].unique())) u2i = {user: ind for ind,", "ds['userId'].apply(lambda x: u2i[x]), 'item': ds['movieId'].apply(lambda x: x2i[x])}) if not path.exists(output_dir): os.makedirs(output_dir) processed.to_csv(path.join(output_dir, 'ds.csv'),", "user_movies_threshold, 'userId', 'movieId') print('Left records:', ds.shape[0]) print('Left users:', len(ds['userId'].unique())) print('Left movies:', len(ds['movieId'].unique())) u2i", "with open(path.join(output_dir, 'u2i.pickle'), 'wb') as handle: pickle.dump(u2i, handle) with open(path.join(output_dir, 'x2i.pickle'), 'wb') as", "ds = pd.read_csv(path.join(input_dir, 'ratings.csv')) print('Overall records:', ds.shape[0]) print('Overall users:', len(ds['userId'].unique())) print('Overall movies:', len(ds['movieId'].unique()))", "'movieId', 'userId') ds = count_filter(ds, user_movies_threshold, 'userId', 'movieId') print('Left records:', ds.shape[0]) print('Left users:',", "import os from os import path from scripts.process_raw import keep_positive_ratings, count_filter from scripts.config", "x2i[x])}) if not path.exists(output_dir): os.makedirs(output_dir) processed.to_csv(path.join(output_dir, 'ds.csv'), index=False) with open(path.join(output_dir, 'u2i.pickle'), 'wb') as", "count_filter(ds, movie_users_threshold, 'movieId', 'userId') ds = count_filter(ds, user_movies_threshold, 'userId', 'movieId') print('Left records:', ds.shape[0])", "users:', len(ds['userId'].unique())) print('Overall movies:', len(ds['movieId'].unique())) ds = keep_positive_ratings(ds, 'userId', 'movieId', 'rating') ds =", "'u2i.pickle'), 'wb') as handle: pickle.dump(u2i, 
handle) with open(path.join(output_dir, 'x2i.pickle'), 'wb') as handle: pickle.dump(x2i,", "from scripts.config import params def process_raw(input_dir, output_dir, movie_users_threshold, user_movies_threshold): ds = pd.read_csv(path.join(input_dir, 'ratings.csv'))", "ds.shape[0]) print('Overall users:', len(ds['userId'].unique())) print('Overall movies:', len(ds['movieId'].unique())) ds = keep_positive_ratings(ds, 'userId', 'movieId', 'rating')", "ds = keep_positive_ratings(ds, 'userId', 'movieId', 'rating') ds = count_filter(ds, movie_users_threshold, 'movieId', 'userId') ds", "from os import path from scripts.process_raw import keep_positive_ratings, count_filter from scripts.config import params", "{user: ind for ind, user in enumerate(ds['userId'].unique())} x2i = {movie: ind for ind,", "len(ds['movieId'].unique())) ds = keep_positive_ratings(ds, 'userId', 'movieId', 'rating') ds = count_filter(ds, movie_users_threshold, 'movieId', 'userId')", "ds = count_filter(ds, movie_users_threshold, 'movieId', 'userId') ds = count_filter(ds, user_movies_threshold, 'userId', 'movieId') print('Left", "'wb') as handle: pickle.dump(x2i, handle) if __name__ == '__main__': common_params = params['ml']['common'] proc_params", "handle) if __name__ == '__main__': common_params = params['ml']['common'] proc_params = params['ml']['process_raw'] process_raw(common_params['raw_dir'], common_params['proc_dir'],", "pd.read_csv(path.join(input_dir, 'ratings.csv')) print('Overall records:', ds.shape[0]) print('Overall users:', len(ds['userId'].unique())) print('Overall movies:', len(ds['movieId'].unique())) ds =", "os import path from scripts.process_raw import keep_positive_ratings, count_filter from scripts.config import params def", "if __name__ == '__main__': common_params = params['ml']['common'] proc_params = params['ml']['process_raw'] process_raw(common_params['raw_dir'], common_params['proc_dir'], int(proc_params['movie_users_threshold']),", "as handle: pickle.dump(u2i, handle) 
with open(path.join(output_dir, 'x2i.pickle'), 'wb') as handle: pickle.dump(x2i, handle) if", "handle) with open(path.join(output_dir, 'x2i.pickle'), 'wb') as handle: pickle.dump(x2i, handle) if __name__ == '__main__':", "handle: pickle.dump(x2i, handle) if __name__ == '__main__': common_params = params['ml']['common'] proc_params = params['ml']['process_raw']", "import params def process_raw(input_dir, output_dir, movie_users_threshold, user_movies_threshold): ds = pd.read_csv(path.join(input_dir, 'ratings.csv')) print('Overall records:',", "x2i = {movie: ind for ind, movie in enumerate(ds['movieId'].unique())} processed = pd.DataFrame({'user': ds['userId'].apply(lambda", "= {movie: ind for ind, movie in enumerate(ds['movieId'].unique())} processed = pd.DataFrame({'user': ds['userId'].apply(lambda x:", "__name__ == '__main__': common_params = params['ml']['common'] proc_params = params['ml']['process_raw'] process_raw(common_params['raw_dir'], common_params['proc_dir'], int(proc_params['movie_users_threshold']), int(proc_params['user_movies_threshold']))", "movies:', len(ds['movieId'].unique())) ds = keep_positive_ratings(ds, 'userId', 'movieId', 'rating') ds = count_filter(ds, movie_users_threshold, 'movieId',", "= count_filter(ds, user_movies_threshold, 'userId', 'movieId') print('Left records:', ds.shape[0]) print('Left users:', len(ds['userId'].unique())) print('Left movies:',", "movies:', len(ds['movieId'].unique())) u2i = {user: ind for ind, user in enumerate(ds['userId'].unique())} x2i =", "processed = pd.DataFrame({'user': ds['userId'].apply(lambda x: u2i[x]), 'item': ds['movieId'].apply(lambda x: x2i[x])}) if not path.exists(output_dir):", "open(path.join(output_dir, 'u2i.pickle'), 'wb') as handle: pickle.dump(u2i, handle) with open(path.join(output_dir, 'x2i.pickle'), 'wb') as handle:", "u2i = {user: ind for ind, user in enumerate(ds['userId'].unique())} x2i = {movie: ind", "records:', ds.shape[0]) print('Overall users:', len(ds['userId'].unique())) 
print('Overall movies:', len(ds['movieId'].unique())) ds = keep_positive_ratings(ds, 'userId', 'movieId',", "ind for ind, user in enumerate(ds['userId'].unique())} x2i = {movie: ind for ind, movie", "import pickle import pandas as pd import os from os import path from", "in enumerate(ds['userId'].unique())} x2i = {movie: ind for ind, movie in enumerate(ds['movieId'].unique())} processed =", "keep_positive_ratings(ds, 'userId', 'movieId', 'rating') ds = count_filter(ds, movie_users_threshold, 'movieId', 'userId') ds = count_filter(ds,", "pickle import pandas as pd import os from os import path from scripts.process_raw" ]
[ "as f: f.write(\"Recommender class: {}\\n\".format(recommender_name)) f.write(\"Recommender fit parameters: {}\\n\".format(recommender_fit_parameters)) f.write(\"Number of folds: {}\\n\".format(num_folds))", "[6910, 1996, 2019, 153, 12, 5, 1010, 9999, 666, 467] def write_results_on_file(file_path, recommender_name,", "write_results_on_file(file_path, recommender_name, recommender_fit_parameters, num_folds, seed_list, results): with open(file_path, \"w\") as f: f.write(\"Recommender class:", "f.write(\"Recommender class: {}\\n\".format(recommender_name)) f.write(\"Recommender fit parameters: {}\\n\".format(recommender_fit_parameters)) f.write(\"Number of folds: {}\\n\".format(num_folds)) f.write(\"Seed list:", "class: {}\\n\".format(recommender_name)) f.write(\"Recommender fit parameters: {}\\n\".format(recommender_fit_parameters)) f.write(\"Number of folds: {}\\n\".format(num_folds)) f.write(\"Seed list: {}\\n\\n\".format(str(seed_list)))", "num_folds, seed_list, results): with open(file_path, \"w\") as f: f.write(\"Recommender class: {}\\n\".format(recommender_name)) f.write(\"Recommender fit", "recommender_name, recommender_fit_parameters, num_folds, seed_list, results): with open(file_path, \"w\") as f: f.write(\"Recommender class: {}\\n\".format(recommender_name))", "with open(file_path, \"w\") as f: f.write(\"Recommender class: {}\\n\".format(recommender_name)) f.write(\"Recommender fit parameters: {}\\n\".format(recommender_fit_parameters)) f.write(\"Number", "recommender_fit_parameters, num_folds, seed_list, results): with open(file_path, \"w\") as f: f.write(\"Recommender class: {}\\n\".format(recommender_name)) f.write(\"Recommender", "5, 1010, 9999, 666, 467] def write_results_on_file(file_path, recommender_name, recommender_fit_parameters, num_folds, seed_list, results): with", "666, 467] def write_results_on_file(file_path, recommender_name, recommender_fit_parameters, num_folds, seed_list, results): with open(file_path, \"w\") as", "9999, 666, 467] 
def write_results_on_file(file_path, recommender_name, recommender_fit_parameters, num_folds, seed_list, results): with open(file_path, \"w\")", "1996, 2019, 153, 12, 5, 1010, 9999, 666, 467] def write_results_on_file(file_path, recommender_name, recommender_fit_parameters,", "1010, 9999, 666, 467] def write_results_on_file(file_path, recommender_name, recommender_fit_parameters, num_folds, seed_list, results): with open(file_path,", "seed_list, results): with open(file_path, \"w\") as f: f.write(\"Recommender class: {}\\n\".format(recommender_name)) f.write(\"Recommender fit parameters:", "open(file_path, \"w\") as f: f.write(\"Recommender class: {}\\n\".format(recommender_name)) f.write(\"Recommender fit parameters: {}\\n\".format(recommender_fit_parameters)) f.write(\"Number of", "\"w\") as f: f.write(\"Recommender class: {}\\n\".format(recommender_name)) f.write(\"Recommender fit parameters: {}\\n\".format(recommender_fit_parameters)) f.write(\"Number of folds:", "get_seed_list(): return [6910, 1996, 2019, 153, 12, 5, 1010, 9999, 666, 467] def", "f: f.write(\"Recommender class: {}\\n\".format(recommender_name)) f.write(\"Recommender fit parameters: {}\\n\".format(recommender_fit_parameters)) f.write(\"Number of folds: {}\\n\".format(num_folds)) f.write(\"Seed", "153, 12, 5, 1010, 9999, 666, 467] def write_results_on_file(file_path, recommender_name, recommender_fit_parameters, num_folds, seed_list,", "467] def write_results_on_file(file_path, recommender_name, recommender_fit_parameters, num_folds, seed_list, results): with open(file_path, \"w\") as f:", "{}\\n\".format(recommender_name)) f.write(\"Recommender fit parameters: {}\\n\".format(recommender_fit_parameters)) f.write(\"Number of folds: {}\\n\".format(num_folds)) f.write(\"Seed list: {}\\n\\n\".format(str(seed_list))) f.write(str(results))", "return [6910, 1996, 2019, 153, 12, 5, 1010, 9999, 666, 467] def write_results_on_file(file_path,", "def write_results_on_file(file_path, recommender_name, 
recommender_fit_parameters, num_folds, seed_list, results): with open(file_path, \"w\") as f: f.write(\"Recommender", "results): with open(file_path, \"w\") as f: f.write(\"Recommender class: {}\\n\".format(recommender_name)) f.write(\"Recommender fit parameters: {}\\n\".format(recommender_fit_parameters))", "12, 5, 1010, 9999, 666, 467] def write_results_on_file(file_path, recommender_name, recommender_fit_parameters, num_folds, seed_list, results):", "def get_seed_list(): return [6910, 1996, 2019, 153, 12, 5, 1010, 9999, 666, 467]", "2019, 153, 12, 5, 1010, 9999, 666, 467] def write_results_on_file(file_path, recommender_name, recommender_fit_parameters, num_folds," ]
[ "self.timeNight.time().hour() self.settings['nightmin'] = self.timeNight.time().minute() else: self.settings['isCustomTimes'] = 0 functions.write_settings(self.settingsPath, self.settings) if self.isClosedFromTray: event.accept()", "self.mainTimer = QtCore.QTimer() self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer = QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images) # populate data self.set_image(self.settings['labelDayImg'], self.labelDayImg)", "def set_desktop(self): now = QtCore.QTime.currentTime() if self.timeDawn.time() < now <= self.timeDay.time(): imageFile =", "getattr(self, \"raise\")() self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState) self.show() def close_from_tray(self): self.isClosedFromTray = True self.close() def closeEvent(self,", "reason): if reason == QtWidgets.QSystemTrayIcon.DoubleClick or reason == QtWidgets.QSystemTrayIcon.Trigger: self.show_window() if __name__ ==", "* 60000 if self.shuffleTimer.remainingTime() > newTime: self.shuffleTimer.start(newTime) self.settings['shuffleTime'] = self.spinShuffleTime.value() def load_times(self): if", "self.trayIcon.setToolTip(\"Circadian Desktops\") self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show() self.trayMenu = QtWidgets.QMenu() self.trayMenu.addAction(\"Open Circadian Desktops\", self.show_window) self.trayMenu.addSeparator() self.trayMenu.addAction(", "= QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian Desktops\") self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show() self.trayMenu = QtWidgets.QMenu() self.trayMenu.addAction(\"Open Circadian Desktops\",", "& images functions.set_process_explicit() # So Windows uses logo icon app = QtWidgets.QApplication([]) ui", "self.trayMenu = QtWidgets.QMenu() self.trayMenu.addAction(\"Open Circadian Desktops\", self.show_window) self.trayMenu.addSeparator() self.trayMenu.addAction( \"Exit Circadian 
Desktops\", self.close_from_tray)", "self.settings['isSlideshow'] = 0 self.spinShuffleTime.setReadOnly(True) elif self.comboBox.currentText() == 'slideshow from folders': self.shuffleTimer.start(self.settings['shuffleTime'] * 60000)", "= \"settings.txt\" logoFile = \"Icons\\\\logo.png\" class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): \"\"\" MainWindow class for the", "def custom_times(self): self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False) def default_times(self): d = functions.get_times() self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour,", "self.settings['runOnStartup']: self.boxStartup.setChecked(True) def set_palette(self): if self.boxDark.isChecked(): self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode'] = 1 else: self.setPalette(QtGui.QPalette()) self.settings['isDarkMode']", "Desktops\", self.show_window) self.trayMenu.addSeparator() self.trayMenu.addAction( \"Exit Circadian Desktops\", self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu) # timers self.mainTimer =", "= self.settings['labelDDImg'] else: imageFile = self.settings['labelNightImg'] if imageFile != self.activeImage: functions.set_desktop(imageFile) self.activeImage =", "imageLbl: QtWidgets.QLabel): fileName, _ = QtWidgets.QFileDialog.getOpenFileName( None, \"Select image\", \"\", \"Image files (*.png", "if int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0)) self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']), int(self.settings['daymin']), 0)) self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']), int(self.settings['duskmin']),", "files (*.png *.jpg *.jpeg *.bmp)\") if fileName: self.set_image(fileName, imageLbl) self.set_desktop() def shuffle_images(self): self.set_image(self.settings['labelDayImg'],", "script for Circadian Desktops app. 
Settings file and logo images are stored locally.", "d['sunrise'].hour, d['sunrise'].minute, 0)) self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour, d['sunset'].minute, 0)) self.timeNight.setTime(QtCore.QTime( d['dusk'].hour, d['dusk'].minute, 0)) self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True)", "self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']), int(self.settings['nightmin']), 0)) self.custom_times() self.radioCustomTimes.setChecked(True) else: self.default_times() def custom_times(self): self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False)", "_ = QtWidgets.QFileDialog.getOpenFileName( None, \"Select image\", \"\", \"Image files (*.png *.jpg *.jpeg *.bmp)\")", "self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour) # tray icon self.trayIcon = QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian Desktops\") self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show()", "super(MainWindow, self).__init__(parent) self.setupUi(self) self.settingsPath = settings self.isClosedFromTray = False self.settings = functions.get_settings(settings) self.activeImage", "self.timeDay.time(): imageFile = self.settings['labelDDImg'] elif self.timeDay.time() < now <= self.timeDusk.time(): imageFile = self.settings['labelDayImg']", "now <= self.timeDay.time(): imageFile = self.settings['labelDDImg'] elif self.timeDay.time() < now <= self.timeDusk.time(): imageFile", "imageLbl) self.set_desktop() def shuffle_images(self): self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime'] * 60000)", "self.load_preferences() self.set_desktop() self.set_background_style() def set_image(self, 
fileName: str, imageLbl: QtWidgets.QLabel): if self.settings['isSlideshow']: fileName =", "fileName, _ = QtWidgets.QFileDialog.getOpenFileName( None, \"Select image\", \"\", \"Image files (*.png *.jpg *.jpeg", "lambda: self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect( lambda: self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect( lambda: self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style) self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette)", "# To pick up settings & images functions.set_process_explicit() # So Windows uses logo", "import functions from ui_mainwindow import Ui_MainWindow settingsFile = \"settings.txt\" logoFile = \"Icons\\\\logo.png\" class", "\"\"\" MainWindow class for the UI. Inherits from Ui_MainWindow, which contains the layout", "self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette) self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour) # tray icon self.trayIcon = QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian Desktops\")", "= imageFile self.mainTimer.start(60000) def set_background_style(self): if self.comboBox.currentText() == 'single image': self.shuffleTimer.stop() self.settings['isSlideshow'] =", "self.settings['dawnmin'] = self.timeDawn.time().minute() self.settings['dayhour'] = self.timeDay.time().hour() self.settings['daymin'] = self.timeDay.time().minute() self.settings['duskhour'] = self.timeDusk.time().hour() self.settings['duskmin']", "event.accept() else: event.ignore() self.hide() functions.set_background_priority(True) def __icon_activated(self, reason): if reason == 
QtWidgets.QSystemTrayIcon.DoubleClick or", "= 0 functions.write_settings(self.settingsPath, self.settings) if self.isClosedFromTray: event.accept() else: event.ignore() self.hide() functions.set_background_priority(True) def __icon_activated(self,", "\"\"\" def __init__(self, parent=None, settings=None): # setup super(MainWindow, self).__init__(parent) self.setupUi(self) self.settingsPath = settings", "= fileName def get_image(self, imageLbl: QtWidgets.QLabel): fileName, _ = QtWidgets.QFileDialog.getOpenFileName( None, \"Select image\",", "d['sunrise'].minute, 0)) self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour, d['sunset'].minute, 0)) self.timeNight.setTime(QtCore.QTime( d['dusk'].hour, d['dusk'].minute, 0)) self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True) self.timeDusk.setReadOnly(True)", "def __init__(self, parent=None, settings=None): # setup super(MainWindow, self).__init__(parent) self.setupUi(self) self.settingsPath = settings self.isClosedFromTray", "int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0)) self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']), int(self.settings['daymin']), 0)) self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']), int(self.settings['duskmin']), 0)) self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']),", "else: event.ignore() self.hide() functions.set_background_priority(True) def __icon_activated(self, reason): if reason == QtWidgets.QSystemTrayIcon.DoubleClick or reason", "else: self.isClosedFromTray = True if self.settings['runOnStartup']: self.boxStartup.setChecked(True) def set_palette(self): if self.boxDark.isChecked(): self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode']", "__icon_activated(self, reason): if reason == QtWidgets.QSystemTrayIcon.DoubleClick or reason == QtWidgets.QSystemTrayIcon.Trigger: self.show_window() if __name__", "self.comboBox.currentIndexChanged.connect(self.set_background_style) 
self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette) self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour) # tray icon self.trayIcon = QtWidgets.QSystemTrayIcon()", "locally. Contains MainWindow class and script to run app. \"\"\" import os import", "= 1 self.spinShuffleTime.setReadOnly(False) def set_shuffle_time(self): newTime = self.spinShuffleTime.value() * 60000 if self.shuffleTimer.remainingTime() >", "get_image(self, imageLbl: QtWidgets.QLabel): fileName, _ = QtWidgets.QFileDialog.getOpenFileName( None, \"Select image\", \"\", \"Image files", "ui_mainwindow import Ui_MainWindow settingsFile = \"settings.txt\" logoFile = \"Icons\\\\logo.png\" class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): \"\"\"", "self.settings['dawnhour'] = self.timeDawn.time().hour() self.settings['dawnmin'] = self.timeDawn.time().minute() self.settings['dayhour'] = self.timeDay.time().hour() self.settings['daymin'] = self.timeDay.time().minute() self.settings['duskhour']", "self.labelNightImg) self.load_times() self.load_preferences() self.set_desktop() self.set_background_style() def set_image(self, fileName: str, imageLbl: QtWidgets.QLabel): if self.settings['isSlideshow']:", "self.comboBox.currentText() == 'slideshow from folders': self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.settings['isSlideshow'] = 1 self.spinShuffleTime.setReadOnly(False) def", "Windows uses logo icon app = QtWidgets.QApplication([]) ui = MainWindow(settings=settingsFile) app.setStyle('fusion') if '/noshow'", "self.btnSelectDayImg.clicked.connect( lambda: self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect( lambda: self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect( lambda: 
self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style) self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times)", "== 'slideshow from folders': self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.settings['isSlideshow'] = 1 self.spinShuffleTime.setReadOnly(False) def set_shuffle_time(self):", "which contains the layout of the widgets. \"\"\" def __init__(self, parent=None, settings=None): #", "self.set_palette() if self.settings['minimizeToTray']: self.boxMinimize.setChecked(True) else: self.isClosedFromTray = True if self.settings['runOnStartup']: self.boxStartup.setChecked(True) def set_palette(self):", "or reason == QtWidgets.QSystemTrayIcon.Trigger: self.show_window() if __name__ == \"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__))) # To pick", "sys from PyQt5 import QtCore, QtGui, QtWidgets import custom_qt import functions from ui_mainwindow", "widgets to methods self.btnSelectDayImg.clicked.connect( lambda: self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect( lambda: self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect( lambda: self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style)", "run app. 
\"\"\" import os import sys from PyQt5 import QtCore, QtGui, QtWidgets", "methods self.btnSelectDayImg.clicked.connect( lambda: self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect( lambda: self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect( lambda: self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style) self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times)", "imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()] = fileName def get_image(self, imageLbl: QtWidgets.QLabel): fileName, _ = QtWidgets.QFileDialog.getOpenFileName( None,", "self.timeDusk.time().hour() self.settings['duskmin'] = self.timeDusk.time().minute() self.settings['nighthour'] = self.timeNight.time().hour() self.settings['nightmin'] = self.timeNight.time().minute() else: self.settings['isCustomTimes'] =", "QtWidgets.QApplication([]) ui = MainWindow(settings=settingsFile) app.setStyle('fusion') if '/noshow' in sys.argv: functions.set_background_priority(True) else: ui.show() app.setWindowIcon(QtGui.QIcon(logoFile))", "= self.timeDusk.time().minute() self.settings['nighthour'] = self.timeNight.time().hour() self.settings['nightmin'] = self.timeNight.time().minute() else: self.settings['isCustomTimes'] = 0 functions.write_settings(self.settingsPath,", "imageLbl.height(), QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()] = fileName def get_image(self, imageLbl: QtWidgets.QLabel): fileName, _", "imageFile = self.settings['labelDDImg'] else: imageFile = self.settings['labelNightImg'] if imageFile != self.activeImage: functions.set_desktop(imageFile) self.activeImage", "self.comboBox.setCurrentIndex(1) else: self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime']) if self.settings['isDarkMode']: 
self.boxDark.setChecked(True) self.set_palette() if self.settings['minimizeToTray']: self.boxMinimize.setChecked(True) else: self.isClosedFromTray", "= 0 self.spinShuffleTime.setReadOnly(True) elif self.comboBox.currentText() == 'slideshow from folders': self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.settings['isSlideshow']", "< now <= self.timeDay.time(): imageFile = self.settings['labelDDImg'] elif self.timeDay.time() < now <= self.timeDusk.time():", "self.settings['labelDDImg'] elif self.timeDay.time() < now <= self.timeDusk.time(): imageFile = self.settings['labelDayImg'] elif self.timeDusk.time() <", "self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour, d['sunrise'].minute, 0)) self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour, d['sunset'].minute, 0)) self.timeNight.setTime(QtCore.QTime( d['dusk'].hour, d['dusk'].minute, 0)) self.timeDawn.setReadOnly(True)", "< now <= self.timeDusk.time(): imageFile = self.settings['labelDayImg'] elif self.timeDusk.time() < now <= self.timeNight.time():", "# tray icon self.trayIcon = QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian Desktops\") self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show() self.trayMenu =", "== 'single image': self.shuffleTimer.stop() self.settings['isSlideshow'] = 0 self.spinShuffleTime.setReadOnly(True) elif self.comboBox.currentText() == 'slideshow from", "functions.get_settings(settings) self.activeImage = '' # connect widgets to methods self.btnSelectDayImg.clicked.connect( lambda: self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect(", "60000) self.set_desktop() def set_desktop(self): now = QtCore.QTime.currentTime() if self.timeDawn.time() < now <= self.timeDay.time():", "else: self.isClosedFromTray = True self.settings['minimizeToTray'] = 0 def show_window(self): functions.set_background_priority(False) getattr(self, \"raise\")() self.activateWindow()", "def 
closeEvent(self, event): if self.radioCustomTimes.isChecked(): self.settings['isCustomTimes'] = 1 self.settings['dawnhour'] = self.timeDawn.time().hour() self.settings['dawnmin'] =", "def __icon_activated(self, reason): if reason == QtWidgets.QSystemTrayIcon.DoubleClick or reason == QtWidgets.QSystemTrayIcon.Trigger: self.show_window() if", "* 60000) self.set_desktop() def set_desktop(self): now = QtCore.QTime.currentTime() if self.timeDawn.time() < now <=", "60000 if self.shuffleTimer.remainingTime() > newTime: self.shuffleTimer.start(newTime) self.settings['shuffleTime'] = self.spinShuffleTime.value() def load_times(self): if int(self.settings['isCustomTimes']):", "self.settings['labelDDImg'] else: imageFile = self.settings['labelNightImg'] if imageFile != self.activeImage: functions.set_desktop(imageFile) self.activeImage = imageFile", "tray icon self.trayIcon = QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian Desktops\") self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show() self.trayMenu = QtWidgets.QMenu()", "0)) self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']), int(self.settings['nightmin']), 0)) self.custom_times() self.radioCustomTimes.setChecked(True) else: self.default_times() def custom_times(self): self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False)", "self.settings['isSlideshow'] = 1 self.spinShuffleTime.setReadOnly(False) def set_shuffle_time(self): newTime = self.spinShuffleTime.value() * 60000 if self.shuffleTimer.remainingTime()", "folders': self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.settings['isSlideshow'] = 1 self.spinShuffleTime.setReadOnly(False) def set_shuffle_time(self): newTime = self.spinShuffleTime.value()", "to run app. \"\"\" import os import sys from PyQt5 import QtCore, QtGui,", "and script to run app. \"\"\" import os import sys from PyQt5 import", "logo images are stored locally. 
Contains MainWindow class and script to run app.", "settings self.isClosedFromTray = False self.settings = functions.get_settings(settings) self.activeImage = '' # connect widgets", "Settings file and logo images are stored locally. Contains MainWindow class and script", "self.settings['duskmin'] = self.timeDusk.time().minute() self.settings['nighthour'] = self.timeNight.time().hour() self.settings['nightmin'] = self.timeNight.time().minute() else: self.settings['isCustomTimes'] = 0", "self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show() self.trayMenu = QtWidgets.QMenu() self.trayMenu.addAction(\"Open Circadian Desktops\", self.show_window) self.trayMenu.addSeparator() self.trayMenu.addAction( \"Exit Circadian", "self.set_desktop() def set_desktop(self): now = QtCore.QTime.currentTime() if self.timeDawn.time() < now <= self.timeDay.time(): imageFile", "= self.timeDay.time().hour() self.settings['daymin'] = self.timeDay.time().minute() self.settings['duskhour'] = self.timeDusk.time().hour() self.settings['duskmin'] = self.timeDusk.time().minute() self.settings['nighthour'] =", "class and script to run app. \"\"\" import os import sys from PyQt5", "__init__(self, parent=None, settings=None): # setup super(MainWindow, self).__init__(parent) self.setupUi(self) self.settingsPath = settings self.isClosedFromTray =", "= functions.random_image(fileName) pixmap = QtGui.QPixmap(fileName) pixmap = pixmap.scaled( imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter)", "0 def startup_behaviour(self): if self.boxStartup.isChecked(): functions.run_on_startup(True) self.settings['runOnStartup'] = 1 else: functions.run_on_startup(False) self.settings['runOnStartup'] =", "UI. Inherits from Ui_MainWindow, which contains the layout of the widgets. 
\"\"\" def", "int(self.settings['nighthour']), int(self.settings['nightmin']), 0)) self.custom_times() self.radioCustomTimes.setChecked(True) else: self.default_times() def custom_times(self): self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False)", "= self.timeDawn.time().minute() self.settings['dayhour'] = self.timeDay.time().hour() self.settings['daymin'] = self.timeDay.time().minute() self.settings['duskhour'] = self.timeDusk.time().hour() self.settings['duskmin'] =", "connect widgets to methods self.btnSelectDayImg.clicked.connect( lambda: self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect( lambda: self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect( lambda: self.get_image(self.labelNightImg))", "= self.timeNight.time().minute() else: self.settings['isCustomTimes'] = 0 functions.write_settings(self.settingsPath, self.settings) if self.isClosedFromTray: event.accept() else: event.ignore()", "elif self.timeDusk.time() < now <= self.timeNight.time(): imageFile = self.settings['labelDDImg'] else: imageFile = self.settings['labelNightImg']", "self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.load_times() self.load_preferences() self.set_desktop() self.set_background_style() def set_image(self, fileName: str, imageLbl: QtWidgets.QLabel):", "if self.isClosedFromTray: event.accept() else: event.ignore() self.hide() functions.set_background_priority(True) def __icon_activated(self, reason): if reason ==", "functions.set_background_priority(True) def __icon_activated(self, reason): if reason == QtWidgets.QSystemTrayIcon.DoubleClick or reason == QtWidgets.QSystemTrayIcon.Trigger: self.show_window()", "= 1 else: self.isClosedFromTray = True self.settings['minimizeToTray'] = 0 def show_window(self): functions.set_background_priority(False) getattr(self,", "event): if self.radioCustomTimes.isChecked(): 
self.settings['isCustomTimes'] = 1 self.settings['dawnhour'] = self.timeDawn.time().hour() self.settings['dawnmin'] = self.timeDawn.time().minute() self.settings['dayhour']", "from Ui_MainWindow, which contains the layout of the widgets. \"\"\" def __init__(self, parent=None,", "= self.settings['labelDayImg'] elif self.timeDusk.time() < now <= self.timeNight.time(): imageFile = self.settings['labelDDImg'] else: imageFile", "if self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1) else: self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime']) if self.settings['isDarkMode']: self.boxDark.setChecked(True) self.set_palette() if self.settings['minimizeToTray']: self.boxMinimize.setChecked(True)", "else: self.default_times() def custom_times(self): self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False) def default_times(self): d = functions.get_times()", "self.shuffleTimer.start(newTime) self.settings['shuffleTime'] = self.spinShuffleTime.value() def load_times(self): if int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0)) self.timeDay.setTime(QtCore.QTime(", "elif self.timeDay.time() < now <= self.timeDusk.time(): imageFile = self.settings['labelDayImg'] elif self.timeDusk.time() < now", "self.setPalette(QtGui.QPalette()) self.settings['isDarkMode'] = 0 def startup_behaviour(self): if self.boxStartup.isChecked(): functions.run_on_startup(True) self.settings['runOnStartup'] = 1 else:", "__name__ == \"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__))) # To pick up settings & images functions.set_process_explicit() #", "self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.load_times() self.load_preferences() self.set_desktop() 
self.set_background_style() def set_image(self, fileName: str,", "self.timeDawn.time() < now <= self.timeDay.time(): imageFile = self.settings['labelDDImg'] elif self.timeDay.time() < now <=", "self.setupUi(self) self.settingsPath = settings self.isClosedFromTray = False self.settings = functions.get_settings(settings) self.activeImage = ''", "To pick up settings & images functions.set_process_explicit() # So Windows uses logo icon", "self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.set_desktop() def set_desktop(self): now = QtCore.QTime.currentTime()", "\"\", \"Image files (*.png *.jpg *.jpeg *.bmp)\") if fileName: self.set_image(fileName, imageLbl) self.set_desktop() def", "self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian Desktops\") self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show() self.trayMenu = QtWidgets.QMenu() self.trayMenu.addAction(\"Open Circadian Desktops\", self.show_window) self.trayMenu.addSeparator()", "load_times(self): if int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0)) self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']), int(self.settings['daymin']), 0)) self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']),", "class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): \"\"\" MainWindow class for the UI. 
Inherits from Ui_MainWindow, which", "logo icon app = QtWidgets.QApplication([]) ui = MainWindow(settings=settingsFile) app.setStyle('fusion') if '/noshow' in sys.argv:", "= True self.settings['minimizeToTray'] = 0 def show_window(self): functions.set_background_priority(False) getattr(self, \"raise\")() self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState) self.show()", "0)) self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour, d['sunrise'].minute, 0)) self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour, d['sunset'].minute, 0)) self.timeNight.setTime(QtCore.QTime( d['dusk'].hour, d['dusk'].minute, 0))", "from folders': self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.settings['isSlideshow'] = 1 self.spinShuffleTime.setReadOnly(False) def set_shuffle_time(self): newTime =", "= functions.get_settings(settings) self.activeImage = '' # connect widgets to methods self.btnSelectDayImg.clicked.connect( lambda: self.get_image(self.labelDayImg))", "pixmap = pixmap.scaled( imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()] = fileName def get_image(self,", "else: self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime']) if self.settings['isDarkMode']: self.boxDark.setChecked(True) self.set_palette() if self.settings['minimizeToTray']: self.boxMinimize.setChecked(True) else: self.isClosedFromTray =", "self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.set_desktop() def set_desktop(self): now", "self.shuffleTimer = QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images) # populate data self.set_image(self.settings['labelDayImg'], self.labelDayImg) 
self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg)", "settings & images functions.set_process_explicit() # So Windows uses logo icon app = QtWidgets.QApplication([])", "to methods self.btnSelectDayImg.clicked.connect( lambda: self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect( lambda: self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect( lambda: self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style) self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time)", "self.timeNight.setReadOnly(True) def load_preferences(self): if self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1) else: self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime']) if self.settings['isDarkMode']: self.boxDark.setChecked(True) self.set_palette()", "QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian Desktops\") self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show() self.trayMenu = QtWidgets.QMenu() self.trayMenu.addAction(\"Open Circadian Desktops\", self.show_window)", "self.timeDay.time().hour() self.settings['daymin'] = self.timeDay.time().minute() self.settings['duskhour'] = self.timeDusk.time().hour() self.settings['duskmin'] = self.timeDusk.time().minute() self.settings['nighthour'] = self.timeNight.time().hour()", "self.boxStartup.isChecked(): functions.run_on_startup(True) self.settings['runOnStartup'] = 1 else: functions.run_on_startup(False) self.settings['runOnStartup'] = 0 def minimize_behaviour(self): if", "*.jpeg *.bmp)\") if fileName: self.set_image(fileName, imageLbl) self.set_desktop() def shuffle_images(self): self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg)", "self.isClosedFromTray = True 
self.settings['minimizeToTray'] = 0 def show_window(self): functions.set_background_priority(False) getattr(self, \"raise\")() self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState)", "int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0)) self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']), int(self.settings['daymin']), 0)) self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']), int(self.settings['duskmin']), 0))", "int(self.settings['dayhour']), int(self.settings['daymin']), 0)) self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']), int(self.settings['duskmin']), 0)) self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']), int(self.settings['nightmin']), 0)) self.custom_times() self.radioCustomTimes.setChecked(True)", "= self.timeDusk.time().hour() self.settings['duskmin'] = self.timeDusk.time().minute() self.settings['nighthour'] = self.timeNight.time().hour() self.settings['nightmin'] = self.timeNight.time().minute() else: self.settings['isCustomTimes']", "contains the layout of the widgets. \"\"\" def __init__(self, parent=None, settings=None): # setup", "of the widgets. 
\"\"\" def __init__(self, parent=None, settings=None): # setup super(MainWindow, self).__init__(parent) self.setupUi(self)", "QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()] = fileName def get_image(self, imageLbl: QtWidgets.QLabel): fileName, _ =", "1 else: self.setPalette(QtGui.QPalette()) self.settings['isDarkMode'] = 0 def startup_behaviour(self): if self.boxStartup.isChecked(): functions.run_on_startup(True) self.settings['runOnStartup'] =", "imageLbl: QtWidgets.QLabel): if self.settings['isSlideshow']: fileName = functions.random_image(fileName) pixmap = QtGui.QPixmap(fileName) pixmap = pixmap.scaled(", "if __name__ == \"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__))) # To pick up settings & images functions.set_process_explicit()", "1 else: self.isClosedFromTray = True self.settings['minimizeToTray'] = 0 def show_window(self): functions.set_background_priority(False) getattr(self, \"raise\")()", "self.timeDusk.time() < now <= self.timeNight.time(): imageFile = self.settings['labelDDImg'] else: imageFile = self.settings['labelNightImg'] if", "from PyQt5 import QtCore, QtGui, QtWidgets import custom_qt import functions from ui_mainwindow import", "self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False) def default_times(self): d = functions.get_times() self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour, d['dawn'].minute, 0)) self.timeDay.setTime(QtCore.QTime(", "None, \"Select image\", \"\", \"Image files (*.png *.jpg *.jpeg *.bmp)\") if fileName: self.set_image(fileName,", "\"\"\" Main script for Circadian Desktops app. 
Settings file and logo images are", "self.shuffleTimer.stop() self.settings['isSlideshow'] = 0 self.spinShuffleTime.setReadOnly(True) elif self.comboBox.currentText() == 'slideshow from folders': self.shuffleTimer.start(self.settings['shuffleTime'] *", "if self.timeDawn.time() < now <= self.timeDay.time(): imageFile = self.settings['labelDDImg'] elif self.timeDay.time() < now", "str, imageLbl: QtWidgets.QLabel): if self.settings['isSlideshow']: fileName = functions.random_image(fileName) pixmap = QtGui.QPixmap(fileName) pixmap =", "\"settings.txt\" logoFile = \"Icons\\\\logo.png\" class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): \"\"\" MainWindow class for the UI.", "fileName: self.set_image(fileName, imageLbl) self.set_desktop() def shuffle_images(self): self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime']", "'' # connect widgets to methods self.btnSelectDayImg.clicked.connect( lambda: self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect( lambda: self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect(", "if self.boxDark.isChecked(): self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode'] = 1 else: self.setPalette(QtGui.QPalette()) self.settings['isDarkMode'] = 0 def startup_behaviour(self):", "= MainWindow(settings=settingsFile) app.setStyle('fusion') if '/noshow' in sys.argv: functions.set_background_priority(True) else: ui.show() app.setWindowIcon(QtGui.QIcon(logoFile)) ui.setWindowIcon(QtGui.QIcon(logoFile)) sys.exit(app.exec_())", "imageFile = self.settings['labelNightImg'] if imageFile != self.activeImage: functions.set_desktop(imageFile) self.activeImage = imageFile self.mainTimer.start(60000) def", "0)) self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']), int(self.settings['duskmin']), 0)) 
self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']), int(self.settings['nightmin']), 0)) self.custom_times() self.radioCustomTimes.setChecked(True) else: self.default_times()", "self.settings['nightmin'] = self.timeNight.time().minute() else: self.settings['isCustomTimes'] = 0 functions.write_settings(self.settingsPath, self.settings) if self.isClosedFromTray: event.accept() else:", "elif self.comboBox.currentText() == 'slideshow from folders': self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.settings['isSlideshow'] = 1 self.spinShuffleTime.setReadOnly(False)", "else: self.settings['isCustomTimes'] = 0 functions.write_settings(self.settingsPath, self.settings) if self.isClosedFromTray: event.accept() else: event.ignore() self.hide() functions.set_background_priority(True)", "# setup super(MainWindow, self).__init__(parent) self.setupUi(self) self.settingsPath = settings self.isClosedFromTray = False self.settings =", "lambda: self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect( lambda: self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style) self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette) self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour) #", "d['sunset'].hour, d['sunset'].minute, 0)) self.timeNight.setTime(QtCore.QTime( d['dusk'].hour, d['dusk'].minute, 0)) self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True) self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True) def load_preferences(self):", "QtGui, QtWidgets import custom_qt import functions from ui_mainwindow import Ui_MainWindow settingsFile = \"settings.txt\"", "# timers self.mainTimer = QtCore.QTimer() 
self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer = QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images) # populate data", "timers self.mainTimer = QtCore.QTimer() self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer = QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images) # populate data self.set_image(self.settings['labelDayImg'],", "uses logo icon app = QtWidgets.QApplication([]) ui = MainWindow(settings=settingsFile) app.setStyle('fusion') if '/noshow' in", "self.settings['labelDayImg'] elif self.timeDusk.time() < now <= self.timeNight.time(): imageFile = self.settings['labelDDImg'] else: imageFile =", "self.set_background_style() def set_image(self, fileName: str, imageLbl: QtWidgets.QLabel): if self.settings['isSlideshow']: fileName = functions.random_image(fileName) pixmap", "app. Settings file and logo images are stored locally. Contains MainWindow class and", "set_palette(self): if self.boxDark.isChecked(): self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode'] = 1 else: self.setPalette(QtGui.QPalette()) self.settings['isDarkMode'] = 0 def", "self.show() def close_from_tray(self): self.isClosedFromTray = True self.close() def closeEvent(self, event): if self.radioCustomTimes.isChecked(): self.settings['isCustomTimes']", "d = functions.get_times() self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour, d['dawn'].minute, 0)) self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour, d['sunrise'].minute, 0)) self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour,", "self.settings['minimizeToTray'] = 0 def show_window(self): functions.set_background_priority(False) getattr(self, \"raise\")() self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState) self.show() def close_from_tray(self):", "self.settings['nighthour'] = self.timeNight.time().hour() self.settings['nightmin'] = self.timeNight.time().minute() else: self.settings['isCustomTimes'] = 0 
functions.write_settings(self.settingsPath, self.settings) if", "0 self.spinShuffleTime.setReadOnly(True) elif self.comboBox.currentText() == 'slideshow from folders': self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.settings['isSlideshow'] =", "1 self.spinShuffleTime.setReadOnly(False) def set_shuffle_time(self): newTime = self.spinShuffleTime.value() * 60000 if self.shuffleTimer.remainingTime() > newTime:", "self.settings['shuffleTime'] = self.spinShuffleTime.value() def load_times(self): if int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0)) self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']),", "Circadian Desktops app. Settings file and logo images are stored locally. Contains MainWindow", "self.trayIcon.show() self.trayMenu = QtWidgets.QMenu() self.trayMenu.addAction(\"Open Circadian Desktops\", self.show_window) self.trayMenu.addSeparator() self.trayMenu.addAction( \"Exit Circadian Desktops\",", "self.settings['minimizeToTray']: self.boxMinimize.setChecked(True) else: self.isClosedFromTray = True if self.settings['runOnStartup']: self.boxStartup.setChecked(True) def set_palette(self): if self.boxDark.isChecked():", "def set_image(self, fileName: str, imageLbl: QtWidgets.QLabel): if self.settings['isSlideshow']: fileName = functions.random_image(fileName) pixmap =", "self.settings['isDarkMode']: self.boxDark.setChecked(True) self.set_palette() if self.settings['minimizeToTray']: self.boxMinimize.setChecked(True) else: self.isClosedFromTray = True if self.settings['runOnStartup']: self.boxStartup.setChecked(True)", "self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.set_desktop() def set_desktop(self): now = QtCore.QTime.currentTime() if self.timeDawn.time() <", "Inherits from Ui_MainWindow, which contains the layout of the widgets. 
\"\"\" def __init__(self,", "self.radioCustomTimes.isChecked(): self.settings['isCustomTimes'] = 1 self.settings['dawnhour'] = self.timeDawn.time().hour() self.settings['dawnmin'] = self.timeDawn.time().minute() self.settings['dayhour'] = self.timeDay.time().hour()", "self.boxDark.stateChanged.connect(self.set_palette) self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour) # tray icon self.trayIcon = QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian Desktops\") self.trayIcon.activated.connect(self.__icon_activated)", "= QtGui.QPixmap(fileName) pixmap = pixmap.scaled( imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()] = fileName", "startup_behaviour(self): if self.boxStartup.isChecked(): functions.run_on_startup(True) self.settings['runOnStartup'] = 1 else: functions.run_on_startup(False) self.settings['runOnStartup'] = 0 def", "== \"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__))) # To pick up settings & images functions.set_process_explicit() # So", "QtCore.QTime.currentTime() if self.timeDawn.time() < now <= self.timeDay.time(): imageFile = self.settings['labelDDImg'] elif self.timeDay.time() <", "self.close() def closeEvent(self, event): if self.radioCustomTimes.isChecked(): self.settings['isCustomTimes'] = 1 self.settings['dawnhour'] = self.timeDawn.time().hour() self.settings['dawnmin']", "self.trayIcon = QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian Desktops\") self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show() self.trayMenu = QtWidgets.QMenu() self.trayMenu.addAction(\"Open Circadian", "import Ui_MainWindow settingsFile = \"settings.txt\" logoFile = \"Icons\\\\logo.png\" class 
MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): \"\"\" MainWindow", "widgets. \"\"\" def __init__(self, parent=None, settings=None): # setup super(MainWindow, self).__init__(parent) self.setupUi(self) self.settingsPath =", "= QtWidgets.QFileDialog.getOpenFileName( None, \"Select image\", \"\", \"Image files (*.png *.jpg *.jpeg *.bmp)\") if", "self.timeNight.time(): imageFile = self.settings['labelDDImg'] else: imageFile = self.settings['labelNightImg'] if imageFile != self.activeImage: functions.set_desktop(imageFile)", "set_background_style(self): if self.comboBox.currentText() == 'single image': self.shuffleTimer.stop() self.settings['isSlideshow'] = 0 self.spinShuffleTime.setReadOnly(True) elif self.comboBox.currentText()", "self.spinShuffleTime.setReadOnly(False) def set_shuffle_time(self): newTime = self.spinShuffleTime.value() * 60000 if self.shuffleTimer.remainingTime() > newTime: self.shuffleTimer.start(newTime)", "<reponame>Luke943/CircadianDesktops<gh_stars>0 \"\"\" Main script for Circadian Desktops app. 
Settings file and logo images", "<= self.timeDay.time(): imageFile = self.settings['labelDDImg'] elif self.timeDay.time() < now <= self.timeDusk.time(): imageFile =", "self.boxDark.setChecked(True) self.set_palette() if self.settings['minimizeToTray']: self.boxMinimize.setChecked(True) else: self.isClosedFromTray = True if self.settings['runOnStartup']: self.boxStartup.setChecked(True) def", "== QtWidgets.QSystemTrayIcon.DoubleClick or reason == QtWidgets.QSystemTrayIcon.Trigger: self.show_window() if __name__ == \"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__))) #", "os.chdir(os.path.dirname(os.path.abspath(__file__))) # To pick up settings & images functions.set_process_explicit() # So Windows uses", "QtCore.QTimer() self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer = QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images) # populate data self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg)", "self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour, d['sunset'].minute, 0)) self.timeNight.setTime(QtCore.QTime( d['dusk'].hour, d['dusk'].minute, 0)) self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True) self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True) def", "1 self.settings['dawnhour'] = self.timeDawn.time().hour() self.settings['dawnmin'] = self.timeDawn.time().minute() self.settings['dayhour'] = self.timeDay.time().hour() self.settings['daymin'] = self.timeDay.time().minute()", "So Windows uses logo icon app = QtWidgets.QApplication([]) ui = MainWindow(settings=settingsFile) app.setStyle('fusion') if", "def close_from_tray(self): self.isClosedFromTray = True self.close() def closeEvent(self, event): if self.radioCustomTimes.isChecked(): self.settings['isCustomTimes'] =", "def set_background_style(self): if self.comboBox.currentText() == 'single image': self.shuffleTimer.stop() self.settings['isSlideshow'] = 0 
self.spinShuffleTime.setReadOnly(True) elif", "def startup_behaviour(self): if self.boxStartup.isChecked(): functions.run_on_startup(True) self.settings['runOnStartup'] = 1 else: functions.run_on_startup(False) self.settings['runOnStartup'] = 0", "= QtCore.QTimer() self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer = QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images) # populate data self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'],", "# So Windows uses logo icon app = QtWidgets.QApplication([]) ui = MainWindow(settings=settingsFile) app.setStyle('fusion')", "*.jpg *.jpeg *.bmp)\") if fileName: self.set_image(fileName, imageLbl) self.set_desktop() def shuffle_images(self): self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'],", "if self.settings['isSlideshow']: fileName = functions.random_image(fileName) pixmap = QtGui.QPixmap(fileName) pixmap = pixmap.scaled( imageLbl.width(), imageLbl.height(),", "parent=None, settings=None): # setup super(MainWindow, self).__init__(parent) self.setupUi(self) self.settingsPath = settings self.isClosedFromTray = False", "icon self.trayIcon = QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian Desktops\") self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show() self.trayMenu = QtWidgets.QMenu() self.trayMenu.addAction(\"Open", "else: functions.run_on_startup(False) self.settings['runOnStartup'] = 0 def minimize_behaviour(self): if self.boxMinimize.isChecked(): self.isClosedFromTray = False self.settings['minimizeToTray']", "the layout of the widgets. 
\"\"\" def __init__(self, parent=None, settings=None): # setup super(MainWindow,", "= False self.settings['minimizeToTray'] = 1 else: self.isClosedFromTray = True self.settings['minimizeToTray'] = 0 def", "self.set_desktop() def shuffle_images(self): self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.set_desktop()", "import sys from PyQt5 import QtCore, QtGui, QtWidgets import custom_qt import functions from", "are stored locally. Contains MainWindow class and script to run app. \"\"\" import", "self.settings['duskhour'] = self.timeDusk.time().hour() self.settings['duskmin'] = self.timeDusk.time().minute() self.settings['nighthour'] = self.timeNight.time().hour() self.settings['nightmin'] = self.timeNight.time().minute() else:", "images functions.set_process_explicit() # So Windows uses logo icon app = QtWidgets.QApplication([]) ui =", "def load_times(self): if int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0)) self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']), int(self.settings['daymin']), 0)) self.timeDusk.setTime(QtCore.QTime(", "= 1 self.settings['dawnhour'] = self.timeDawn.time().hour() self.settings['dawnmin'] = self.timeDawn.time().minute() self.settings['dayhour'] = self.timeDay.time().hour() self.settings['daymin'] =", "self.activeImage: functions.set_desktop(imageFile) self.activeImage = imageFile self.mainTimer.start(60000) def set_background_style(self): if self.comboBox.currentText() == 'single image':", "(*.png *.jpg *.jpeg *.bmp)\") if fileName: self.set_image(fileName, imageLbl) self.set_desktop() def shuffle_images(self): self.set_image(self.settings['labelDayImg'], self.labelDayImg)", "custom_qt import functions from ui_mainwindow import 
Ui_MainWindow settingsFile = \"settings.txt\" logoFile = \"Icons\\\\logo.png\"", "self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style) self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette) self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour) # tray icon self.trayIcon =", "self.boxMinimize.isChecked(): self.isClosedFromTray = False self.settings['minimizeToTray'] = 1 else: self.isClosedFromTray = True self.settings['minimizeToTray'] =", "QtWidgets.QFileDialog.getOpenFileName( None, \"Select image\", \"\", \"Image files (*.png *.jpg *.jpeg *.bmp)\") if fileName:", "self.isClosedFromTray = False self.settings['minimizeToTray'] = 1 else: self.isClosedFromTray = True self.settings['minimizeToTray'] = 0", "self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False) def default_times(self): d = functions.get_times() self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour, d['dawn'].minute, 0)) self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour,", "Ui_MainWindow, which contains the layout of the widgets. 
\"\"\" def __init__(self, parent=None, settings=None):", "self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette) self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour) # tray icon self.trayIcon = QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile))", "= QtWidgets.QMenu() self.trayMenu.addAction(\"Open Circadian Desktops\", self.show_window) self.trayMenu.addSeparator() self.trayMenu.addAction( \"Exit Circadian Desktops\", self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu)", "self.trayMenu.addAction( \"Exit Circadian Desktops\", self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu) # timers self.mainTimer = QtCore.QTimer() self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer", "= \"Icons\\\\logo.png\" class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): \"\"\" MainWindow class for the UI. 
Inherits from", "\"Image files (*.png *.jpg *.jpeg *.bmp)\") if fileName: self.set_image(fileName, imageLbl) self.set_desktop() def shuffle_images(self):", "self.timeNight.setTime(QtCore.QTime( d['dusk'].hour, d['dusk'].minute, 0)) self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True) self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True) def load_preferences(self): if self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1)", "QtCore, QtGui, QtWidgets import custom_qt import functions from ui_mainwindow import Ui_MainWindow settingsFile =", "set_shuffle_time(self): newTime = self.spinShuffleTime.value() * 60000 if self.shuffleTimer.remainingTime() > newTime: self.shuffleTimer.start(newTime) self.settings['shuffleTime'] =", "!= self.activeImage: functions.set_desktop(imageFile) self.activeImage = imageFile self.mainTimer.start(60000) def set_background_style(self): if self.comboBox.currentText() == 'single", "event.ignore() self.hide() functions.set_background_priority(True) def __icon_activated(self, reason): if reason == QtWidgets.QSystemTrayIcon.DoubleClick or reason ==", "= QtCore.QTime.currentTime() if self.timeDawn.time() < now <= self.timeDay.time(): imageFile = self.settings['labelDDImg'] elif self.timeDay.time()", "= settings self.isClosedFromTray = False self.settings = functions.get_settings(settings) self.activeImage = '' # connect", "pixmap.scaled( imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()] = fileName def get_image(self, imageLbl: QtWidgets.QLabel):", "script to run app. 
\"\"\" import os import sys from PyQt5 import QtCore,", "self.isClosedFromTray: event.accept() else: event.ignore() self.hide() functions.set_background_priority(True) def __icon_activated(self, reason): if reason == QtWidgets.QSystemTrayIcon.DoubleClick", "icon app = QtWidgets.QApplication([]) ui = MainWindow(settings=settingsFile) app.setStyle('fusion') if '/noshow' in sys.argv: functions.set_background_priority(True)", "= self.spinShuffleTime.value() * 60000 if self.shuffleTimer.remainingTime() > newTime: self.shuffleTimer.start(newTime) self.settings['shuffleTime'] = self.spinShuffleTime.value() def", "Ui_MainWindow): \"\"\" MainWindow class for the UI. Inherits from Ui_MainWindow, which contains the", "setup super(MainWindow, self).__init__(parent) self.setupUi(self) self.settingsPath = settings self.isClosedFromTray = False self.settings = functions.get_settings(settings)", "def set_shuffle_time(self): newTime = self.spinShuffleTime.value() * 60000 if self.shuffleTimer.remainingTime() > newTime: self.shuffleTimer.start(newTime) self.settings['shuffleTime']", "self.settings['isDarkMode'] = 0 def startup_behaviour(self): if self.boxStartup.isChecked(): functions.run_on_startup(True) self.settings['runOnStartup'] = 1 else: functions.run_on_startup(False)", "newTime = self.spinShuffleTime.value() * 60000 if self.shuffleTimer.remainingTime() > newTime: self.shuffleTimer.start(newTime) self.settings['shuffleTime'] = self.spinShuffleTime.value()", "show_window(self): functions.set_background_priority(False) getattr(self, \"raise\")() self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState) self.show() def close_from_tray(self): self.isClosedFromTray = True self.close()", "if self.boxMinimize.isChecked(): self.isClosedFromTray = False self.settings['minimizeToTray'] = 1 else: self.isClosedFromTray = True self.settings['minimizeToTray']", "functions.write_settings(self.settingsPath, self.settings) if self.isClosedFromTray: event.accept() else: event.ignore() 
self.hide() functions.set_background_priority(True) def __icon_activated(self, reason): if", "self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.set_desktop() def set_desktop(self): now = QtCore.QTime.currentTime() if self.timeDawn.time()", "now <= self.timeNight.time(): imageFile = self.settings['labelDDImg'] else: imageFile = self.settings['labelNightImg'] if imageFile !=", "if self.radioCustomTimes.isChecked(): self.settings['isCustomTimes'] = 1 self.settings['dawnhour'] = self.timeDawn.time().hour() self.settings['dawnmin'] = self.timeDawn.time().minute() self.settings['dayhour'] =", "fileName: str, imageLbl: QtWidgets.QLabel): if self.settings['isSlideshow']: fileName = functions.random_image(fileName) pixmap = QtGui.QPixmap(fileName) pixmap", "now = QtCore.QTime.currentTime() if self.timeDawn.time() < now <= self.timeDay.time(): imageFile = self.settings['labelDDImg'] elif", "close_from_tray(self): self.isClosedFromTray = True self.close() def closeEvent(self, event): if self.radioCustomTimes.isChecked(): self.settings['isCustomTimes'] = 1", "= self.timeDay.time().minute() self.settings['duskhour'] = self.timeDusk.time().hour() self.settings['duskmin'] = self.timeDusk.time().minute() self.settings['nighthour'] = self.timeNight.time().hour() self.settings['nightmin'] =", "self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']), int(self.settings['duskmin']), 0)) self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']), int(self.settings['nightmin']), 0)) self.custom_times() self.radioCustomTimes.setChecked(True) else: self.default_times() def", "self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.load_times() self.load_preferences() self.set_desktop() self.set_background_style() def set_image(self, fileName: str, imageLbl: QtWidgets.QLabel): if", "= functions.get_times() self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour, 
d['dawn'].minute, 0)) self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour, d['sunrise'].minute, 0)) self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour, d['sunset'].minute,", "= self.settings['labelNightImg'] if imageFile != self.activeImage: functions.set_desktop(imageFile) self.activeImage = imageFile self.mainTimer.start(60000) def set_background_style(self):", "\"Exit Circadian Desktops\", self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu) # timers self.mainTimer = QtCore.QTimer() self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer =", "0 def minimize_behaviour(self): if self.boxMinimize.isChecked(): self.isClosedFromTray = False self.settings['minimizeToTray'] = 1 else: self.isClosedFromTray", "self.shuffleTimer.timeout.connect(self.shuffle_images) # populate data self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.load_times() self.load_preferences() self.set_desktop()", "self.set_desktop() self.set_background_style() def set_image(self, fileName: str, imageLbl: QtWidgets.QLabel): if self.settings['isSlideshow']: fileName = functions.random_image(fileName)", "QtWidgets import custom_qt import functions from ui_mainwindow import Ui_MainWindow settingsFile = \"settings.txt\" logoFile", "QtWidgets.QSystemTrayIcon.DoubleClick or reason == QtWidgets.QSystemTrayIcon.Trigger: self.show_window() if __name__ == \"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__))) # To", "int(self.settings['daymin']), 0)) self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']), int(self.settings['duskmin']), 0)) self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']), int(self.settings['nightmin']), 0)) self.custom_times() self.radioCustomTimes.setChecked(True) else:", "d['dusk'].minute, 0)) self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True) 
self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True) def load_preferences(self): if self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1) else: self.spinShuffleTime.setReadOnly(True)", "self.timeNight.setReadOnly(False) def default_times(self): d = functions.get_times() self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour, d['dawn'].minute, 0)) self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour, d['sunrise'].minute,", "functions.set_process_explicit() # So Windows uses logo icon app = QtWidgets.QApplication([]) ui = MainWindow(settings=settingsFile)", "up settings & images functions.set_process_explicit() # So Windows uses logo icon app =", "functions.run_on_startup(False) self.settings['runOnStartup'] = 0 def minimize_behaviour(self): if self.boxMinimize.isChecked(): self.isClosedFromTray = False self.settings['minimizeToTray'] =", "if imageFile != self.activeImage: functions.set_desktop(imageFile) self.activeImage = imageFile self.mainTimer.start(60000) def set_background_style(self): if self.comboBox.currentText()", "functions.random_image(fileName) pixmap = QtGui.QPixmap(fileName) pixmap = pixmap.scaled( imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()]", "QtGui.QPixmap(fileName) pixmap = pixmap.scaled( imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()] = fileName def", "self.timeDawn.time().hour() self.settings['dawnmin'] = self.timeDawn.time().minute() self.settings['dayhour'] = self.timeDay.time().hour() self.settings['daymin'] = self.timeDay.time().minute() self.settings['duskhour'] = self.timeDusk.time().hour()", "functions.set_desktop(imageFile) self.activeImage = imageFile self.mainTimer.start(60000) def set_background_style(self): if self.comboBox.currentText() == 'single image': 
self.shuffleTimer.stop()", "stored locally. Contains MainWindow class and script to run app. \"\"\" import os", "pixmap = QtGui.QPixmap(fileName) pixmap = pixmap.scaled( imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()] =", "0 def show_window(self): functions.set_background_priority(False) getattr(self, \"raise\")() self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState) self.show() def close_from_tray(self): self.isClosedFromTray =", "Desktops app. Settings file and logo images are stored locally. Contains MainWindow class", "def show_window(self): functions.set_background_priority(False) getattr(self, \"raise\")() self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState) self.show() def close_from_tray(self): self.isClosedFromTray = True", "imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()] = fileName def get_image(self, imageLbl: QtWidgets.QLabel): fileName,", "Circadian Desktops\", self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu) # timers self.mainTimer = QtCore.QTimer() self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer = QtCore.QTimer()", "def get_image(self, imageLbl: QtWidgets.QLabel): fileName, _ = QtWidgets.QFileDialog.getOpenFileName( None, \"Select image\", \"\", \"Image", "self.settings = functions.get_settings(settings) self.activeImage = '' # connect widgets to methods self.btnSelectDayImg.clicked.connect( lambda:", "= self.spinShuffleTime.value() def load_times(self): if int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0)) self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']), int(self.settings['daymin']),", "from ui_mainwindow import Ui_MainWindow settingsFile 
= \"settings.txt\" logoFile = \"Icons\\\\logo.png\" class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow):", "0)) self.timeNight.setTime(QtCore.QTime( d['dusk'].hour, d['dusk'].minute, 0)) self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True) self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True) def load_preferences(self): if self.settings['isSlideshow']:", "self.spinShuffleTime.value() def load_times(self): if int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0)) self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']), int(self.settings['daymin']), 0))", "now <= self.timeDusk.time(): imageFile = self.settings['labelDayImg'] elif self.timeDusk.time() < now <= self.timeNight.time(): imageFile", "\"raise\")() self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState) self.show() def close_from_tray(self): self.isClosedFromTray = True self.close() def closeEvent(self, event):", "self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect( lambda: self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style) self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette) self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour) # tray", "60000) self.settings['isSlideshow'] = 1 self.spinShuffleTime.setReadOnly(False) def set_shuffle_time(self): newTime = self.spinShuffleTime.value() * 60000 if", "settingsFile = \"settings.txt\" logoFile = \"Icons\\\\logo.png\" class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): \"\"\" MainWindow class for", "self.radioCustomTimes.setChecked(True) else: self.default_times() def custom_times(self): 
self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False) def default_times(self): d =", "self.trayIcon.setContextMenu(self.trayMenu) # timers self.mainTimer = QtCore.QTimer() self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer = QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images) # populate", "self.boxStartup.setChecked(True) def set_palette(self): if self.boxDark.isChecked(): self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode'] = 1 else: self.setPalette(QtGui.QPalette()) self.settings['isDarkMode'] =", "self.shuffleTimer.remainingTime() > newTime: self.shuffleTimer.start(newTime) self.settings['shuffleTime'] = self.spinShuffleTime.value() def load_times(self): if int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']),", "import QtCore, QtGui, QtWidgets import custom_qt import functions from ui_mainwindow import Ui_MainWindow settingsFile", "self.timeDay.time() < now <= self.timeDusk.time(): imageFile = self.settings['labelDayImg'] elif self.timeDusk.time() < now <=", "self).__init__(parent) self.setupUi(self) self.settingsPath = settings self.isClosedFromTray = False self.settings = functions.get_settings(settings) self.activeImage =", "MainWindow class and script to run app. 
\"\"\" import os import sys from", "self.isClosedFromTray = False self.settings = functions.get_settings(settings) self.activeImage = '' # connect widgets to", "lambda: self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style) self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette) self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour) # tray icon self.trayIcon", "if self.comboBox.currentText() == 'single image': self.shuffleTimer.stop() self.settings['isSlideshow'] = 0 self.spinShuffleTime.setReadOnly(True) elif self.comboBox.currentText() ==", "reason == QtWidgets.QSystemTrayIcon.Trigger: self.show_window() if __name__ == \"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__))) # To pick up", "QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images) # populate data self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.load_times() self.load_preferences()", "self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True) self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True) def load_preferences(self): if self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1) else: self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime']) if", "self.timeNight.time().minute() else: self.settings['isCustomTimes'] = 0 functions.write_settings(self.settingsPath, self.settings) if self.isClosedFromTray: event.accept() else: event.ignore() self.hide()", "shuffle_images(self): self.set_image(self.settings['labelDayImg'], self.labelDayImg) 
self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.set_desktop() def set_desktop(self):", "for Circadian Desktops app. Settings file and logo images are stored locally. Contains", "self.settings['runOnStartup'] = 1 else: functions.run_on_startup(False) self.settings['runOnStartup'] = 0 def minimize_behaviour(self): if self.boxMinimize.isChecked(): self.isClosedFromTray", "\"Icons\\\\logo.png\" class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): \"\"\" MainWindow class for the UI. Inherits from Ui_MainWindow,", "if reason == QtWidgets.QSystemTrayIcon.DoubleClick or reason == QtWidgets.QSystemTrayIcon.Trigger: self.show_window() if __name__ == \"__main__\":", "file and logo images are stored locally. Contains MainWindow class and script to", "= QtWidgets.QApplication([]) ui = MainWindow(settings=settingsFile) app.setStyle('fusion') if '/noshow' in sys.argv: functions.set_background_priority(True) else: ui.show()", "imageFile self.mainTimer.start(60000) def set_background_style(self): if self.comboBox.currentText() == 'single image': self.shuffleTimer.stop() self.settings['isSlideshow'] = 0", "fileName = functions.random_image(fileName) pixmap = QtGui.QPixmap(fileName) pixmap = pixmap.scaled( imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap)", "self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect( lambda: self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect( lambda: self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style) self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette) 
self.boxMinimize.stateChanged.connect(self.minimize_behaviour)", "= 1 else: functions.run_on_startup(False) self.settings['runOnStartup'] = 0 def minimize_behaviour(self): if self.boxMinimize.isChecked(): self.isClosedFromTray =", "pick up settings & images functions.set_process_explicit() # So Windows uses logo icon app", "Desktops\") self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show() self.trayMenu = QtWidgets.QMenu() self.trayMenu.addAction(\"Open Circadian Desktops\", self.show_window) self.trayMenu.addSeparator() self.trayMenu.addAction( \"Exit", "self.load_times() self.load_preferences() self.set_desktop() self.set_background_style() def set_image(self, fileName: str, imageLbl: QtWidgets.QLabel): if self.settings['isSlideshow']: fileName", "QtWidgets.QLabel): if self.settings['isSlideshow']: fileName = functions.random_image(fileName) pixmap = QtGui.QPixmap(fileName) pixmap = pixmap.scaled( imageLbl.width(),", "functions.get_times() self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour, d['dawn'].minute, 0)) self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour, d['sunrise'].minute, 0)) self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour, d['sunset'].minute, 0))", "app. \"\"\" import os import sys from PyQt5 import QtCore, QtGui, QtWidgets import", "MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): \"\"\" MainWindow class for the UI. 
Inherits from Ui_MainWindow, which contains", "self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False) def default_times(self): d = functions.get_times() self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour, d['dawn'].minute, 0))", "image': self.shuffleTimer.stop() self.settings['isSlideshow'] = 0 self.spinShuffleTime.setReadOnly(True) elif self.comboBox.currentText() == 'slideshow from folders': self.shuffleTimer.start(self.settings['shuffleTime']", "minimize_behaviour(self): if self.boxMinimize.isChecked(): self.isClosedFromTray = False self.settings['minimizeToTray'] = 1 else: self.isClosedFromTray = True", "0 functions.write_settings(self.settingsPath, self.settings) if self.isClosedFromTray: event.accept() else: event.ignore() self.hide() functions.set_background_priority(True) def __icon_activated(self, reason):", "def load_preferences(self): if self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1) else: self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime']) if self.settings['isDarkMode']: self.boxDark.setChecked(True) self.set_palette() if", "self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0)) self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']), int(self.settings['daymin']), 0)) self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']), int(self.settings['duskmin']), 0)) self.timeNight.setTime(QtCore.QTime(", "images are stored locally. Contains MainWindow class and script to run app. 
\"\"\"", "self.settings['isCustomTimes'] = 1 self.settings['dawnhour'] = self.timeDawn.time().hour() self.settings['dawnmin'] = self.timeDawn.time().minute() self.settings['dayhour'] = self.timeDay.time().hour() self.settings['daymin']", "self.activeImage = imageFile self.mainTimer.start(60000) def set_background_style(self): if self.comboBox.currentText() == 'single image': self.shuffleTimer.stop() self.settings['isSlideshow']", "# connect widgets to methods self.btnSelectDayImg.clicked.connect( lambda: self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect( lambda: self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect( lambda:", "self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1) else: self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime']) if self.settings['isDarkMode']: self.boxDark.setChecked(True) self.set_palette() if self.settings['minimizeToTray']: self.boxMinimize.setChecked(True) else:", "custom_times(self): self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False) def default_times(self): d = functions.get_times() self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour, d['dawn'].minute,", "self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode'] = 1 else: self.setPalette(QtGui.QPalette()) self.settings['isDarkMode'] = 0 def startup_behaviour(self): if self.boxStartup.isChecked():", "functions.set_background_priority(False) getattr(self, \"raise\")() self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState) self.show() def close_from_tray(self): self.isClosedFromTray = True self.close() def", "self.timeDusk.time().minute() self.settings['nighthour'] = self.timeNight.time().hour() self.settings['nightmin'] = self.timeNight.time().minute() else: self.settings['isCustomTimes'] = 0 functions.write_settings(self.settingsPath, self.settings)", 
"self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.load_times() self.load_preferences() self.set_desktop() self.set_background_style() def set_image(self, fileName:", "True self.settings['minimizeToTray'] = 0 def show_window(self): functions.set_background_priority(False) getattr(self, \"raise\")() self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState) self.show() def", "self.settings) if self.isClosedFromTray: event.accept() else: event.ignore() self.hide() functions.set_background_priority(True) def __icon_activated(self, reason): if reason", "= 0 def startup_behaviour(self): if self.boxStartup.isChecked(): functions.run_on_startup(True) self.settings['runOnStartup'] = 1 else: functions.run_on_startup(False) self.settings['runOnStartup']", "and logo images are stored locally. Contains MainWindow class and script to run", "self.timeDusk.time(): imageFile = self.settings['labelDayImg'] elif self.timeDusk.time() < now <= self.timeNight.time(): imageFile = self.settings['labelDDImg']", "self.show_window() if __name__ == \"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__))) # To pick up settings & images", "if self.settings['isDarkMode']: self.boxDark.setChecked(True) self.set_palette() if self.settings['minimizeToTray']: self.boxMinimize.setChecked(True) else: self.isClosedFromTray = True if self.settings['runOnStartup']:", "self.trayMenu.addSeparator() self.trayMenu.addAction( \"Exit Circadian Desktops\", self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu) # timers self.mainTimer = QtCore.QTimer() self.mainTimer.timeout.connect(self.set_desktop)", "self.settings['isSlideshow']: fileName = functions.random_image(fileName) pixmap = QtGui.QPixmap(fileName) pixmap = pixmap.scaled( imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio)", "the widgets. 
\"\"\" def __init__(self, parent=None, settings=None): # setup super(MainWindow, self).__init__(parent) self.setupUi(self) self.settingsPath", "populate data self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.load_times() self.load_preferences() self.set_desktop() self.set_background_style() def", "= self.settings['labelDDImg'] elif self.timeDay.time() < now <= self.timeDusk.time(): imageFile = self.settings['labelDayImg'] elif self.timeDusk.time()", "self.settings['labelNightImg'] if imageFile != self.activeImage: functions.set_desktop(imageFile) self.activeImage = imageFile self.mainTimer.start(60000) def set_background_style(self): if", "logoFile = \"Icons\\\\logo.png\" class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): \"\"\" MainWindow class for the UI. Inherits", "self.settings['runOnStartup'] = 0 def minimize_behaviour(self): if self.boxMinimize.isChecked(): self.isClosedFromTray = False self.settings['minimizeToTray'] = 1", "self.hide() functions.set_background_priority(True) def __icon_activated(self, reason): if reason == QtWidgets.QSystemTrayIcon.DoubleClick or reason == QtWidgets.QSystemTrayIcon.Trigger:", "import custom_qt import functions from ui_mainwindow import Ui_MainWindow settingsFile = \"settings.txt\" logoFile =", "False self.settings['minimizeToTray'] = 1 else: self.isClosedFromTray = True self.settings['minimizeToTray'] = 0 def show_window(self):", "= QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images) # populate data self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.load_times()", "if self.settings['runOnStartup']: self.boxStartup.setChecked(True) def set_palette(self): if self.boxDark.isChecked(): 
self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode'] = 1 else: self.setPalette(QtGui.QPalette())", "self.comboBox.currentText() == 'single image': self.shuffleTimer.stop() self.settings['isSlideshow'] = 0 self.spinShuffleTime.setReadOnly(True) elif self.comboBox.currentText() == 'slideshow", "= True self.close() def closeEvent(self, event): if self.radioCustomTimes.isChecked(): self.settings['isCustomTimes'] = 1 self.settings['dawnhour'] =", "self.default_times() def custom_times(self): self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False) def default_times(self): d = functions.get_times() self.timeDawn.setTime(QtCore.QTime(", "self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour, d['dawn'].minute, 0)) self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour, d['sunrise'].minute, 0)) self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour, d['sunset'].minute, 0)) self.timeNight.setTime(QtCore.QTime(", "def shuffle_images(self): self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.set_desktop() def", "set_image(self, fileName: str, imageLbl: QtWidgets.QLabel): if self.settings['isSlideshow']: fileName = functions.random_image(fileName) pixmap = QtGui.QPixmap(fileName)", "'single image': self.shuffleTimer.stop() self.settings['isSlideshow'] = 0 self.spinShuffleTime.setReadOnly(True) elif self.comboBox.currentText() == 'slideshow from folders':", "= True if self.settings['runOnStartup']: self.boxStartup.setChecked(True) def set_palette(self): if self.boxDark.isChecked(): self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode'] = 1", "0)) self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True) self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True) def 
load_preferences(self): if self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1) else: self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime'])", "imageFile = self.settings['labelDDImg'] elif self.timeDay.time() < now <= self.timeDusk.time(): imageFile = self.settings['labelDayImg'] elif", "default_times(self): d = functions.get_times() self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour, d['dawn'].minute, 0)) self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour, d['sunrise'].minute, 0)) self.timeDusk.setTime(QtCore.QTime(", "def set_palette(self): if self.boxDark.isChecked(): self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode'] = 1 else: self.setPalette(QtGui.QPalette()) self.settings['isDarkMode'] = 0", "self.settings['minimizeToTray'] = 1 else: self.isClosedFromTray = True self.settings['minimizeToTray'] = 0 def show_window(self): functions.set_background_priority(False)", "self.activeImage = '' # connect widgets to methods self.btnSelectDayImg.clicked.connect( lambda: self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect( lambda:", "self.boxMinimize.setChecked(True) else: self.isClosedFromTray = True if self.settings['runOnStartup']: self.boxStartup.setChecked(True) def set_palette(self): if self.boxDark.isChecked(): self.setPalette(custom_qt.DarkPalette())", "self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']), int(self.settings['daymin']), 0)) self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']), int(self.settings['duskmin']), 0)) self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']), int(self.settings['nightmin']), 0)) self.custom_times()", "d['sunset'].minute, 0)) self.timeNight.setTime(QtCore.QTime( d['dusk'].hour, d['dusk'].minute, 0)) self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True) self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True) def load_preferences(self): if", "Circadian Desktops\", self.show_window) 
self.trayMenu.addSeparator() self.trayMenu.addAction( \"Exit Circadian Desktops\", self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu) # timers self.mainTimer", "closeEvent(self, event): if self.radioCustomTimes.isChecked(): self.settings['isCustomTimes'] = 1 self.settings['dawnhour'] = self.timeDawn.time().hour() self.settings['dawnmin'] = self.timeDawn.time().minute()", "self.spinShuffleTime.value() * 60000 if self.shuffleTimer.remainingTime() > newTime: self.shuffleTimer.start(newTime) self.settings['shuffleTime'] = self.spinShuffleTime.value() def load_times(self):", "* 60000) self.settings['isSlideshow'] = 1 self.spinShuffleTime.setReadOnly(False) def set_shuffle_time(self): newTime = self.spinShuffleTime.value() * 60000", "else: self.setPalette(QtGui.QPalette()) self.settings['isDarkMode'] = 0 def startup_behaviour(self): if self.boxStartup.isChecked(): functions.run_on_startup(True) self.settings['runOnStartup'] = 1", "app = QtWidgets.QApplication([]) ui = MainWindow(settings=settingsFile) app.setStyle('fusion') if '/noshow' in sys.argv: functions.set_background_priority(True) else:", "self.btnSelectNightImg.clicked.connect( lambda: self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style) self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette) self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour) # tray icon", "self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.settings['isSlideshow'] = 1 self.spinShuffleTime.setReadOnly(False) def set_shuffle_time(self): newTime = self.spinShuffleTime.value() *", "= 0 def minimize_behaviour(self): if self.boxMinimize.isChecked(): self.isClosedFromTray = False self.settings['minimizeToTray'] = 1 else:", 
"self.trayMenu.addAction(\"Open Circadian Desktops\", self.show_window) self.trayMenu.addSeparator() self.trayMenu.addAction( \"Exit Circadian Desktops\", self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu) # timers", "imageFile = self.settings['labelDayImg'] elif self.timeDusk.time() < now <= self.timeNight.time(): imageFile = self.settings['labelDDImg'] else:", "self.timeDay.time().minute() self.settings['duskhour'] = self.timeDusk.time().hour() self.settings['duskmin'] = self.timeDusk.time().minute() self.settings['nighthour'] = self.timeNight.time().hour() self.settings['nightmin'] = self.timeNight.time().minute()", "self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.set_desktop() def set_desktop(self): now =", "<= self.timeDusk.time(): imageFile = self.settings['labelDayImg'] elif self.timeDusk.time() < now <= self.timeNight.time(): imageFile =", "self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True) def load_preferences(self): if self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1) else: self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime']) if self.settings['isDarkMode']: self.boxDark.setChecked(True)", "QtWidgets.QLabel): fileName, _ = QtWidgets.QFileDialog.getOpenFileName( None, \"Select image\", \"\", \"Image files (*.png *.jpg", "MainWindow class for the UI. Inherits from Ui_MainWindow, which contains the layout of", "for the UI. 
Inherits from Ui_MainWindow, which contains the layout of the widgets.", "self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime']) if self.settings['isDarkMode']: self.boxDark.setChecked(True) self.set_palette() if self.settings['minimizeToTray']: self.boxMinimize.setChecked(True) else: self.isClosedFromTray = True", "imageFile != self.activeImage: functions.set_desktop(imageFile) self.activeImage = imageFile self.mainTimer.start(60000) def set_background_style(self): if self.comboBox.currentText() ==", "d['dawn'].minute, 0)) self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour, d['sunrise'].minute, 0)) self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour, d['sunset'].minute, 0)) self.timeNight.setTime(QtCore.QTime( d['dusk'].hour, d['dusk'].minute,", "self.set_image(fileName, imageLbl) self.set_desktop() def shuffle_images(self): self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime'] *", "class for the UI. 
Inherits from Ui_MainWindow, which contains the layout of the", "*.bmp)\") if fileName: self.set_image(fileName, imageLbl) self.set_desktop() def shuffle_images(self): self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'],", "self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer = QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images) # populate data self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'],", "settings=None): # setup super(MainWindow, self).__init__(parent) self.setupUi(self) self.settingsPath = settings self.isClosedFromTray = False self.settings", "int(self.settings['duskmin']), 0)) self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']), int(self.settings['nightmin']), 0)) self.custom_times() self.radioCustomTimes.setChecked(True) else: self.default_times() def custom_times(self): self.timeDawn.setReadOnly(False)", "os import sys from PyQt5 import QtCore, QtGui, QtWidgets import custom_qt import functions", "= False self.settings = functions.get_settings(settings) self.activeImage = '' # connect widgets to methods", "if self.shuffleTimer.remainingTime() > newTime: self.shuffleTimer.start(newTime) self.settings['shuffleTime'] = self.spinShuffleTime.value() def load_times(self): if int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime(", "int(self.settings['dawnmin']), 0)) self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']), int(self.settings['daymin']), 0)) self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']), int(self.settings['duskmin']), 0)) self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']), int(self.settings['nightmin']),", "data self.set_image(self.settings['labelDayImg'], self.labelDayImg) 
self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.load_times() self.load_preferences() self.set_desktop() self.set_background_style() def set_image(self,", "< now <= self.timeNight.time(): imageFile = self.settings['labelDDImg'] else: imageFile = self.settings['labelNightImg'] if imageFile", "self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.set_desktop() def set_desktop(self): now = QtCore.QTime.currentTime() if", "reason == QtWidgets.QSystemTrayIcon.DoubleClick or reason == QtWidgets.QSystemTrayIcon.Trigger: self.show_window() if __name__ == \"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__)))", "d['dawn'].hour, d['dawn'].minute, 0)) self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour, d['sunrise'].minute, 0)) self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour, d['sunset'].minute, 0)) self.timeNight.setTime(QtCore.QTime( d['dusk'].hour,", "imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()] = fileName def get_image(self, imageLbl: QtWidgets.QLabel): fileName, _ = QtWidgets.QFileDialog.getOpenFileName(", "True if self.settings['runOnStartup']: self.boxStartup.setChecked(True) def set_palette(self): if self.boxDark.isChecked(): self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode'] = 1 else:", "Contains MainWindow class and script to run app. 
\"\"\" import os import sys", "True self.close() def closeEvent(self, event): if self.radioCustomTimes.isChecked(): self.settings['isCustomTimes'] = 1 self.settings['dawnhour'] = self.timeDawn.time().hour()", "self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.set_desktop() def set_desktop(self): now = QtCore.QTime.currentTime() if self.timeDawn.time() < now", "int(self.settings['nightmin']), 0)) self.custom_times() self.radioCustomTimes.setChecked(True) else: self.default_times() def custom_times(self): self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False) def", "self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.load_times() self.load_preferences() self.set_desktop() self.set_background_style() def set_image(self, fileName: str, imageLbl:", "'slideshow from folders': self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.settings['isSlideshow'] = 1 self.spinShuffleTime.setReadOnly(False) def set_shuffle_time(self): newTime", "0)) self.timeDay.setTime(QtCore.QTime( int(self.settings['dayhour']), int(self.settings['daymin']), 0)) self.timeDusk.setTime(QtCore.QTime( int(self.settings['duskhour']), int(self.settings['duskmin']), 0)) self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']), int(self.settings['nightmin']), 0))", "self.custom_times() self.radioCustomTimes.setChecked(True) else: self.default_times() def custom_times(self): self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False) def default_times(self): d", "= 1 else: self.setPalette(QtGui.QPalette()) self.settings['isDarkMode'] = 0 def startup_behaviour(self): if self.boxStartup.isChecked(): functions.run_on_startup(True) self.settings['runOnStartup']", "False self.settings = functions.get_settings(settings) self.activeImage = '' # 
connect widgets to methods self.btnSelectDayImg.clicked.connect(", "1 else: functions.run_on_startup(False) self.settings['runOnStartup'] = 0 def minimize_behaviour(self): if self.boxMinimize.isChecked(): self.isClosedFromTray = False", "the UI. Inherits from Ui_MainWindow, which contains the layout of the widgets. \"\"\"", "self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState) self.show() def close_from_tray(self): self.isClosedFromTray = True self.close() def closeEvent(self, event): if", "self.boxStartup.stateChanged.connect(self.startup_behaviour) # tray icon self.trayIcon = QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian Desktops\") self.trayIcon.activated.connect(self.__icon_activated) self.trayIcon.show() self.trayMenu", "fileName def get_image(self, imageLbl: QtWidgets.QLabel): fileName, _ = QtWidgets.QFileDialog.getOpenFileName( None, \"Select image\", \"\",", "self.setWindowState(QtCore.Qt.WindowNoState) self.show() def close_from_tray(self): self.isClosedFromTray = True self.close() def closeEvent(self, event): if self.radioCustomTimes.isChecked():", "self.spinShuffleTime.setValue(self.settings['shuffleTime']) if self.settings['isDarkMode']: self.boxDark.setChecked(True) self.set_palette() if self.settings['minimizeToTray']: self.boxMinimize.setChecked(True) else: self.isClosedFromTray = True if", "Ui_MainWindow settingsFile = \"settings.txt\" logoFile = \"Icons\\\\logo.png\" class MainWindow(QtWidgets.QMainWindow, Ui_MainWindow): \"\"\" MainWindow class", "<= self.timeNight.time(): imageFile = self.settings['labelDDImg'] else: imageFile = self.settings['labelNightImg'] if imageFile != self.activeImage:", "int(self.settings['duskhour']), int(self.settings['duskmin']), 0)) self.timeNight.setTime(QtCore.QTime( int(self.settings['nighthour']), int(self.settings['nightmin']), 0)) self.custom_times() self.radioCustomTimes.setChecked(True) else: self.default_times() def 
custom_times(self):", "= self.timeDawn.time().hour() self.settings['dawnmin'] = self.timeDawn.time().minute() self.settings['dayhour'] = self.timeDay.time().hour() self.settings['daymin'] = self.timeDay.time().minute() self.settings['duskhour'] =", "self.isClosedFromTray = True self.close() def closeEvent(self, event): if self.radioCustomTimes.isChecked(): self.settings['isCustomTimes'] = 1 self.settings['dawnhour']", "self.settings['isCustomTimes'] = 0 functions.write_settings(self.settingsPath, self.settings) if self.isClosedFromTray: event.accept() else: event.ignore() self.hide() functions.set_background_priority(True) def", "PyQt5 import QtCore, QtGui, QtWidgets import custom_qt import functions from ui_mainwindow import Ui_MainWindow", "QtWidgets.QSystemTrayIcon.Trigger: self.show_window() if __name__ == \"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__))) # To pick up settings &", "= pixmap.scaled( imageLbl.width(), imageLbl.height(), QtCore.Qt.KeepAspectRatio) imageLbl.setPixmap(pixmap) imageLbl.setAlignment(QtCore.Qt.AlignCenter) self.settings[imageLbl.objectName()] = fileName def get_image(self, imageLbl:", "self.settings['daymin'] = self.timeDay.time().minute() self.settings['duskhour'] = self.timeDusk.time().hour() self.settings['duskmin'] = self.timeDusk.time().minute() self.settings['nighthour'] = self.timeNight.time().hour() self.settings['nightmin']", "else: imageFile = self.settings['labelNightImg'] if imageFile != self.activeImage: functions.set_desktop(imageFile) self.activeImage = imageFile self.mainTimer.start(60000)", "self.mainTimer.start(60000) def set_background_style(self): if self.comboBox.currentText() == 'single image': self.shuffleTimer.stop() self.settings['isSlideshow'] = 0 self.spinShuffleTime.setReadOnly(True)", "def default_times(self): d = functions.get_times() self.timeDawn.setTime(QtCore.QTime( d['dawn'].hour, d['dawn'].minute, 0)) self.timeDay.setTime(QtCore.QTime( d['sunrise'].hour, d['sunrise'].minute, 0))", 
"0)) self.timeDusk.setTime(QtCore.QTime( d['sunset'].hour, d['sunset'].minute, 0)) self.timeNight.setTime(QtCore.QTime( d['dusk'].hour, d['dusk'].minute, 0)) self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True) self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True)", "self.settingsPath = settings self.isClosedFromTray = False self.settings = functions.get_settings(settings) self.activeImage = '' #", "if self.settings['minimizeToTray']: self.boxMinimize.setChecked(True) else: self.isClosedFromTray = True if self.settings['runOnStartup']: self.boxStartup.setChecked(True) def set_palette(self): if", "self.isClosedFromTray = True if self.settings['runOnStartup']: self.boxStartup.setChecked(True) def set_palette(self): if self.boxDark.isChecked(): self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode'] =", "Main script for Circadian Desktops app. Settings file and logo images are stored", "self.show_window) self.trayMenu.addSeparator() self.trayMenu.addAction( \"Exit Circadian Desktops\", self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu) # timers self.mainTimer = QtCore.QTimer()", "# populate data self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg) self.load_times() self.load_preferences() self.set_desktop() self.set_background_style()", "== QtWidgets.QSystemTrayIcon.Trigger: self.show_window() if __name__ == \"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__))) # To pick up settings", "0)) self.custom_times() self.radioCustomTimes.setChecked(True) else: self.default_times() def custom_times(self): self.timeDawn.setReadOnly(False) self.timeDay.setReadOnly(False) self.timeDusk.setReadOnly(False) self.timeNight.setReadOnly(False) def default_times(self):", "functions.run_on_startup(True) self.settings['runOnStartup'] = 1 else: functions.run_on_startup(False) 
self.settings['runOnStartup'] = 0 def minimize_behaviour(self): if self.boxMinimize.isChecked():", "self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu) # timers self.mainTimer = QtCore.QTimer() self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer = QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images) #", "self.boxDark.isChecked(): self.setPalette(custom_qt.DarkPalette()) self.settings['isDarkMode'] = 1 else: self.setPalette(QtGui.QPalette()) self.settings['isDarkMode'] = 0 def startup_behaviour(self): if", "if self.boxStartup.isChecked(): functions.run_on_startup(True) self.settings['runOnStartup'] = 1 else: functions.run_on_startup(False) self.settings['runOnStartup'] = 0 def minimize_behaviour(self):", "\"__main__\": os.chdir(os.path.dirname(os.path.abspath(__file__))) # To pick up settings & images functions.set_process_explicit() # So Windows", "= '' # connect widgets to methods self.btnSelectDayImg.clicked.connect( lambda: self.get_image(self.labelDayImg)) self.btnSelectDDImg.clicked.connect( lambda: self.get_image(self.labelDDImg))", "functions from ui_mainwindow import Ui_MainWindow settingsFile = \"settings.txt\" logoFile = \"Icons\\\\logo.png\" class MainWindow(QtWidgets.QMainWindow,", "self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette) self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour) # tray icon self.trayIcon = QtWidgets.QSystemTrayIcon() self.trayIcon.setIcon(QtGui.QIcon(logoFile)) self.trayIcon.setToolTip(\"Circadian", "def minimize_behaviour(self): if self.boxMinimize.isChecked(): self.isClosedFromTray = False self.settings['minimizeToTray'] = 1 else: self.isClosedFromTray =", "import os import sys from PyQt5 import QtCore, QtGui, QtWidgets import custom_qt import", "if fileName: self.set_image(fileName, imageLbl) 
self.set_desktop() def shuffle_images(self): self.set_image(self.settings['labelDayImg'], self.labelDayImg) self.set_image(self.settings['labelDDImg'], self.labelDDImg) self.set_image(self.settings['labelNightImg'], self.labelNightImg)", "self.settings[imageLbl.objectName()] = fileName def get_image(self, imageLbl: QtWidgets.QLabel): fileName, _ = QtWidgets.QFileDialog.getOpenFileName( None, \"Select", "self.timeDawn.time().minute() self.settings['dayhour'] = self.timeDay.time().hour() self.settings['daymin'] = self.timeDay.time().minute() self.settings['duskhour'] = self.timeDusk.time().hour() self.settings['duskmin'] = self.timeDusk.time().minute()", "newTime: self.shuffleTimer.start(newTime) self.settings['shuffleTime'] = self.spinShuffleTime.value() def load_times(self): if int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']), int(self.settings['dawnmin']), 0))", "layout of the widgets. \"\"\" def __init__(self, parent=None, settings=None): # setup super(MainWindow, self).__init__(parent)", "= 0 def show_window(self): functions.set_background_priority(False) getattr(self, \"raise\")() self.activateWindow() self.setWindowState(QtCore.Qt.WindowNoState) self.show() def close_from_tray(self): self.isClosedFromTray", "\"Select image\", \"\", \"Image files (*.png *.jpg *.jpeg *.bmp)\") if fileName: self.set_image(fileName, imageLbl)", "self.btnSelectDDImg.clicked.connect( lambda: self.get_image(self.labelDDImg)) self.btnSelectNightImg.clicked.connect( lambda: self.get_image(self.labelNightImg)) self.comboBox.currentIndexChanged.connect(self.set_background_style) self.spinShuffleTime.valueChanged.connect(self.set_shuffle_time) self.radioDefaultTimes.clicked.connect(self.default_times) self.radioCustomTimes.clicked.connect(self.custom_times) self.boxDark.stateChanged.connect(self.set_palette) self.boxMinimize.stateChanged.connect(self.minimize_behaviour) self.boxStartup.stateChanged.connect(self.startup_behaviour)", 
"\"\"\" import os import sys from PyQt5 import QtCore, QtGui, QtWidgets import custom_qt", "= self.timeNight.time().hour() self.settings['nightmin'] = self.timeNight.time().minute() else: self.settings['isCustomTimes'] = 0 functions.write_settings(self.settingsPath, self.settings) if self.isClosedFromTray:", "set_desktop(self): now = QtCore.QTime.currentTime() if self.timeDawn.time() < now <= self.timeDay.time(): imageFile = self.settings['labelDDImg']", "self.spinShuffleTime.setReadOnly(True) elif self.comboBox.currentText() == 'slideshow from folders': self.shuffleTimer.start(self.settings['shuffleTime'] * 60000) self.settings['isSlideshow'] = 1", "Desktops\", self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu) # timers self.mainTimer = QtCore.QTimer() self.mainTimer.timeout.connect(self.set_desktop) self.shuffleTimer = QtCore.QTimer() self.shuffleTimer.timeout.connect(self.shuffle_images)", "ui = MainWindow(settings=settingsFile) app.setStyle('fusion') if '/noshow' in sys.argv: functions.set_background_priority(True) else: ui.show() app.setWindowIcon(QtGui.QIcon(logoFile)) ui.setWindowIcon(QtGui.QIcon(logoFile))", "QtWidgets.QMenu() self.trayMenu.addAction(\"Open Circadian Desktops\", self.show_window) self.trayMenu.addSeparator() self.trayMenu.addAction( \"Exit Circadian Desktops\", self.close_from_tray) self.trayIcon.setContextMenu(self.trayMenu) #", "self.settings['dayhour'] = self.timeDay.time().hour() self.settings['daymin'] = self.timeDay.time().minute() self.settings['duskhour'] = self.timeDusk.time().hour() self.settings['duskmin'] = self.timeDusk.time().minute() self.settings['nighthour']", "image\", \"\", \"Image files (*.png *.jpg *.jpeg *.bmp)\") if fileName: self.set_image(fileName, imageLbl) self.set_desktop()", "d['dusk'].hour, d['dusk'].minute, 0)) self.timeDawn.setReadOnly(True) self.timeDay.setReadOnly(True) self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True) def load_preferences(self): if 
self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1) else:", "> newTime: self.shuffleTimer.start(newTime) self.settings['shuffleTime'] = self.spinShuffleTime.value() def load_times(self): if int(self.settings['isCustomTimes']): self.timeDawn.setTime(QtCore.QTime( int(self.settings['dawnhour']), int(self.settings['dawnmin']),", "self.timeDay.setReadOnly(True) self.timeDusk.setReadOnly(True) self.timeNight.setReadOnly(True) def load_preferences(self): if self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1) else: self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime']) if self.settings['isDarkMode']:", "self.settings['isDarkMode'] = 1 else: self.setPalette(QtGui.QPalette()) self.settings['isDarkMode'] = 0 def startup_behaviour(self): if self.boxStartup.isChecked(): functions.run_on_startup(True)", "load_preferences(self): if self.settings['isSlideshow']: self.comboBox.setCurrentIndex(1) else: self.spinShuffleTime.setReadOnly(True) self.spinShuffleTime.setValue(self.settings['shuffleTime']) if self.settings['isDarkMode']: self.boxDark.setChecked(True) self.set_palette() if self.settings['minimizeToTray']:" ]
[ "return self._ip @ip.setter def ip(self, ip: str) -> None: self._ip = ip @property", "bytes) -> None: self._token = token @property def request_token(self) -> bytes: return self._request_token", "str: return self._ip @ip.setter def ip(self, ip: str) -> None: self._ip = ip", "@token.setter def token(self, token: bytes) -> None: self._token = token @property def request_token(self)", "_token = b'' _request_token: bytes = b'' def __eq__(self, other) -> bool: \"\"\"Check", "-> None: self._token = token @property def request_token(self) -> bytes: return self._request_token @request_token.setter", "None: self._ip = ip @property def port(self) -> int: return self._port @port.setter def", "token @property def request_token(self) -> bytes: return self._request_token @request_token.setter def request_token(self, token: bytes)", "port(self, port: int) -> None: self._port = port @property def response(self) -> bool:", "self.port == other.port @property def ip(self) -> str: return self._ip @ip.setter def ip(self,", "response @property def token(self) -> bytes: return self._token @token.setter def token(self, token: bytes)", "ip(self, ip: str) -> None: self._ip = ip @property def port(self) -> int:", "request_token(self) -> bytes: return self._request_token @request_token.setter def request_token(self, token: bytes) -> None: self._request_token", "def request_token(self) -> bytes: return self._request_token @request_token.setter def request_token(self, token: bytes) -> None:", "False _token = b'' _request_token: bytes = b'' def __eq__(self, other) -> bool:", "-> int: return self._port @port.setter def port(self, port: int) -> None: self._port =", "def token(self) -> bytes: return self._token @token.setter def token(self, token: bytes) -> None:", "objects :type other: Server :return: \"\"\" return self.ip == other.ip and self.port ==", "for equality of objects :type other: Server :return: \"\"\" return self.ip == other.ip", "return self._port @port.setter def port(self, port: 
int) -> None: self._port = port @property", "other.port @property def ip(self) -> str: return self._ip @ip.setter def ip(self, ip: str)", "attributes of MasterServer and GameServer objects\"\"\" _ip: str = '' _port: int =", "_ip: str = '' _port: int = 8300 _response: bool = False _token", "-> None: self._response = response @property def token(self) -> bytes: return self._token @token.setter", "None: self._response = response @property def token(self) -> bytes: return self._token @token.setter def", "str = '' _port: int = 8300 _response: bool = False _token =", ":type other: Server :return: \"\"\" return self.ip == other.ip and self.port == other.port", "\"\"\"Server Model Template, containing properties for same attributes of MasterServer and GameServer objects\"\"\"", "-> None: self._port = port @property def response(self) -> bool: return self._response @response.setter", "other.ip and self.port == other.port @property def ip(self) -> str: return self._ip @ip.setter", "GameServer objects\"\"\" _ip: str = '' _port: int = 8300 _response: bool =", "bool: \"\"\"Check for equality of objects :type other: Server :return: \"\"\" return self.ip", "@property def token(self) -> bytes: return self._token @token.setter def token(self, token: bytes) ->", "_port: int = 8300 _response: bool = False _token = b'' _request_token: bytes", "self._port = port @property def response(self) -> bool: return self._response @response.setter def response(self,", "response(self, response: bool) -> None: self._response = response @property def token(self) -> bytes:", "@port.setter def port(self, port: int) -> None: self._port = port @property def response(self)", "= b'' _request_token: bytes = b'' def __eq__(self, other) -> bool: \"\"\"Check for", "port(self) -> int: return self._port @port.setter def port(self, port: int) -> None: self._port", "for same attributes of MasterServer and GameServer objects\"\"\" _ip: str = '' _port:", "\"\"\" return self.ip == other.ip and self.port == 
other.port @property def ip(self) ->", "Model Template, containing properties for same attributes of MasterServer and GameServer objects\"\"\" _ip:", "#!/usr/local/bin/python # coding: utf-8 import abc class Server(abc.ABC): \"\"\"Server Model Template, containing properties", "bytes: return self._request_token @request_token.setter def request_token(self, token: bytes) -> None: self._request_token = token", "return self._response @response.setter def response(self, response: bool) -> None: self._response = response @property", "-> None: self._ip = ip @property def port(self) -> int: return self._port @port.setter", "b'' def __eq__(self, other) -> bool: \"\"\"Check for equality of objects :type other:", "response(self) -> bool: return self._response @response.setter def response(self, response: bool) -> None: self._response", ":return: \"\"\" return self.ip == other.ip and self.port == other.port @property def ip(self)", "bytes: return self._token @token.setter def token(self, token: bytes) -> None: self._token = token", "self._token @token.setter def token(self, token: bytes) -> None: self._token = token @property def", "int = 8300 _response: bool = False _token = b'' _request_token: bytes =", "MasterServer and GameServer objects\"\"\" _ip: str = '' _port: int = 8300 _response:", "== other.ip and self.port == other.port @property def ip(self) -> str: return self._ip", "ip: str) -> None: self._ip = ip @property def port(self) -> int: return", "def token(self, token: bytes) -> None: self._token = token @property def request_token(self) ->", "'' _port: int = 8300 _response: bool = False _token = b'' _request_token:", "def response(self, response: bool) -> None: self._response = response @property def token(self) ->", "bool = False _token = b'' _request_token: bytes = b'' def __eq__(self, other)", "ip(self) -> str: return self._ip @ip.setter def ip(self, ip: str) -> None: self._ip", "bool) -> None: self._response = response @property def token(self) -> bytes: return 
self._token", "port @property def response(self) -> bool: return self._response @response.setter def response(self, response: bool)", "Server :return: \"\"\" return self.ip == other.ip and self.port == other.port @property def", "-> bytes: return self._request_token @request_token.setter def request_token(self, token: bytes) -> None: self._request_token =", "ip @property def port(self) -> int: return self._port @port.setter def port(self, port: int)", "None: self._token = token @property def request_token(self) -> bytes: return self._request_token @request_token.setter def", "port: int) -> None: self._port = port @property def response(self) -> bool: return", "-> bool: \"\"\"Check for equality of objects :type other: Server :return: \"\"\" return", "of objects :type other: Server :return: \"\"\" return self.ip == other.ip and self.port", "@ip.setter def ip(self, ip: str) -> None: self._ip = ip @property def port(self)", "response: bool) -> None: self._response = response @property def token(self) -> bytes: return", "-> bool: return self._response @response.setter def response(self, response: bool) -> None: self._response =", "Template, containing properties for same attributes of MasterServer and GameServer objects\"\"\" _ip: str", "self._response @response.setter def response(self, response: bool) -> None: self._response = response @property def", "# coding: utf-8 import abc class Server(abc.ABC): \"\"\"Server Model Template, containing properties for", "other: Server :return: \"\"\" return self.ip == other.ip and self.port == other.port @property", "Server(abc.ABC): \"\"\"Server Model Template, containing properties for same attributes of MasterServer and GameServer", "= ip @property def port(self) -> int: return self._port @port.setter def port(self, port:", "def port(self) -> int: return self._port @port.setter def port(self, port: int) -> None:", "return self.ip == other.ip and self.port == other.port @property def ip(self) -> str:", "equality of objects :type 
other: Server :return: \"\"\" return self.ip == other.ip and", "@response.setter def response(self, response: bool) -> None: self._response = response @property def token(self)", "def ip(self, ip: str) -> None: self._ip = ip @property def port(self) ->", "= b'' def __eq__(self, other) -> bool: \"\"\"Check for equality of objects :type", "import abc class Server(abc.ABC): \"\"\"Server Model Template, containing properties for same attributes of", "bytes = b'' def __eq__(self, other) -> bool: \"\"\"Check for equality of objects", "@property def request_token(self) -> bytes: return self._request_token @request_token.setter def request_token(self, token: bytes) ->", "objects\"\"\" _ip: str = '' _port: int = 8300 _response: bool = False", "containing properties for same attributes of MasterServer and GameServer objects\"\"\" _ip: str =", "def ip(self) -> str: return self._ip @ip.setter def ip(self, ip: str) -> None:", "self._token = token @property def request_token(self) -> bytes: return self._request_token @request_token.setter def request_token(self,", "= port @property def response(self) -> bool: return self._response @response.setter def response(self, response:", "== other.port @property def ip(self) -> str: return self._ip @ip.setter def ip(self, ip:", "@property def ip(self) -> str: return self._ip @ip.setter def ip(self, ip: str) ->", "= '' _port: int = 8300 _response: bool = False _token = b''", "abc class Server(abc.ABC): \"\"\"Server Model Template, containing properties for same attributes of MasterServer", "= 8300 _response: bool = False _token = b'' _request_token: bytes = b''", "self._port @port.setter def port(self, port: int) -> None: self._port = port @property def", "int) -> None: self._port = port @property def response(self) -> bool: return self._response", "= False _token = b'' _request_token: bytes = b'' def __eq__(self, other) ->", "8300 _response: bool = False _token = b'' _request_token: bytes = b'' def", "_request_token: bytes = b'' def 
__eq__(self, other) -> bool: \"\"\"Check for equality of", "token: bytes) -> None: self._token = token @property def request_token(self) -> bytes: return", "__eq__(self, other) -> bool: \"\"\"Check for equality of objects :type other: Server :return:", "token(self) -> bytes: return self._token @token.setter def token(self, token: bytes) -> None: self._token", "self.ip == other.ip and self.port == other.port @property def ip(self) -> str: return", "and self.port == other.port @property def ip(self) -> str: return self._ip @ip.setter def", "int: return self._port @port.setter def port(self, port: int) -> None: self._port = port", "\"\"\"Check for equality of objects :type other: Server :return: \"\"\" return self.ip ==", "coding: utf-8 import abc class Server(abc.ABC): \"\"\"Server Model Template, containing properties for same", "and GameServer objects\"\"\" _ip: str = '' _port: int = 8300 _response: bool", "self._ip = ip @property def port(self) -> int: return self._port @port.setter def port(self,", "self._response = response @property def token(self) -> bytes: return self._token @token.setter def token(self,", "of MasterServer and GameServer objects\"\"\" _ip: str = '' _port: int = 8300", "= token @property def request_token(self) -> bytes: return self._request_token @request_token.setter def request_token(self, token:", "str) -> None: self._ip = ip @property def port(self) -> int: return self._port", "class Server(abc.ABC): \"\"\"Server Model Template, containing properties for same attributes of MasterServer and", "def response(self) -> bool: return self._response @response.setter def response(self, response: bool) -> None:", "= response @property def token(self) -> bytes: return self._token @token.setter def token(self, token:", "-> str: return self._ip @ip.setter def ip(self, ip: str) -> None: self._ip =", "def port(self, port: int) -> None: self._port = port @property def response(self) ->", "properties for same attributes of MasterServer and GameServer 
objects\"\"\" _ip: str = ''", "_response: bool = False _token = b'' _request_token: bytes = b'' def __eq__(self,", "None: self._port = port @property def response(self) -> bool: return self._response @response.setter def", "@property def port(self) -> int: return self._port @port.setter def port(self, port: int) ->", "@property def response(self) -> bool: return self._response @response.setter def response(self, response: bool) ->", "b'' _request_token: bytes = b'' def __eq__(self, other) -> bool: \"\"\"Check for equality", "bool: return self._response @response.setter def response(self, response: bool) -> None: self._response = response", "token(self, token: bytes) -> None: self._token = token @property def request_token(self) -> bytes:", "other) -> bool: \"\"\"Check for equality of objects :type other: Server :return: \"\"\"", "utf-8 import abc class Server(abc.ABC): \"\"\"Server Model Template, containing properties for same attributes", "same attributes of MasterServer and GameServer objects\"\"\" _ip: str = '' _port: int", "self._ip @ip.setter def ip(self, ip: str) -> None: self._ip = ip @property def", "return self._token @token.setter def token(self, token: bytes) -> None: self._token = token @property", "-> bytes: return self._token @token.setter def token(self, token: bytes) -> None: self._token =", "def __eq__(self, other) -> bool: \"\"\"Check for equality of objects :type other: Server" ]
[ "t.astype(np.float32) T = M.dot(T) # revert height # x[:,1]=height-x[:,1] H = np.diagflat([1., 1.,", "np.diagflat([1., 1., 1., 1.]) H[1, 1] = -1.0 H[1, 3] = height T", "Rx.dot(Ry).dot(Rz) R = R.astype(np.float32) T = np.zeros((4, 4)) T[0:3, 0:3] = R T[3,", "cos(x)]]) Ry = np.array([[cos(y), 0, -sin(y)], [0, 1, 0], [sin(y), 0, cos(y)]]) Rz", "cos(z), 0], [0, 0, 1]]) # rotate R = Rx.dot(Ry).dot(Rz) R = R.astype(np.float32)", "np.array([[1, 0, 0], [0, cos(x), sin(x)], [0, -sin(x), cos(x)]]) Ry = np.array([[cos(y), 0,", "move M = np.diagflat([1., 1., 1., 1.]) M[0:3, 3] = t.astype(np.float32) T =", "from PIL import Image import matplotlib.pyplot as plt # global data bfm =", "import matplotlib.pyplot as plt # global data bfm = MorphabelModel('data/Out/BFM.mat') def get_transform_matrix(s, angles,", "import mesh from src.faceutil.morphable_model import MorphabelModel from src.util.matlabutil import NormDirection from math import", "= np.zeros((4, 4)) T[0:3, 0:3] = R T[3, 3] = 1. # scale", "skimage import src.faceutil from src.faceutil import mesh from src.faceutil.morphable_model import MorphabelModel from src.util.matlabutil", "as plt # global data bfm = MorphabelModel('data/Out/BFM.mat') def get_transform_matrix(s, angles, t, height):", "H[1, 1] = -1.0 H[1, 3] = height T = H.dot(T) return T.astype(np.float32)", "[0, 0, 1]]) # rotate R = Rx.dot(Ry).dot(Rz) R = R.astype(np.float32) T =", "M[0:3, 3] = t.astype(np.float32) T = M.dot(T) # revert height # x[:,1]=height-x[:,1] H", "src.faceutil import mesh from src.faceutil.morphable_model import MorphabelModel from src.util.matlabutil import NormDirection from math", "mesh from src.faceutil.morphable_model import MorphabelModel from src.util.matlabutil import NormDirection from math import sin,", "# global data bfm = MorphabelModel('data/Out/BFM.mat') def get_transform_matrix(s, angles, t, height): \"\"\" :param", "bfm = MorphabelModel('data/Out/BFM.mat') def get_transform_matrix(s, angles, t, height): \"\"\" :param s: 
scale :param", "R = Rx.dot(Ry).dot(Rz) R = R.astype(np.float32) T = np.zeros((4, 4)) T[0:3, 0:3] =", "S.dot(T) # offset move M = np.diagflat([1., 1., 1., 1.]) M[0:3, 3] =", "R T[3, 3] = 1. # scale S = np.diagflat([s, s, s, 1.])", "cos, asin, acos, atan, atan2 from PIL import Image import matplotlib.pyplot as plt", "scale :param angles: [3] rad :param t: [3] :return: 4x4 transmatrix \"\"\" x,", "np.array([[cos(z), sin(z), 0], [-sin(z), cos(z), 0], [0, 0, 1]]) # rotate R =", ":param angles: [3] rad :param t: [3] :return: 4x4 transmatrix \"\"\" x, y,", "from src.faceutil import mesh from src.faceutil.morphable_model import MorphabelModel from src.util.matlabutil import NormDirection from", "os import sys import numpy as np import scipy.io as sio from skimage", "revert height # x[:,1]=height-x[:,1] H = np.diagflat([1., 1., 1., 1.]) H[1, 1] =", "sin, cos, asin, acos, atan, atan2 from PIL import Image import matplotlib.pyplot as", "x[:,1]=height-x[:,1] H = np.diagflat([1., 1., 1., 1.]) H[1, 1] = -1.0 H[1, 3]", "R = R.astype(np.float32) T = np.zeros((4, 4)) T[0:3, 0:3] = R T[3, 3]", "import src.faceutil from src.faceutil import mesh from src.faceutil.morphable_model import MorphabelModel from src.util.matlabutil import", "s: scale :param angles: [3] rad :param t: [3] :return: 4x4 transmatrix \"\"\"", "0], [-sin(z), cos(z), 0], [0, 0, 1]]) # rotate R = Rx.dot(Ry).dot(Rz) R", "import Image import matplotlib.pyplot as plt # global data bfm = MorphabelModel('data/Out/BFM.mat') def", ":return: 4x4 transmatrix \"\"\" x, y, z = angles[0], angles[1], angles[2] Rx =", "# rotate R = Rx.dot(Ry).dot(Rz) R = R.astype(np.float32) T = np.zeros((4, 4)) T[0:3,", "asin, acos, atan, atan2 from PIL import Image import matplotlib.pyplot as plt #", "global data bfm = MorphabelModel('data/Out/BFM.mat') def get_transform_matrix(s, angles, t, height): \"\"\" :param s:", "= np.array([[cos(z), sin(z), 0], [-sin(z), cos(z), 0], [0, 0, 1]]) # rotate R", "x, y, z = angles[0], angles[1], angles[2] Rx = 
np.array([[1, 0, 0], [0,", "src.faceutil from src.faceutil import mesh from src.faceutil.morphable_model import MorphabelModel from src.util.matlabutil import NormDirection", "-sin(y)], [0, 1, 0], [sin(y), 0, cos(y)]]) Rz = np.array([[cos(z), sin(z), 0], [-sin(z),", "T = M.dot(T) # revert height # x[:,1]=height-x[:,1] H = np.diagflat([1., 1., 1.,", "# revert height # x[:,1]=height-x[:,1] H = np.diagflat([1., 1., 1., 1.]) H[1, 1]", "1.]) H[1, 1] = -1.0 H[1, 3] = height T = H.dot(T) return", "transmatrix \"\"\" x, y, z = angles[0], angles[1], angles[2] Rx = np.array([[1, 0,", "[sin(y), 0, cos(y)]]) Rz = np.array([[cos(z), sin(z), 0], [-sin(z), cos(z), 0], [0, 0,", "angles[0], angles[1], angles[2] Rx = np.array([[1, 0, 0], [0, cos(x), sin(x)], [0, -sin(x),", "= M.dot(T) # revert height # x[:,1]=height-x[:,1] H = np.diagflat([1., 1., 1., 1.])", "height # x[:,1]=height-x[:,1] H = np.diagflat([1., 1., 1., 1.]) H[1, 1] = -1.0", "1., 1., 1.]) H[1, 1] = -1.0 H[1, 3] = height T =", "as sio from skimage import io import time import math import skimage import", "Rx = np.array([[1, 0, 0], [0, cos(x), sin(x)], [0, -sin(x), cos(x)]]) Ry =", "angles[2] Rx = np.array([[1, 0, 0], [0, cos(x), sin(x)], [0, -sin(x), cos(x)]]) Ry", "# scale S = np.diagflat([s, s, s, 1.]) T = S.dot(T) # offset", "angles[1], angles[2] Rx = np.array([[1, 0, 0], [0, cos(x), sin(x)], [0, -sin(x), cos(x)]])", "import io import time import math import skimage import src.faceutil from src.faceutil import", "time import math import skimage import src.faceutil from src.faceutil import mesh from src.faceutil.morphable_model", "def get_transform_matrix(s, angles, t, height): \"\"\" :param s: scale :param angles: [3] rad", "1., 1.]) H[1, 1] = -1.0 H[1, 3] = height T = H.dot(T)", "H = np.diagflat([1., 1., 1., 1.]) H[1, 1] = -1.0 H[1, 3] =", "= 1. 
# scale S = np.diagflat([s, s, s, 1.]) T = S.dot(T)", "NormDirection from math import sin, cos, asin, acos, atan, atan2 from PIL import", "[0, -sin(x), cos(x)]]) Ry = np.array([[cos(y), 0, -sin(y)], [0, 1, 0], [sin(y), 0,", "0, 0], [0, cos(x), sin(x)], [0, -sin(x), cos(x)]]) Ry = np.array([[cos(y), 0, -sin(y)],", "sin(x)], [0, -sin(x), cos(x)]]) Ry = np.array([[cos(y), 0, -sin(y)], [0, 1, 0], [sin(y),", "import scipy.io as sio from skimage import io import time import math import", "= MorphabelModel('data/Out/BFM.mat') def get_transform_matrix(s, angles, t, height): \"\"\" :param s: scale :param angles:", "1.]) M[0:3, 3] = t.astype(np.float32) T = M.dot(T) # revert height # x[:,1]=height-x[:,1]", "offset move M = np.diagflat([1., 1., 1., 1.]) M[0:3, 3] = t.astype(np.float32) T", "s, s, 1.]) T = S.dot(T) # offset move M = np.diagflat([1., 1.,", "import numpy as np import scipy.io as sio from skimage import io import", "= R.astype(np.float32) T = np.zeros((4, 4)) T[0:3, 0:3] = R T[3, 3] =", "as np import scipy.io as sio from skimage import io import time import", "import NormDirection from math import sin, cos, asin, acos, atan, atan2 from PIL", "sys import numpy as np import scipy.io as sio from skimage import io", "4)) T[0:3, 0:3] = R T[3, 3] = 1. 
# scale S =", "angles, t, height): \"\"\" :param s: scale :param angles: [3] rad :param t:", "0, cos(y)]]) Rz = np.array([[cos(z), sin(z), 0], [-sin(z), cos(z), 0], [0, 0, 1]])", "scipy.io as sio from skimage import io import time import math import skimage", "skimage import io import time import math import skimage import src.faceutil from src.faceutil", "np.diagflat([1., 1., 1., 1.]) M[0:3, 3] = t.astype(np.float32) T = M.dot(T) # revert", "import time import math import skimage import src.faceutil from src.faceutil import mesh from", "[0, cos(x), sin(x)], [0, -sin(x), cos(x)]]) Ry = np.array([[cos(y), 0, -sin(y)], [0, 1,", "= np.array([[1, 0, 0], [0, cos(x), sin(x)], [0, -sin(x), cos(x)]]) Ry = np.array([[cos(y),", "T[3, 3] = 1. # scale S = np.diagflat([s, s, s, 1.]) T", "# offset move M = np.diagflat([1., 1., 1., 1.]) M[0:3, 3] = t.astype(np.float32)", "0, 1]]) # rotate R = Rx.dot(Ry).dot(Rz) R = R.astype(np.float32) T = np.zeros((4,", "= S.dot(T) # offset move M = np.diagflat([1., 1., 1., 1.]) M[0:3, 3]", "[3] rad :param t: [3] :return: 4x4 transmatrix \"\"\" x, y, z =", "3] = 1. # scale S = np.diagflat([s, s, s, 1.]) T =", "3] = t.astype(np.float32) T = M.dot(T) # revert height # x[:,1]=height-x[:,1] H =", "t: [3] :return: 4x4 transmatrix \"\"\" x, y, z = angles[0], angles[1], angles[2]", "= np.diagflat([s, s, s, 1.]) T = S.dot(T) # offset move M =", "T = np.zeros((4, 4)) T[0:3, 0:3] = R T[3, 3] = 1. #", "1. 
# scale S = np.diagflat([s, s, s, 1.]) T = S.dot(T) #", "s, 1.]) T = S.dot(T) # offset move M = np.diagflat([1., 1., 1.,", "y, z = angles[0], angles[1], angles[2] Rx = np.array([[1, 0, 0], [0, cos(x),", "1.]) T = S.dot(T) # offset move M = np.diagflat([1., 1., 1., 1.])", "= t.astype(np.float32) T = M.dot(T) # revert height # x[:,1]=height-x[:,1] H = np.diagflat([1.,", "angles: [3] rad :param t: [3] :return: 4x4 transmatrix \"\"\" x, y, z", "src.util.matlabutil import NormDirection from math import sin, cos, asin, acos, atan, atan2 from", "import sys import numpy as np import scipy.io as sio from skimage import", "1, 0], [sin(y), 0, cos(y)]]) Rz = np.array([[cos(z), sin(z), 0], [-sin(z), cos(z), 0],", "0], [0, 0, 1]]) # rotate R = Rx.dot(Ry).dot(Rz) R = R.astype(np.float32) T", "-sin(x), cos(x)]]) Ry = np.array([[cos(y), 0, -sin(y)], [0, 1, 0], [sin(y), 0, cos(y)]])", "import sin, cos, asin, acos, atan, atan2 from PIL import Image import matplotlib.pyplot", "0:3] = R T[3, 3] = 1. # scale S = np.diagflat([s, s,", "io import time import math import skimage import src.faceutil from src.faceutil import mesh", "sio from skimage import io import time import math import skimage import src.faceutil", ":param s: scale :param angles: [3] rad :param t: [3] :return: 4x4 transmatrix", "= R T[3, 3] = 1. 
# scale S = np.diagflat([s, s, s,", "import skimage import src.faceutil from src.faceutil import mesh from src.faceutil.morphable_model import MorphabelModel from", "sin(z), 0], [-sin(z), cos(z), 0], [0, 0, 1]]) # rotate R = Rx.dot(Ry).dot(Rz)", "Rz = np.array([[cos(z), sin(z), 0], [-sin(z), cos(z), 0], [0, 0, 1]]) # rotate", "t, height): \"\"\" :param s: scale :param angles: [3] rad :param t: [3]", ":param t: [3] :return: 4x4 transmatrix \"\"\" x, y, z = angles[0], angles[1],", "M = np.diagflat([1., 1., 1., 1.]) M[0:3, 3] = t.astype(np.float32) T = M.dot(T)", "np import scipy.io as sio from skimage import io import time import math", "1., 1., 1.]) M[0:3, 3] = t.astype(np.float32) T = M.dot(T) # revert height", "import math import skimage import src.faceutil from src.faceutil import mesh from src.faceutil.morphable_model import", "height): \"\"\" :param s: scale :param angles: [3] rad :param t: [3] :return:", "np.array([[cos(y), 0, -sin(y)], [0, 1, 0], [sin(y), 0, cos(y)]]) Rz = np.array([[cos(z), sin(z),", "T = S.dot(T) # offset move M = np.diagflat([1., 1., 1., 1.]) M[0:3,", "np.zeros((4, 4)) T[0:3, 0:3] = R T[3, 3] = 1. 
# scale S", "cos(y)]]) Rz = np.array([[cos(z), sin(z), 0], [-sin(z), cos(z), 0], [0, 0, 1]]) #", "\"\"\" :param s: scale :param angles: [3] rad :param t: [3] :return: 4x4", "scale S = np.diagflat([s, s, s, 1.]) T = S.dot(T) # offset move", "atan, atan2 from PIL import Image import matplotlib.pyplot as plt # global data", "z = angles[0], angles[1], angles[2] Rx = np.array([[1, 0, 0], [0, cos(x), sin(x)],", "[0, 1, 0], [sin(y), 0, cos(y)]]) Rz = np.array([[cos(z), sin(z), 0], [-sin(z), cos(z),", "data bfm = MorphabelModel('data/Out/BFM.mat') def get_transform_matrix(s, angles, t, height): \"\"\" :param s: scale", "[-sin(z), cos(z), 0], [0, 0, 1]]) # rotate R = Rx.dot(Ry).dot(Rz) R =", "PIL import Image import matplotlib.pyplot as plt # global data bfm = MorphabelModel('data/Out/BFM.mat')", "import MorphabelModel from src.util.matlabutil import NormDirection from math import sin, cos, asin, acos,", "[3] :return: 4x4 transmatrix \"\"\" x, y, z = angles[0], angles[1], angles[2] Rx", "T[0:3, 0:3] = R T[3, 3] = 1. 
# scale S = np.diagflat([s,", "Image import matplotlib.pyplot as plt # global data bfm = MorphabelModel('data/Out/BFM.mat') def get_transform_matrix(s,", "from skimage import io import time import math import skimage import src.faceutil from", "R.astype(np.float32) T = np.zeros((4, 4)) T[0:3, 0:3] = R T[3, 3] = 1.", "rad :param t: [3] :return: 4x4 transmatrix \"\"\" x, y, z = angles[0],", "M.dot(T) # revert height # x[:,1]=height-x[:,1] H = np.diagflat([1., 1., 1., 1.]) H[1,", "= np.diagflat([1., 1., 1., 1.]) M[0:3, 3] = t.astype(np.float32) T = M.dot(T) #", "MorphabelModel('data/Out/BFM.mat') def get_transform_matrix(s, angles, t, height): \"\"\" :param s: scale :param angles: [3]", "Ry = np.array([[cos(y), 0, -sin(y)], [0, 1, 0], [sin(y), 0, cos(y)]]) Rz =", "0], [sin(y), 0, cos(y)]]) Rz = np.array([[cos(z), sin(z), 0], [-sin(z), cos(z), 0], [0,", "\"\"\" x, y, z = angles[0], angles[1], angles[2] Rx = np.array([[1, 0, 0],", "1]]) # rotate R = Rx.dot(Ry).dot(Rz) R = R.astype(np.float32) T = np.zeros((4, 4))", "0], [0, cos(x), sin(x)], [0, -sin(x), cos(x)]]) Ry = np.array([[cos(y), 0, -sin(y)], [0,", "1., 1.]) M[0:3, 3] = t.astype(np.float32) T = M.dot(T) # revert height #", "math import sin, cos, asin, acos, atan, atan2 from PIL import Image import", "cos(x), sin(x)], [0, -sin(x), cos(x)]]) Ry = np.array([[cos(y), 0, -sin(y)], [0, 1, 0],", "src.faceutil.morphable_model import MorphabelModel from src.util.matlabutil import NormDirection from math import sin, cos, asin,", "from src.util.matlabutil import NormDirection from math import sin, cos, asin, acos, atan, atan2", "= Rx.dot(Ry).dot(Rz) R = R.astype(np.float32) T = np.zeros((4, 4)) T[0:3, 0:3] = R", "numpy as np import scipy.io as sio from skimage import io import time", "= np.array([[cos(y), 0, -sin(y)], [0, 1, 0], [sin(y), 0, cos(y)]]) Rz = np.array([[cos(z),", "from src.faceutil.morphable_model import MorphabelModel from src.util.matlabutil import NormDirection from math import sin, cos,", "0, -sin(y)], [0, 
1, 0], [sin(y), 0, cos(y)]]) Rz = np.array([[cos(z), sin(z), 0],", "math import skimage import src.faceutil from src.faceutil import mesh from src.faceutil.morphable_model import MorphabelModel", "np.diagflat([s, s, s, 1.]) T = S.dot(T) # offset move M = np.diagflat([1.,", "S = np.diagflat([s, s, s, 1.]) T = S.dot(T) # offset move M", "= angles[0], angles[1], angles[2] Rx = np.array([[1, 0, 0], [0, cos(x), sin(x)], [0,", "4x4 transmatrix \"\"\" x, y, z = angles[0], angles[1], angles[2] Rx = np.array([[1,", "get_transform_matrix(s, angles, t, height): \"\"\" :param s: scale :param angles: [3] rad :param", "matplotlib.pyplot as plt # global data bfm = MorphabelModel('data/Out/BFM.mat') def get_transform_matrix(s, angles, t,", "import os import sys import numpy as np import scipy.io as sio from", "# x[:,1]=height-x[:,1] H = np.diagflat([1., 1., 1., 1.]) H[1, 1] = -1.0 H[1,", "plt # global data bfm = MorphabelModel('data/Out/BFM.mat') def get_transform_matrix(s, angles, t, height): \"\"\"", "rotate R = Rx.dot(Ry).dot(Rz) R = R.astype(np.float32) T = np.zeros((4, 4)) T[0:3, 0:3]", "= np.diagflat([1., 1., 1., 1.]) H[1, 1] = -1.0 H[1, 3] = height", "acos, atan, atan2 from PIL import Image import matplotlib.pyplot as plt # global", "MorphabelModel from src.util.matlabutil import NormDirection from math import sin, cos, asin, acos, atan,", "atan2 from PIL import Image import matplotlib.pyplot as plt # global data bfm", "from math import sin, cos, asin, acos, atan, atan2 from PIL import Image" ]
[ "= response['page_num'] if 'page_size' in response: self.page_size = response['page_size'] if 'total_number' in response:", "utf-8 -*- import json from alipay.aop.api.response.AlipayResponse import AlipayResponse from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo class", "total_number(self): return self._total_number @total_number.setter def total_number(self, value): self._total_number = value def parse_response_content(self, response_content):", "= response['page_data'] if 'page_num' in response: self.page_num = response['page_num'] if 'page_size' in response:", "self._page_data @page_data.setter def page_data(self, value): if isinstance(value, list): self._page_data = list() for i", "isinstance(value, list): self._page_data = list() for i in value: if isinstance(i, PaymentSuccessPagePlanInfo): self._page_data.append(i)", "return self._page_data @page_data.setter def page_data(self, value): if isinstance(value, list): self._page_data = list() for", "@property def page_data(self): return self._page_data @page_data.setter def page_data(self, value): if isinstance(value, list): self._page_data", "def total_number(self): return self._total_number @total_number.setter def total_number(self, value): self._total_number = value def parse_response_content(self,", "self._page_size = value @property def total_number(self): return self._total_number @total_number.setter def total_number(self, value): self._total_number", "import PaymentSuccessPagePlanInfo class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def __init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__() self._page_data = None self._page_num =", "in response: self.page_data = response['page_data'] if 'page_num' in response: self.page_num = response['page_num'] if", "super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__() self._page_data = None self._page_num = None self._page_size = None self._total_number =", "= value 
def parse_response_content(self, response_content): response = super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content) if 'page_data' in response:", "None self._page_size = None self._total_number = None @property def page_data(self): return self._page_data @page_data.setter", "PaymentSuccessPagePlanInfo class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def __init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__() self._page_data = None self._page_num = None", "self.page_num = response['page_num'] if 'page_size' in response: self.page_size = response['page_size'] if 'total_number' in", "-*- coding: utf-8 -*- import json from alipay.aop.api.response.AlipayResponse import AlipayResponse from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import", "response['page_num'] if 'page_size' in response: self.page_size = response['page_size'] if 'total_number' in response: self.total_number", "response: self.page_num = response['page_num'] if 'page_size' in response: self.page_size = response['page_size'] if 'total_number'", "= None self._page_size = None self._total_number = None @property def page_data(self): return self._page_data", "AlipayResponse from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def __init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__() self._page_data =", "'page_size' in response: self.page_size = response['page_size'] if 'total_number' in response: self.total_number = response['total_number']", "def total_number(self, value): self._total_number = value def parse_response_content(self, response_content): response = super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content)", "list): self._page_data = list() for i in value: if isinstance(i, PaymentSuccessPagePlanInfo): self._page_data.append(i) 
else:", "self._page_data = None self._page_num = None self._page_size = None self._total_number = None @property", "def page_size(self, value): self._page_size = value @property def total_number(self): return self._total_number @total_number.setter def", "None self._page_num = None self._page_size = None self._total_number = None @property def page_data(self):", "def page_size(self): return self._page_size @page_size.setter def page_size(self, value): self._page_size = value @property def", "alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def __init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__() self._page_data = None self._page_num", "def parse_response_content(self, response_content): response = super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content) if 'page_data' in response: self.page_data =", "@total_number.setter def total_number(self, value): self._total_number = value def parse_response_content(self, response_content): response = super(AlipayOpenMiniPlanOperateBatchqueryResponse,", "from alipay.aop.api.response.AlipayResponse import AlipayResponse from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def __init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse,", "value): if isinstance(value, list): self._page_data = list() for i in value: if isinstance(i,", "# -*- coding: utf-8 -*- import json from alipay.aop.api.response.AlipayResponse import AlipayResponse from alipay.aop.api.domain.PaymentSuccessPagePlanInfo", "response: self.page_data = response['page_data'] if 'page_num' in response: self.page_num = response['page_num'] if 'page_size'", "self._total_number @total_number.setter def total_number(self, value): self._total_number = value def parse_response_content(self, 
response_content): response =", "page_size(self): return self._page_size @page_size.setter def page_size(self, value): self._page_size = value @property def total_number(self):", "self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property def page_num(self): return self._page_num @page_num.setter def page_num(self, value): self._page_num = value", "@property def page_num(self): return self._page_num @page_num.setter def page_num(self, value): self._page_num = value @property", "coding: utf-8 -*- import json from alipay.aop.api.response.AlipayResponse import AlipayResponse from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo", "def page_num(self, value): self._page_num = value @property def page_size(self): return self._page_size @page_size.setter def", "def __init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__() self._page_data = None self._page_num = None self._page_size = None", "in value: if isinstance(i, PaymentSuccessPagePlanInfo): self._page_data.append(i) else: self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property def page_num(self): return self._page_num", "in response: self.page_num = response['page_num'] if 'page_size' in response: self.page_size = response['page_size'] if", "def page_data(self): return self._page_data @page_data.setter def page_data(self, value): if isinstance(value, list): self._page_data =", "= None @property def page_data(self): return self._page_data @page_data.setter def page_data(self, value): if isinstance(value,", "page_size(self, value): self._page_size = value @property def total_number(self): return self._total_number @total_number.setter def total_number(self,", "page_data(self, value): if isinstance(value, list): self._page_data = list() for i in value: if", "value): self._page_num = value @property def page_size(self): return self._page_size @page_size.setter def page_size(self, value):", "@property def 
total_number(self): return self._total_number @total_number.setter def total_number(self, value): self._total_number = value def", "import json from alipay.aop.api.response.AlipayResponse import AlipayResponse from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def", "@page_size.setter def page_size(self, value): self._page_size = value @property def total_number(self): return self._total_number @total_number.setter", "self.page_data = response['page_data'] if 'page_num' in response: self.page_num = response['page_num'] if 'page_size' in", "value def parse_response_content(self, response_content): response = super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content) if 'page_data' in response: self.page_data", "page_data(self): return self._page_data @page_data.setter def page_data(self, value): if isinstance(value, list): self._page_data = list()", "page_num(self): return self._page_num @page_num.setter def page_num(self, value): self._page_num = value @property def page_size(self):", "if 'page_data' in response: self.page_data = response['page_data'] if 'page_num' in response: self.page_num =", "__init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__() self._page_data = None self._page_num = None self._page_size = None self._total_number", "self._page_data.append(i) else: self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property def page_num(self): return self._page_num @page_num.setter def page_num(self, value): self._page_num", "isinstance(i, PaymentSuccessPagePlanInfo): self._page_data.append(i) else: self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property def page_num(self): return self._page_num @page_num.setter def page_num(self,", "self).parse_response_content(response_content) if 'page_data' in response: self.page_data = response['page_data'] 
if 'page_num' in response: self.page_num", "python # -*- coding: utf-8 -*- import json from alipay.aop.api.response.AlipayResponse import AlipayResponse from", "value @property def total_number(self): return self._total_number @total_number.setter def total_number(self, value): self._total_number = value", "self._total_number = None @property def page_data(self): return self._page_data @page_data.setter def page_data(self, value): if", "response_content): response = super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content) if 'page_data' in response: self.page_data = response['page_data'] if", "value): self._total_number = value def parse_response_content(self, response_content): response = super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content) if 'page_data'", "json from alipay.aop.api.response.AlipayResponse import AlipayResponse from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def __init__(self):", "class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def __init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__() self._page_data = None self._page_num = None self._page_size", "self._page_num = None self._page_size = None self._total_number = None @property def page_data(self): return", "= list() for i in value: if isinstance(i, PaymentSuccessPagePlanInfo): self._page_data.append(i) else: self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property", "self).__init__() self._page_data = None self._page_num = None self._page_size = None self._total_number = None", "parse_response_content(self, response_content): response = super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content) if 'page_data' in response: self.page_data = response['page_data']", 
"AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def __init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__() self._page_data = None self._page_num = None self._page_size =", "@page_data.setter def page_data(self, value): if isinstance(value, list): self._page_data = list() for i in", "None @property def page_data(self): return self._page_data @page_data.setter def page_data(self, value): if isinstance(value, list):", "value @property def page_size(self): return self._page_size @page_size.setter def page_size(self, value): self._page_size = value", "@page_num.setter def page_num(self, value): self._page_num = value @property def page_size(self): return self._page_size @page_size.setter", "self._page_size @page_size.setter def page_size(self, value): self._page_size = value @property def total_number(self): return self._total_number", "def page_data(self, value): if isinstance(value, list): self._page_data = list() for i in value:", "value: if isinstance(i, PaymentSuccessPagePlanInfo): self._page_data.append(i) else: self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property def page_num(self): return self._page_num @page_num.setter", "@property def page_size(self): return self._page_size @page_size.setter def page_size(self, value): self._page_size = value @property", "return self._page_num @page_num.setter def page_num(self, value): self._page_num = value @property def page_size(self): return", "def page_num(self): return self._page_num @page_num.setter def page_num(self, value): self._page_num = value @property def", "else: self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property def page_num(self): return self._page_num @page_num.setter def page_num(self, value): self._page_num =", "= value @property def page_size(self): return self._page_size @page_size.setter def page_size(self, value): self._page_size =", "page_num(self, value): self._page_num = value @property def 
page_size(self): return self._page_size @page_size.setter def page_size(self,", "= super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content) if 'page_data' in response: self.page_data = response['page_data'] if 'page_num' in", "self._page_num @page_num.setter def page_num(self, value): self._page_num = value @property def page_size(self): return self._page_size", "return self._page_size @page_size.setter def page_size(self, value): self._page_size = value @property def total_number(self): return", "super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content) if 'page_data' in response: self.page_data = response['page_data'] if 'page_num' in response:", "import AlipayResponse from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def __init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__() self._page_data", "alipay.aop.api.response.AlipayResponse import AlipayResponse from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def __init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__()", "self._page_num = value @property def page_size(self): return self._page_size @page_size.setter def page_size(self, value): self._page_size", "if isinstance(i, PaymentSuccessPagePlanInfo): self._page_data.append(i) else: self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property def page_num(self): return self._page_num @page_num.setter def", "i in value: if isinstance(i, PaymentSuccessPagePlanInfo): self._page_data.append(i) else: self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property def page_num(self): return", "PaymentSuccessPagePlanInfo): self._page_data.append(i) else: 
self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property def page_num(self): return self._page_num @page_num.setter def page_num(self, value):", "return self._total_number @total_number.setter def total_number(self, value): self._total_number = value def parse_response_content(self, response_content): response", "if 'page_size' in response: self.page_size = response['page_size'] if 'total_number' in response: self.total_number =", "'page_num' in response: self.page_num = response['page_num'] if 'page_size' in response: self.page_size = response['page_size']", "list() for i in value: if isinstance(i, PaymentSuccessPagePlanInfo): self._page_data.append(i) else: self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property def", "= None self._total_number = None @property def page_data(self): return self._page_data @page_data.setter def page_data(self,", "value): self._page_size = value @property def total_number(self): return self._total_number @total_number.setter def total_number(self, value):", "'page_data' in response: self.page_data = response['page_data'] if 'page_num' in response: self.page_num = response['page_num']", "if isinstance(value, list): self._page_data = list() for i in value: if isinstance(i, PaymentSuccessPagePlanInfo):", "self._total_number = value def parse_response_content(self, response_content): response = super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content) if 'page_data' in", "-*- import json from alipay.aop.api.response.AlipayResponse import AlipayResponse from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse):", "response = super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content) if 'page_data' in response: self.page_data = response['page_data'] if 'page_num'", "if 'page_num' in response: self.page_num = 
response['page_num'] if 'page_size' in response: self.page_size =", "response['page_data'] if 'page_num' in response: self.page_num = response['page_num'] if 'page_size' in response: self.page_size", "total_number(self, value): self._total_number = value def parse_response_content(self, response_content): response = super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).parse_response_content(response_content) if", "= None self._page_num = None self._page_size = None self._total_number = None @property def", "self._page_size = None self._total_number = None @property def page_data(self): return self._page_data @page_data.setter def", "for i in value: if isinstance(i, PaymentSuccessPagePlanInfo): self._page_data.append(i) else: self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i)) @property def page_num(self):", "None self._total_number = None @property def page_data(self): return self._page_data @page_data.setter def page_data(self, value):", "#!/usr/bin/env python # -*- coding: utf-8 -*- import json from alipay.aop.api.response.AlipayResponse import AlipayResponse", "= value @property def total_number(self): return self._total_number @total_number.setter def total_number(self, value): self._total_number =", "self._page_data = list() for i in value: if isinstance(i, PaymentSuccessPagePlanInfo): self._page_data.append(i) else: self._page_data.append(PaymentSuccessPagePlanInfo.from_alipay_dict(i))", "from alipay.aop.api.domain.PaymentSuccessPagePlanInfo import PaymentSuccessPagePlanInfo class AlipayOpenMiniPlanOperateBatchqueryResponse(AlipayResponse): def __init__(self): super(AlipayOpenMiniPlanOperateBatchqueryResponse, self).__init__() self._page_data = None" ]
[ "else: return self.__tabix_file.fetch( self.__contig_mapping.get( chrom_interval.chrom, chrom_interval.chrom), chrom_interval.interval.start, chrom_interval.interval.end) except ValueError: raise StopIteration def", "is None: return self.__tabix_file.fetch() else: return self.__tabix_file.fetch( self.__contig_mapping.get( chrom_interval.chrom, chrom_interval.chrom), chrom_interval.interval.start, chrom_interval.interval.end) except", "region): try: return self.__tabix_file.fetch(region=region) except ValueError: raise StopIteration def close(self): self.__tabix_file.close() def __enter__(self):", "def fetch_generator(self, chrom_interval): # Tabix will throw a ValueError if the chromosome specified", "contigs(self): return self.__tabix_file.contigs def fetch_generator(self, chrom_interval): # Tabix will throw a ValueError if", "will throw a ValueError if the chromosome specified is not # present in", "self.__contig_mapping = {standardise_chromosome( contig): contig for contig in self.__tabix_file.contigs} @property def header(self): return", "chrom_interval): # Tabix will throw a ValueError if the chromosome specified is not", "@property def contigs(self): return self.__tabix_file.contigs def fetch_generator(self, chrom_interval): # Tabix will throw a", "contig for contig in self.__tabix_file.contigs} @property def header(self): return (line for line in", "@property def header(self): return (line for line in self.__tabix_file.header) @property def contigs(self): return", "header(self): return (line for line in self.__tabix_file.header) @property def contigs(self): return self.__tabix_file.contigs def", "Copyright (C) 2018 Genomics plc from wecall.genomics.chromosome import standardise_chromosome import pysam class TabixWrapper(object):", "def close(self): self.__tabix_file.close() def __enter__(self): return self def __exit__(self, ex_type, value, traceback): self.close()", "wecall.genomics.chromosome import standardise_chromosome import pysam class 
TabixWrapper(object): def __init__(self, tabix_filename): self.__tabix_file = pysam.Tabixfile(tabix_filename,", "the chromosome specified is not # present in the index for this file.", "# All content Copyright (C) 2018 Genomics plc from wecall.genomics.chromosome import standardise_chromosome import", "the index for this file. try: if chrom_interval.chrom is None: return self.__tabix_file.fetch() else:", "chrom_interval.interval.end) except ValueError: raise StopIteration def fetch_region(self, region): try: return self.__tabix_file.fetch(region=region) except ValueError:", "def header(self): return (line for line in self.__tabix_file.header) @property def contigs(self): return self.__tabix_file.contigs", "tabix_filename): self.__tabix_file = pysam.Tabixfile(tabix_filename, 'r') self.__contig_mapping = {standardise_chromosome( contig): contig for contig in", "fetch_generator(self, chrom_interval): # Tabix will throw a ValueError if the chromosome specified is", "def __init__(self, tabix_filename): self.__tabix_file = pysam.Tabixfile(tabix_filename, 'r') self.__contig_mapping = {standardise_chromosome( contig): contig for", "in the index for this file. try: if chrom_interval.chrom is None: return self.__tabix_file.fetch()", "contig in self.__tabix_file.contigs} @property def header(self): return (line for line in self.__tabix_file.header) @property", "throw a ValueError if the chromosome specified is not # present in the", "for this file. try: if chrom_interval.chrom is None: return self.__tabix_file.fetch() else: return self.__tabix_file.fetch(", "for contig in self.__tabix_file.contigs} @property def header(self): return (line for line in self.__tabix_file.header)", "self.__tabix_file.contigs} @property def header(self): return (line for line in self.__tabix_file.header) @property def contigs(self):", "# Tabix will throw a ValueError if the chromosome specified is not #", "this file. 
try: if chrom_interval.chrom is None: return self.__tabix_file.fetch() else: return self.__tabix_file.fetch( self.__contig_mapping.get(", "import pysam class TabixWrapper(object): def __init__(self, tabix_filename): self.__tabix_file = pysam.Tabixfile(tabix_filename, 'r') self.__contig_mapping =", "ValueError: raise StopIteration def fetch_region(self, region): try: return self.__tabix_file.fetch(region=region) except ValueError: raise StopIteration", "Genomics plc from wecall.genomics.chromosome import standardise_chromosome import pysam class TabixWrapper(object): def __init__(self, tabix_filename):", "try: if chrom_interval.chrom is None: return self.__tabix_file.fetch() else: return self.__tabix_file.fetch( self.__contig_mapping.get( chrom_interval.chrom, chrom_interval.chrom),", "return self.__tabix_file.contigs def fetch_generator(self, chrom_interval): # Tabix will throw a ValueError if the", "StopIteration def close(self): self.__tabix_file.close() def __enter__(self): return self def __exit__(self, ex_type, value, traceback):", "(line for line in self.__tabix_file.header) @property def contigs(self): return self.__tabix_file.contigs def fetch_generator(self, chrom_interval):", "standardise_chromosome import pysam class TabixWrapper(object): def __init__(self, tabix_filename): self.__tabix_file = pysam.Tabixfile(tabix_filename, 'r') self.__contig_mapping", "self.__tabix_file.contigs def fetch_generator(self, chrom_interval): # Tabix will throw a ValueError if the chromosome", "is not # present in the index for this file. try: if chrom_interval.chrom", "except ValueError: raise StopIteration def fetch_region(self, region): try: return self.__tabix_file.fetch(region=region) except ValueError: raise", "present in the index for this file. try: if chrom_interval.chrom is None: return", "file. 
try: if chrom_interval.chrom is None: return self.__tabix_file.fetch() else: return self.__tabix_file.fetch( self.__contig_mapping.get( chrom_interval.chrom,", "line in self.__tabix_file.header) @property def contigs(self): return self.__tabix_file.contigs def fetch_generator(self, chrom_interval): # Tabix", "raise StopIteration def fetch_region(self, region): try: return self.__tabix_file.fetch(region=region) except ValueError: raise StopIteration def", "chrom_interval.chrom is None: return self.__tabix_file.fetch() else: return self.__tabix_file.fetch( self.__contig_mapping.get( chrom_interval.chrom, chrom_interval.chrom), chrom_interval.interval.start, chrom_interval.interval.end)", "self.__contig_mapping.get( chrom_interval.chrom, chrom_interval.chrom), chrom_interval.interval.start, chrom_interval.interval.end) except ValueError: raise StopIteration def fetch_region(self, region): try:", "for line in self.__tabix_file.header) @property def contigs(self): return self.__tabix_file.contigs def fetch_generator(self, chrom_interval): #", "from wecall.genomics.chromosome import standardise_chromosome import pysam class TabixWrapper(object): def __init__(self, tabix_filename): self.__tabix_file =", "content Copyright (C) 2018 Genomics plc from wecall.genomics.chromosome import standardise_chromosome import pysam class", "try: return self.__tabix_file.fetch(region=region) except ValueError: raise StopIteration def close(self): self.__tabix_file.close() def __enter__(self): return", "'r') self.__contig_mapping = {standardise_chromosome( contig): contig for contig in self.__tabix_file.contigs} @property def header(self):", "{standardise_chromosome( contig): contig for contig in self.__tabix_file.contigs} @property def header(self): return (line for", "return self.__tabix_file.fetch( self.__contig_mapping.get( chrom_interval.chrom, chrom_interval.chrom), chrom_interval.interval.start, chrom_interval.interval.end) except ValueError: raise StopIteration def 
fetch_region(self,", "= {standardise_chromosome( contig): contig for contig in self.__tabix_file.contigs} @property def header(self): return (line", "chrom_interval.chrom), chrom_interval.interval.start, chrom_interval.interval.end) except ValueError: raise StopIteration def fetch_region(self, region): try: return self.__tabix_file.fetch(region=region)", "# present in the index for this file. try: if chrom_interval.chrom is None:", "plc from wecall.genomics.chromosome import standardise_chromosome import pysam class TabixWrapper(object): def __init__(self, tabix_filename): self.__tabix_file", "chrom_interval.chrom, chrom_interval.chrom), chrom_interval.interval.start, chrom_interval.interval.end) except ValueError: raise StopIteration def fetch_region(self, region): try: return", "chrom_interval.interval.start, chrom_interval.interval.end) except ValueError: raise StopIteration def fetch_region(self, region): try: return self.__tabix_file.fetch(region=region) except", "__init__(self, tabix_filename): self.__tabix_file = pysam.Tabixfile(tabix_filename, 'r') self.__contig_mapping = {standardise_chromosome( contig): contig for contig", "Tabix will throw a ValueError if the chromosome specified is not # present", "self.__tabix_file = pysam.Tabixfile(tabix_filename, 'r') self.__contig_mapping = {standardise_chromosome( contig): contig for contig in self.__tabix_file.contigs}", "in self.__tabix_file.header) @property def contigs(self): return self.__tabix_file.contigs def fetch_generator(self, chrom_interval): # Tabix will", "return self.__tabix_file.fetch() else: return self.__tabix_file.fetch( self.__contig_mapping.get( chrom_interval.chrom, chrom_interval.chrom), chrom_interval.interval.start, chrom_interval.interval.end) except ValueError: raise", "None: return self.__tabix_file.fetch() else: return self.__tabix_file.fetch( self.__contig_mapping.get( chrom_interval.chrom, chrom_interval.chrom), chrom_interval.interval.start, chrom_interval.interval.end) except 
ValueError:", "return (line for line in self.__tabix_file.header) @property def contigs(self): return self.__tabix_file.contigs def fetch_generator(self,", "class TabixWrapper(object): def __init__(self, tabix_filename): self.__tabix_file = pysam.Tabixfile(tabix_filename, 'r') self.__contig_mapping = {standardise_chromosome( contig):", "if the chromosome specified is not # present in the index for this", "specified is not # present in the index for this file. try: if", "TabixWrapper(object): def __init__(self, tabix_filename): self.__tabix_file = pysam.Tabixfile(tabix_filename, 'r') self.__contig_mapping = {standardise_chromosome( contig): contig", "except ValueError: raise StopIteration def close(self): self.__tabix_file.close() def __enter__(self): return self def __exit__(self,", "def contigs(self): return self.__tabix_file.contigs def fetch_generator(self, chrom_interval): # Tabix will throw a ValueError", "self.__tabix_file.fetch( self.__contig_mapping.get( chrom_interval.chrom, chrom_interval.chrom), chrom_interval.interval.start, chrom_interval.interval.end) except ValueError: raise StopIteration def fetch_region(self, region):", "self.__tabix_file.fetch(region=region) except ValueError: raise StopIteration def close(self): self.__tabix_file.close() def __enter__(self): return self def", "not # present in the index for this file. try: if chrom_interval.chrom is", "(C) 2018 Genomics plc from wecall.genomics.chromosome import standardise_chromosome import pysam class TabixWrapper(object): def", "chromosome specified is not # present in the index for this file. 
try:", "contig): contig for contig in self.__tabix_file.contigs} @property def header(self): return (line for line", "self.__tabix_file.fetch() else: return self.__tabix_file.fetch( self.__contig_mapping.get( chrom_interval.chrom, chrom_interval.chrom), chrom_interval.interval.start, chrom_interval.interval.end) except ValueError: raise StopIteration", "self.__tabix_file.header) @property def contigs(self): return self.__tabix_file.contigs def fetch_generator(self, chrom_interval): # Tabix will throw", "ValueError if the chromosome specified is not # present in the index for", "raise StopIteration def close(self): self.__tabix_file.close() def __enter__(self): return self def __exit__(self, ex_type, value,", "index for this file. try: if chrom_interval.chrom is None: return self.__tabix_file.fetch() else: return", "= pysam.Tabixfile(tabix_filename, 'r') self.__contig_mapping = {standardise_chromosome( contig): contig for contig in self.__tabix_file.contigs} @property", "pysam.Tabixfile(tabix_filename, 'r') self.__contig_mapping = {standardise_chromosome( contig): contig for contig in self.__tabix_file.contigs} @property def", "fetch_region(self, region): try: return self.__tabix_file.fetch(region=region) except ValueError: raise StopIteration def close(self): self.__tabix_file.close() def", "2018 Genomics plc from wecall.genomics.chromosome import standardise_chromosome import pysam class TabixWrapper(object): def __init__(self,", "return self.__tabix_file.fetch(region=region) except ValueError: raise StopIteration def close(self): self.__tabix_file.close() def __enter__(self): return self", "pysam class TabixWrapper(object): def __init__(self, tabix_filename): self.__tabix_file = pysam.Tabixfile(tabix_filename, 'r') self.__contig_mapping = {standardise_chromosome(", "if chrom_interval.chrom is None: return self.__tabix_file.fetch() else: return self.__tabix_file.fetch( self.__contig_mapping.get( chrom_interval.chrom, chrom_interval.chrom), 
chrom_interval.interval.start,", "def fetch_region(self, region): try: return self.__tabix_file.fetch(region=region) except ValueError: raise StopIteration def close(self): self.__tabix_file.close()", "a ValueError if the chromosome specified is not # present in the index", "import standardise_chromosome import pysam class TabixWrapper(object): def __init__(self, tabix_filename): self.__tabix_file = pysam.Tabixfile(tabix_filename, 'r')", "StopIteration def fetch_region(self, region): try: return self.__tabix_file.fetch(region=region) except ValueError: raise StopIteration def close(self):", "All content Copyright (C) 2018 Genomics plc from wecall.genomics.chromosome import standardise_chromosome import pysam", "ValueError: raise StopIteration def close(self): self.__tabix_file.close() def __enter__(self): return self def __exit__(self, ex_type,", "in self.__tabix_file.contigs} @property def header(self): return (line for line in self.__tabix_file.header) @property def" ]
[ "import Category from simplestore.products.models.product import Product class CategoryAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} class", "admin from simplestore.products.models.category import Category from simplestore.products.models.product import Product class CategoryAdmin(admin.ModelAdmin): prepopulated_fields =", "simplestore.products.models.category import Category from simplestore.products.models.product import Product class CategoryAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)}", "class ProductAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} list_display = ('name', 'sku', 'price', 'slug', 'is_active',)", "simplestore.products.models.product import Product class CategoryAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} class ProductAdmin(admin.ModelAdmin): prepopulated_fields =", "ProductAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} list_display = ('name', 'sku', 'price', 'slug', 'is_active',) ordering", "django.contrib import admin from simplestore.products.models.category import Category from simplestore.products.models.product import Product class CategoryAdmin(admin.ModelAdmin):", "('name', 'sku', 'price', 'slug', 'is_active',) ordering = ['-is_active', 'name'] list_filter = ('is_active',) admin.site.register(Product,", "= ('name', 'sku', 'price', 'slug', 'is_active',) ordering = ['-is_active', 'name'] list_filter = ('is_active',)", "Product class CategoryAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} class ProductAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)}", "{\"slug\": ('name',)} list_display = ('name', 'sku', 'price', 'slug', 'is_active',) ordering = ['-is_active', 'name']", "prepopulated_fields = {\"slug\": ('name',)} class ProductAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} list_display = ('name',", "'sku', 'price', 'slug', 'is_active',) ordering = ['-is_active', 
'name'] list_filter = ('is_active',) admin.site.register(Product, ProductAdmin)", "'price', 'slug', 'is_active',) ordering = ['-is_active', 'name'] list_filter = ('is_active',) admin.site.register(Product, ProductAdmin) admin.site.register(Category,", "prepopulated_fields = {\"slug\": ('name',)} list_display = ('name', 'sku', 'price', 'slug', 'is_active',) ordering =", "('name',)} class ProductAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} list_display = ('name', 'sku', 'price', 'slug',", "import admin from simplestore.products.models.category import Category from simplestore.products.models.product import Product class CategoryAdmin(admin.ModelAdmin): prepopulated_fields", "('name',)} list_display = ('name', 'sku', 'price', 'slug', 'is_active',) ordering = ['-is_active', 'name'] list_filter", "from django.contrib import admin from simplestore.products.models.category import Category from simplestore.products.models.product import Product class", "= {\"slug\": ('name',)} list_display = ('name', 'sku', 'price', 'slug', 'is_active',) ordering = ['-is_active',", "CategoryAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} class ProductAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} list_display =", "{\"slug\": ('name',)} class ProductAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} list_display = ('name', 'sku', 'price',", "'slug', 'is_active',) ordering = ['-is_active', 'name'] list_filter = ('is_active',) admin.site.register(Product, ProductAdmin) admin.site.register(Category, CategoryAdmin)", "from simplestore.products.models.category import Category from simplestore.products.models.product import Product class CategoryAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\":", "<reponame>martinstastny/django-store from django.contrib import admin from simplestore.products.models.category import Category from simplestore.products.models.product import Product", "from 
simplestore.products.models.product import Product class CategoryAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} class ProductAdmin(admin.ModelAdmin): prepopulated_fields", "= {\"slug\": ('name',)} class ProductAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} list_display = ('name', 'sku',", "list_display = ('name', 'sku', 'price', 'slug', 'is_active',) ordering = ['-is_active', 'name'] list_filter =", "Category from simplestore.products.models.product import Product class CategoryAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} class ProductAdmin(admin.ModelAdmin):", "import Product class CategoryAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} class ProductAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\":", "class CategoryAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} class ProductAdmin(admin.ModelAdmin): prepopulated_fields = {\"slug\": ('name',)} list_display" ]
[ "Exception as exception: logging.debug('\"%s\" threw exception.', plugin.key, exc_info=exception) self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key, exception)) def _process_motors(self,", "self._termination is None: try: loop_start = datetime.now() state = self._build_loop_state() self._process_sensors(state) self._process_motors(state) self._disable_failing_plugins()", "continue if isinstance(plugin.instance, Motor): logging.debug(\"\\tFound motor plugin.\") motor_plugins.append(plugin) if isinstance(plugin.instance, Sensor): logging.debug(\"\\tFound sensor", "'wants_last_chance', 'path']) ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN = 10 MINIMAL_LOOP_DURATION = timedelta(seconds=0.2) class CoreApplication: def __init__(self, sensors,", "namedtuple('PluginInfo', ['name', 'key', 'instance', 'wants_last_chance', 'path']) ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN = 10 MINIMAL_LOOP_DURATION = timedelta(seconds=0.2) class", "self._update_runtime_statistics(loop_duration) if loop_duration < MINIMAL_LOOP_DURATION: time.sleep((MINIMAL_LOOP_DURATION - loop_duration).total_seconds()) except KeyboardInterrupt: self._termination = (None,", "loop_stop - loop_start self._update_runtime_statistics(loop_duration) if loop_duration < MINIMAL_LOOP_DURATION: time.sleep((MINIMAL_LOOP_DURATION - loop_duration).total_seconds()) except KeyboardInterrupt:", "+= 1 self._runtime_stats['average_loop_duration'] = self._total_loops_duration / self._runtime_stats['loop_counter'] self._runtime_stats['last_loop_duration'] = loop_duration def _build_loop_state(self): return", "loop_duration).total_seconds()) except KeyboardInterrupt: self._termination = (None, None, \"User interruption\") logging.info(\"Initiating shutdown procedure...\") terminal_state", "import logging from datetime import datetime, timedelta from yapsy.PluginManager import PluginManager from api.exceptions", "PluginManager() plugin_manager.setPluginPlaces(['plugins/motors', 
'plugins/sensors']) plugin_manager.collectPlugins() for plugin in plugin_manager.getAllPlugins(): name = plugin.name key =", "self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key, exception)) def _process_motors(self, state): for plugin in self._motors: if plugin.key in", "plugin.name key = plugin.details.get('Core', 'key') wants_last_chance = plugin.details.get('Core', 'last chance', fallback='').lower() == \"true\"", "logging.debug('Processing plugin %s (%s) <%s>...', plugin.key, plugin.name, type(plugin.instance)) if plugin.key in used_plugin_keys: logging.warning('Attempt", "issue is related to some bug in application, ' + 'please open issue", "len(self._motors): logging.warning('All plugins have been disabled. Terminating application..') break if state['errors']: logging.warning('Current loop", "fallback='').lower() == \"true\" instance = plugin.plugin_object path = plugin.path yield PluginDetails(name, key, instance,", "import Motor PluginDetails = namedtuple('PluginInfo', ['name', 'key', 'instance', 'wants_last_chance', 'path']) ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN = 10", "_build_loop_state(self): return { 'errors': [], 'now': datetime.now(), 'runtime': self._runtime_stats, 'disabled_plugins': self._disabled_plugins, 'termination': self._termination", "plugin in self._motors: if plugin.key in self._disabled_plugins: continue try: plugin.instance.on_trigger(state) except TerminateApplication as", "in self._disabled_plugins: continue try: state[plugin.key] = plugin.instance.get_state() except TerminateApplication as exception: self._termination =", "(None, None, \"User interruption\") logging.info(\"Initiating shutdown procedure...\") terminal_state = self._build_loop_state() for plugin in", "= PluginManager() plugin_manager.setPluginPlaces(['plugins/motors', 'plugins/sensors']) plugin_manager.collectPlugins() for plugin in plugin_manager.getAllPlugins(): name = plugin.name key", "self._motors: if plugin.key in 
self._disabled_plugins: continue try: plugin.instance.on_trigger(state) except TerminateApplication as exception: self._termination", "in self._sensors: if plugin.key in self._disabled_plugins: continue try: state[plugin.key] = plugin.instance.get_state() except TerminateApplication", "logging.warning('Attempt to load already loaded plugin. Duplicate: name=\"%s\", key=\"%s\", path \"%s\"', plugin.name, plugin.key,", "= timedelta(seconds=0.2) class CoreApplication: def __init__(self, sensors, motors): self._motors = motors self._sensors =", "self._sensors: if plugin.key in self._disabled_plugins: continue try: state[plugin.key] = plugin.instance.get_state() except TerminateApplication as", "for key in self._runtime_stats['errors']: if key in self._disabled_plugins: continue if len(self._runtime_stats['errors'][key]) > ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN:", "= plugin.plugin_object path = plugin.path yield PluginDetails(name, key, instance, wants_last_chance, path) def load_plugins(all_plugins):", "def _update_runtime_statistics(self, loop_duration): self._total_loops_duration += loop_duration self._runtime_stats['loop_counter'] += 1 self._runtime_stats['average_loop_duration'] = self._total_loops_duration /", "(None, None, \"User interruption\") except Exception as exception: logging.debug('\"%s\" threw exception.', plugin.key, exc_info=exception)", "timedelta() def _process_sensors(self, state): for plugin in self._sensors: if plugin.key in self._disabled_plugins: continue", "import time import logging from datetime import datetime, timedelta from yapsy.PluginManager import PluginManager", "continue try: plugin.instance.on_trigger(state) except TerminateApplication as exception: self._termination = (plugin.key, type(plugin.instance), exception.reason) except", "== \"true\" instance = plugin.plugin_object path = plugin.path yield PluginDetails(name, key, instance, wants_last_chance,", "plugin in all_plugins: logging.debug('Processing plugin %s (%s) <%s>...', 
plugin.key, plugin.name, type(plugin.instance)) if plugin.key", "sensor_plugins, motor_plugins def main(): all_plugins = collect_all_plugins() sensors, motors = load_plugins(all_plugins) app =", "= collect_all_plugins() sensors, motors = load_plugins(all_plugins) app = CoreApplication(sensors=sensors, motors=motors) app.start_main_loop() if __name__", "logging.info(repr(self._runtime_stats)) def collect_all_plugins(): plugin_manager = PluginManager() plugin_manager.setPluginPlaces(['plugins/motors', 'plugins/sensors']) plugin_manager.collectPlugins() for plugin in plugin_manager.getAllPlugins():", "import datetime, timedelta from yapsy.PluginManager import PluginManager from api.exceptions import TerminateApplication from api.sensor", "'start_time': datetime.now(), 'loop_counter': 0, 'errors': defaultdict(list), 'average_loop_duration': timedelta(seconds=0), 'last_loop_duration': timedelta(seconds=0) } self._termination =", "continue if len(self._runtime_stats['errors'][key]) > ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN: logging.warning('Disabling plugin due to repeating failures: %s', key)", "exceptions: %s', repr(state['errors'])) loop_stop = datetime.now() loop_duration = loop_stop - loop_start self._update_runtime_statistics(loop_duration) if", "interruption\") logging.info(\"Initiating shutdown procedure...\") terminal_state = self._build_loop_state() for plugin in self._motors: if plugin.key", "plugin.key in self._disabled_plugins or not plugin.wants_last_chance: continue try: logging.debug('Executing last chance motor: %s',", "loop_stop = datetime.now() loop_duration = loop_stop - loop_start self._update_runtime_statistics(loop_duration) if loop_duration < MINIMAL_LOOP_DURATION:", "chance motor: %s', plugin.key) plugin.instance.on_trigger(terminal_state) except Exception as exception: self._runtime_stats['errors'][plugin.key].append(exception) logging.info(\"Shutdown complete.\") logging.info(repr(self._runtime_stats))", "for plugin in 
plugin_manager.getAllPlugins(): name = plugin.name key = plugin.details.get('Core', 'key') wants_last_chance =", "self._runtime_stats = { 'start_time': datetime.now(), 'loop_counter': 0, 'errors': defaultdict(list), 'average_loop_duration': timedelta(seconds=0), 'last_loop_duration': timedelta(seconds=0)", "or not plugin.wants_last_chance: continue try: logging.debug('Executing last chance motor: %s', plugin.key) plugin.instance.on_trigger(terminal_state) except", "PluginDetails(name, key, instance, wants_last_chance, path) def load_plugins(all_plugins): used_plugin_keys = set() motor_plugins = []", "plugin.name, plugin.key, plugin.path) continue if isinstance(plugin.instance, Motor): logging.debug(\"\\tFound motor plugin.\") motor_plugins.append(plugin) if isinstance(plugin.instance,", "plugin.key) sensor_plugins.append(plugin) used_plugin_keys.add(plugin.key) return sensor_plugins, motor_plugins def main(): all_plugins = collect_all_plugins() sensors, motors", "in self._motors: if plugin.key in self._disabled_plugins: continue try: plugin.instance.on_trigger(state) except TerminateApplication as exception:", "if key in self._disabled_plugins: continue if len(self._runtime_stats['errors'][key]) > ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN: logging.warning('Disabling plugin due to", "application..') break if state['errors']: logging.warning('Current loop was interrupted by following exceptions: %s', repr(state['errors']))", "_process_sensors(self, state): for plugin in self._sensors: if plugin.key in self._disabled_plugins: continue try: state[plugin.key]", "been disabled. 
Terminating application..') break if state['errors']: logging.warning('Current loop was interrupted by following", "import namedtuple, defaultdict import time import logging from datetime import datetime, timedelta from", "motor_plugins def main(): all_plugins = collect_all_plugins() sensors, motors = load_plugins(all_plugins) app = CoreApplication(sensors=sensors,", "None self._total_loops_duration = timedelta() def _process_sensors(self, state): for plugin in self._sensors: if plugin.key", "self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key, exception)) def _disable_failing_plugins(self): for key in self._runtime_stats['errors']: if key in self._disabled_plugins:", "plugin_manager.collectPlugins() for plugin in plugin_manager.getAllPlugins(): name = plugin.name key = plugin.details.get('Core', 'key') wants_last_chance", "if state['errors']: logging.warning('Current loop was interrupted by following exceptions: %s', repr(state['errors'])) loop_stop =", "== \"__main__\": logging.basicConfig(level=logging.DEBUG, format='[%(asctime)s][%(relativeCreated)d][%(levelname)s][%(module)s] %(message)s') try: main() except Exception as e: logging.error('Unexpected error", "= plugin.name key = plugin.details.get('Core', 'key') wants_last_chance = plugin.details.get('Core', 'last chance', fallback='').lower() ==", "'instance', 'wants_last_chance', 'path']) ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN = 10 MINIMAL_LOOP_DURATION = timedelta(seconds=0.2) class CoreApplication: def __init__(self,", "procedure...\") terminal_state = self._build_loop_state() for plugin in self._motors: if plugin.key in self._disabled_plugins or", "some bug in application, ' + 'please open issue with exception details at", "used_plugin_keys: logging.warning('Attempt to load already loaded plugin. 
Duplicate: name=\"%s\", key=\"%s\", path \"%s\"', plugin.name,", "/ self._runtime_stats['loop_counter'] self._runtime_stats['last_loop_duration'] = loop_duration def _build_loop_state(self): return { 'errors': [], 'now': datetime.now(),", "plugin.wants_last_chance: continue try: logging.debug('Executing last chance motor: %s', plugin.key) plugin.instance.on_trigger(terminal_state) except Exception as", "[] for plugin in all_plugins: logging.debug('Processing plugin %s (%s) <%s>...', plugin.key, plugin.name, type(plugin.instance))", "exception: logging.debug('\"%s\" threw exception.', plugin.key, exc_info=exception) self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key, exception)) def _process_motors(self, state): for", "except KeyboardInterrupt: self._termination = (None, None, \"User interruption\") except Exception as exception: logging.debug('\"%s\"", "logging.debug('\"%s\" threw exception.', plugin.key, exc_info=exception) self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key, exception)) def _disable_failing_plugins(self): for key in", "exception)) def _process_motors(self, state): for plugin in self._motors: if plugin.key in self._disabled_plugins: continue", "self._termination = None self._total_loops_duration = timedelta() def _process_sensors(self, state): for plugin in self._sensors:", "api.sensor import Sensor from api.motor import Motor PluginDetails = namedtuple('PluginInfo', ['name', 'key', 'instance',", "key) self._disabled_plugins.add(key) def _update_runtime_statistics(self, loop_duration): self._total_loops_duration += loop_duration self._runtime_stats['loop_counter'] += 1 self._runtime_stats['average_loop_duration'] =", "isinstance(plugin.instance, Motor): logging.debug(\"\\tFound motor plugin.\") motor_plugins.append(plugin) if isinstance(plugin.instance, Sensor): logging.debug(\"\\tFound sensor plugin with", "\"%s\"', plugin.name, plugin.key, plugin.path) 
continue if isinstance(plugin.instance, Motor): logging.debug(\"\\tFound motor plugin.\") motor_plugins.append(plugin) if", "if __name__ == \"__main__\": logging.basicConfig(level=logging.DEBUG, format='[%(asctime)s][%(relativeCreated)d][%(levelname)s][%(module)s] %(message)s') try: main() except Exception as e:", "instance, wants_last_chance, path) def load_plugins(all_plugins): used_plugin_keys = set() motor_plugins = [] sensor_plugins =", "app.start_main_loop() if __name__ == \"__main__\": logging.basicConfig(level=logging.DEBUG, format='[%(asctime)s][%(relativeCreated)d][%(levelname)s][%(module)s] %(message)s') try: main() except Exception as", "'plugins/sensors']) plugin_manager.collectPlugins() for plugin in plugin_manager.getAllPlugins(): name = plugin.name key = plugin.details.get('Core', 'key')", "try: loop_start = datetime.now() state = self._build_loop_state() self._process_sensors(state) self._process_motors(state) self._disable_failing_plugins() if len(self._disabled_plugins) ==", "as e: logging.error('Unexpected error occurred. If you believe issue is related to some", "datetime import datetime, timedelta from yapsy.PluginManager import PluginManager from api.exceptions import TerminateApplication from", "self._termination = (None, None, \"User interruption\") logging.info(\"Initiating shutdown procedure...\") terminal_state = self._build_loop_state() for", "plugin.name, type(plugin.instance)) if plugin.key in used_plugin_keys: logging.warning('Attempt to load already loaded plugin. 
Duplicate:", "plugin.details.get('Core', 'last chance', fallback='').lower() == \"true\" instance = plugin.plugin_object path = plugin.path yield", "in self._disabled_plugins or not plugin.wants_last_chance: continue try: logging.debug('Executing last chance motor: %s', plugin.key)", "exception.reason) except KeyboardInterrupt: self._termination = (None, None, \"User interruption\") except Exception as exception:", "self._total_loops_duration += loop_duration self._runtime_stats['loop_counter'] += 1 self._runtime_stats['average_loop_duration'] = self._total_loops_duration / self._runtime_stats['loop_counter'] self._runtime_stats['last_loop_duration'] =", "exception: self._runtime_stats['errors'][plugin.key].append(exception) logging.info(\"Shutdown complete.\") logging.info(repr(self._runtime_stats)) def collect_all_plugins(): plugin_manager = PluginManager() plugin_manager.setPluginPlaces(['plugins/motors', 'plugins/sensors']) plugin_manager.collectPlugins()", "'last chance', fallback='').lower() == \"true\" instance = plugin.plugin_object path = plugin.path yield PluginDetails(name,", "class CoreApplication: def __init__(self, sensors, motors): self._motors = motors self._sensors = sensors self._disabled_plugins", "type(plugin.instance), exception.reason) except KeyboardInterrupt: self._termination = (None, None, \"User interruption\") except Exception as", "'disabled_plugins': self._disabled_plugins, 'termination': self._termination } def start_main_loop(self): while self._termination is None: try: loop_start", "is None: try: loop_start = datetime.now() state = self._build_loop_state() self._process_sensors(state) self._process_motors(state) self._disable_failing_plugins() if", "instance = plugin.plugin_object path = plugin.path yield PluginDetails(name, key, instance, wants_last_chance, path) def", "plugin due to repeating failures: %s', key) self._disabled_plugins.add(key) def _update_runtime_statistics(self, loop_duration): self._total_loops_duration +=", 
"plugin with key: %s\", plugin.key) sensor_plugins.append(plugin) used_plugin_keys.add(plugin.key) return sensor_plugins, motor_plugins def main(): all_plugins", "%s', repr(state['errors'])) loop_stop = datetime.now() loop_duration = loop_stop - loop_start self._update_runtime_statistics(loop_duration) if loop_duration", "due to repeating failures: %s', key) self._disabled_plugins.add(key) def _update_runtime_statistics(self, loop_duration): self._total_loops_duration += loop_duration", "related to some bug in application, ' + 'please open issue with exception", "collect_all_plugins() sensors, motors = load_plugins(all_plugins) app = CoreApplication(sensors=sensors, motors=motors) app.start_main_loop() if __name__ ==", "loop was interrupted by following exceptions: %s', repr(state['errors'])) loop_stop = datetime.now() loop_duration =", "key=\"%s\", path \"%s\"', plugin.name, plugin.key, plugin.path) continue if isinstance(plugin.instance, Motor): logging.debug(\"\\tFound motor plugin.\")", "load_plugins(all_plugins) app = CoreApplication(sensors=sensors, motors=motors) app.start_main_loop() if __name__ == \"__main__\": logging.basicConfig(level=logging.DEBUG, format='[%(asctime)s][%(relativeCreated)d][%(levelname)s][%(module)s] %(message)s')", "self._process_sensors(state) self._process_motors(state) self._disable_failing_plugins() if len(self._disabled_plugins) == len(self._sensors) + len(self._motors): logging.warning('All plugins have been", "self._disabled_plugins or not plugin.wants_last_chance: continue try: logging.debug('Executing last chance motor: %s', plugin.key) plugin.instance.on_trigger(terminal_state)", "Sensor): logging.debug(\"\\tFound sensor plugin with key: %s\", plugin.key) sensor_plugins.append(plugin) used_plugin_keys.add(plugin.key) return sensor_plugins, motor_plugins", "%s (%s) <%s>...', plugin.key, plugin.name, type(plugin.instance)) if plugin.key in used_plugin_keys: logging.warning('Attempt to load", "motors self._sensors = sensors 
self._disabled_plugins = set() self._runtime_stats = { 'start_time': datetime.now(), 'loop_counter':", "main(): all_plugins = collect_all_plugins() sensors, motors = load_plugins(all_plugins) app = CoreApplication(sensors=sensors, motors=motors) app.start_main_loop()", "format='[%(asctime)s][%(relativeCreated)d][%(levelname)s][%(module)s] %(message)s') try: main() except Exception as e: logging.error('Unexpected error occurred. If you", "def start_main_loop(self): while self._termination is None: try: loop_start = datetime.now() state = self._build_loop_state()", "[] sensor_plugins = [] for plugin in all_plugins: logging.debug('Processing plugin %s (%s) <%s>...',", "%(message)s') try: main() except Exception as e: logging.error('Unexpected error occurred. If you believe", "Sensor from api.motor import Motor PluginDetails = namedtuple('PluginInfo', ['name', 'key', 'instance', 'wants_last_chance', 'path'])", "'last_loop_duration': timedelta(seconds=0) } self._termination = None self._total_loops_duration = timedelta() def _process_sensors(self, state): for", "TerminateApplication from api.sensor import Sensor from api.motor import Motor PluginDetails = namedtuple('PluginInfo', ['name',", "plugin.instance.get_state() except TerminateApplication as exception: self._termination = (plugin.key, type(plugin.instance), exception.reason) except KeyboardInterrupt: self._termination", "Motor): logging.debug(\"\\tFound motor plugin.\") motor_plugins.append(plugin) if isinstance(plugin.instance, Sensor): logging.debug(\"\\tFound sensor plugin with key:", "'errors': defaultdict(list), 'average_loop_duration': timedelta(seconds=0), 'last_loop_duration': timedelta(seconds=0) } self._termination = None self._total_loops_duration = timedelta()", "sensors, motors): self._motors = motors self._sensors = sensors self._disabled_plugins = set() self._runtime_stats =", "{ 'start_time': datetime.now(), 'loop_counter': 0, 'errors': defaultdict(list), 'average_loop_duration': 
timedelta(seconds=0), 'last_loop_duration': timedelta(seconds=0) } self._termination", "logging.debug(\"\\tFound motor plugin.\") motor_plugins.append(plugin) if isinstance(plugin.instance, Sensor): logging.debug(\"\\tFound sensor plugin with key: %s\",", "you believe issue is related to some bug in application, ' + 'please", "= [] for plugin in all_plugins: logging.debug('Processing plugin %s (%s) <%s>...', plugin.key, plugin.name,", "exception: self._termination = (plugin.key, type(plugin.instance), exception.reason) except KeyboardInterrupt: self._termination = (None, None, \"User", "} def start_main_loop(self): while self._termination is None: try: loop_start = datetime.now() state =", "KeyboardInterrupt: self._termination = (None, None, \"User interruption\") logging.info(\"Initiating shutdown procedure...\") terminal_state = self._build_loop_state()", "loop_duration self._runtime_stats['loop_counter'] += 1 self._runtime_stats['average_loop_duration'] = self._total_loops_duration / self._runtime_stats['loop_counter'] self._runtime_stats['last_loop_duration'] = loop_duration def", "self._process_motors(state) self._disable_failing_plugins() if len(self._disabled_plugins) == len(self._sensors) + len(self._motors): logging.warning('All plugins have been disabled.", "from api.exceptions import TerminateApplication from api.sensor import Sensor from api.motor import Motor PluginDetails", "datetime, timedelta from yapsy.PluginManager import PluginManager from api.exceptions import TerminateApplication from api.sensor import", "logging.warning('Disabling plugin due to repeating failures: %s', key) self._disabled_plugins.add(key) def _update_runtime_statistics(self, loop_duration): self._total_loops_duration", "is related to some bug in application, ' + 'please open issue with", "complete.\") logging.info(repr(self._runtime_stats)) def collect_all_plugins(): plugin_manager = PluginManager() plugin_manager.setPluginPlaces(['plugins/motors', 'plugins/sensors']) 
plugin_manager.collectPlugins() for plugin in", "\"User interruption\") except Exception as exception: logging.debug('\"%s\" threw exception.', plugin.key, exc_info=exception) self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key,", "timedelta(seconds=0), 'last_loop_duration': timedelta(seconds=0) } self._termination = None self._total_loops_duration = timedelta() def _process_sensors(self, state):", "= loop_stop - loop_start self._update_runtime_statistics(loop_duration) if loop_duration < MINIMAL_LOOP_DURATION: time.sleep((MINIMAL_LOOP_DURATION - loop_duration).total_seconds()) except", "plugin in self._sensors: if plugin.key in self._disabled_plugins: continue try: state[plugin.key] = plugin.instance.get_state() except", "= None self._total_loops_duration = timedelta() def _process_sensors(self, state): for plugin in self._sensors: if", "PluginDetails = namedtuple('PluginInfo', ['name', 'key', 'instance', 'wants_last_chance', 'path']) ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN = 10 MINIMAL_LOOP_DURATION =", "state['errors'].append((plugin.key, exception)) def _process_motors(self, state): for plugin in self._motors: if plugin.key in self._disabled_plugins:", "believe issue is related to some bug in application, ' + 'please open", "set() self._runtime_stats = { 'start_time': datetime.now(), 'loop_counter': 0, 'errors': defaultdict(list), 'average_loop_duration': timedelta(seconds=0), 'last_loop_duration':", "self._runtime_stats['errors']: if key in self._disabled_plugins: continue if len(self._runtime_stats['errors'][key]) > ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN: logging.warning('Disabling plugin due", "def _disable_failing_plugins(self): for key in self._runtime_stats['errors']: if key in self._disabled_plugins: continue if len(self._runtime_stats['errors'][key])", "loop_duration def _build_loop_state(self): return { 'errors': [], 'now': datetime.now(), 'runtime': self._runtime_stats, 'disabled_plugins': 
self._disabled_plugins,", "%s', plugin.key) plugin.instance.on_trigger(terminal_state) except Exception as exception: self._runtime_stats['errors'][plugin.key].append(exception) logging.info(\"Shutdown complete.\") logging.info(repr(self._runtime_stats)) def collect_all_plugins():", "_process_motors(self, state): for plugin in self._motors: if plugin.key in self._disabled_plugins: continue try: plugin.instance.on_trigger(state)", "import PluginManager from api.exceptions import TerminateApplication from api.sensor import Sensor from api.motor import", "\"__main__\": logging.basicConfig(level=logging.DEBUG, format='[%(asctime)s][%(relativeCreated)d][%(levelname)s][%(module)s] %(message)s') try: main() except Exception as e: logging.error('Unexpected error occurred.", "plugin %s (%s) <%s>...', plugin.key, plugin.name, type(plugin.instance)) if plugin.key in used_plugin_keys: logging.warning('Attempt to", "from yapsy.PluginManager import PluginManager from api.exceptions import TerminateApplication from api.sensor import Sensor from", "sensor_plugins = [] for plugin in all_plugins: logging.debug('Processing plugin %s (%s) <%s>...', plugin.key,", "_disable_failing_plugins(self): for key in self._runtime_stats['errors']: if key in self._disabled_plugins: continue if len(self._runtime_stats['errors'][key]) >", "api.motor import Motor PluginDetails = namedtuple('PluginInfo', ['name', 'key', 'instance', 'wants_last_chance', 'path']) ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN =", "self._termination } def start_main_loop(self): while self._termination is None: try: loop_start = datetime.now() state", "as exception: self._termination = (plugin.key, type(plugin.instance), exception.reason) except KeyboardInterrupt: self._termination = (None, None,", "path = plugin.path yield PluginDetails(name, key, instance, wants_last_chance, path) def load_plugins(all_plugins): used_plugin_keys =", "def _process_motors(self, state): for plugin in self._motors: if plugin.key in 
self._disabled_plugins: continue try:", "sensors self._disabled_plugins = set() self._runtime_stats = { 'start_time': datetime.now(), 'loop_counter': 0, 'errors': defaultdict(list),", "threw exception.', plugin.key, exc_info=exception) self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key, exception)) def _process_motors(self, state): for plugin in", "if plugin.key in self._disabled_plugins or not plugin.wants_last_chance: continue try: logging.debug('Executing last chance motor:", "wants_last_chance, path) def load_plugins(all_plugins): used_plugin_keys = set() motor_plugins = [] sensor_plugins = []", "as exception: self._runtime_stats['errors'][plugin.key].append(exception) logging.info(\"Shutdown complete.\") logging.info(repr(self._runtime_stats)) def collect_all_plugins(): plugin_manager = PluginManager() plugin_manager.setPluginPlaces(['plugins/motors', 'plugins/sensors'])", "loop_duration < MINIMAL_LOOP_DURATION: time.sleep((MINIMAL_LOOP_DURATION - loop_duration).total_seconds()) except KeyboardInterrupt: self._termination = (None, None, \"User", "sensor plugin with key: %s\", plugin.key) sensor_plugins.append(plugin) used_plugin_keys.add(plugin.key) return sensor_plugins, motor_plugins def main():", "continue try: state[plugin.key] = plugin.instance.get_state() except TerminateApplication as exception: self._termination = (plugin.key, type(plugin.instance),", "state): for plugin in self._sensors: if plugin.key in self._disabled_plugins: continue try: state[plugin.key] =", "exception.', plugin.key, exc_info=exception) self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key, exception)) def _disable_failing_plugins(self): for key in self._runtime_stats['errors']: if", "was interrupted by following exceptions: %s', repr(state['errors'])) loop_stop = datetime.now() loop_duration = loop_stop", "Exception as exception: self._runtime_stats['errors'][plugin.key].append(exception) 
logging.info(\"Shutdown complete.\") logging.info(repr(self._runtime_stats)) def collect_all_plugins(): plugin_manager = PluginManager() plugin_manager.setPluginPlaces(['plugins/motors',", "__name__ == \"__main__\": logging.basicConfig(level=logging.DEBUG, format='[%(asctime)s][%(relativeCreated)d][%(levelname)s][%(module)s] %(message)s') try: main() except Exception as e: logging.error('Unexpected", "set() motor_plugins = [] sensor_plugins = [] for plugin in all_plugins: logging.debug('Processing plugin", "CoreApplication(sensors=sensors, motors=motors) app.start_main_loop() if __name__ == \"__main__\": logging.basicConfig(level=logging.DEBUG, format='[%(asctime)s][%(relativeCreated)d][%(levelname)s][%(module)s] %(message)s') try: main() except", "= (plugin.key, type(plugin.instance), exception.reason) except KeyboardInterrupt: self._termination = (None, None, \"User interruption\") except", "except TerminateApplication as exception: self._termination = (plugin.key, type(plugin.instance), exception.reason) except KeyboardInterrupt: self._termination =", "from collections import namedtuple, defaultdict import time import logging from datetime import datetime,", "plugin.key, exc_info=exception) self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key, exception)) def _disable_failing_plugins(self): for key in self._runtime_stats['errors']: if key", "timedelta(seconds=0) } self._termination = None self._total_loops_duration = timedelta() def _process_sensors(self, state): for plugin", "logging.info(\"Initiating shutdown procedure...\") terminal_state = self._build_loop_state() for plugin in self._motors: if plugin.key in", "motors): self._motors = motors self._sensors = sensors self._disabled_plugins = set() self._runtime_stats = {", "= set() self._runtime_stats = { 'start_time': datetime.now(), 'loop_counter': 0, 'errors': defaultdict(list), 'average_loop_duration': timedelta(seconds=0),", "shutdown procedure...\") 
terminal_state = self._build_loop_state() for plugin in self._motors: if plugin.key in self._disabled_plugins", "plugin.key) plugin.instance.on_trigger(terminal_state) except Exception as exception: self._runtime_stats['errors'][plugin.key].append(exception) logging.info(\"Shutdown complete.\") logging.info(repr(self._runtime_stats)) def collect_all_plugins(): plugin_manager", "plugin.instance.on_trigger(state) except TerminateApplication as exception: self._termination = (plugin.key, type(plugin.instance), exception.reason) except KeyboardInterrupt: self._termination", "= self._total_loops_duration / self._runtime_stats['loop_counter'] self._runtime_stats['last_loop_duration'] = loop_duration def _build_loop_state(self): return { 'errors': [],", "exception)) def _disable_failing_plugins(self): for key in self._runtime_stats['errors']: if key in self._disabled_plugins: continue if", "while self._termination is None: try: loop_start = datetime.now() state = self._build_loop_state() self._process_sensors(state) self._process_motors(state)", "(plugin.key, type(plugin.instance), exception.reason) except KeyboardInterrupt: self._termination = (None, None, \"User interruption\") except Exception", "as exception: logging.debug('\"%s\" threw exception.', plugin.key, exc_info=exception) self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key, exception)) def _disable_failing_plugins(self): for", "ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN = 10 MINIMAL_LOOP_DURATION = timedelta(seconds=0.2) class CoreApplication: def __init__(self, sensors, motors): self._motors", "by following exceptions: %s', repr(state['errors'])) loop_stop = datetime.now() loop_duration = loop_stop - loop_start", "self._total_loops_duration / self._runtime_stats['loop_counter'] self._runtime_stats['last_loop_duration'] = loop_duration def _build_loop_state(self): return { 'errors': [], 'now':", "None, \"User interruption\") logging.info(\"Initiating shutdown 
class CoreApplication:
    """Main plugin loop: polls sensors, feeds their state to motors.

    Each iteration builds a fresh ``state`` dict, lets every enabled sensor
    write its reading into it, then hands the dict to every enabled motor.
    Plugins that raise more than ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN
    unhandled exceptions are disabled.  The loop ends when a plugin raises
    TerminateApplication, the user interrupts, or every plugin is disabled.
    """

    def __init__(self, sensors, motors):
        """sensors/motors: lists of PluginDetails records (see load_plugins)."""
        self._motors = motors
        self._sensors = sensors
        self._disabled_plugins = set()  # keys of plugins excluded from the loop
        self._runtime_stats = {
            'start_time': datetime.now(),
            'loop_counter': 0,
            'errors': defaultdict(list),  # plugin key -> exceptions raised so far
            'average_loop_duration': timedelta(seconds=0),
            'last_loop_duration': timedelta(seconds=0)
        }
        self._termination = None  # (plugin key, plugin type, reason) once requested
        self._total_loops_duration = timedelta()

    def _process_sensors(self, state):
        """Store each enabled sensor's reading under its key in *state*."""
        for plugin in self._sensors:
            if plugin.key in self._disabled_plugins:
                continue
            try:
                state[plugin.key] = plugin.instance.get_state()
            except TerminateApplication as exception:
                self._termination = (plugin.key, type(plugin.instance), exception.reason)
            except KeyboardInterrupt:
                self._termination = (None, None, "User interruption")
            except Exception as exception:
                # Record the failure; _disable_failing_plugins() acts on it later.
                logging.debug('"%s" threw exception.', plugin.key, exc_info=exception)
                self._runtime_stats['errors'][plugin.key].append(exception)
                state['errors'].append((plugin.key, exception))

    def _process_motors(self, state):
        """Trigger each enabled motor with the current loop *state*."""
        for plugin in self._motors:
            if plugin.key in self._disabled_plugins:
                continue
            try:
                plugin.instance.on_trigger(state)
            except TerminateApplication as exception:
                self._termination = (plugin.key, type(plugin.instance), exception.reason)
            except KeyboardInterrupt:
                self._termination = (None, None, "User interruption")
            except Exception as exception:
                logging.debug('"%s" threw exception.', plugin.key, exc_info=exception)
                self._runtime_stats['errors'][plugin.key].append(exception)
                state['errors'].append((plugin.key, exception))

    def _disable_failing_plugins(self):
        """Disable any plugin whose accumulated error count exceeds the limit."""
        for key in self._runtime_stats['errors']:
            if key in self._disabled_plugins:
                continue
            if len(self._runtime_stats['errors'][key]) > ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN:
                logging.warning('Disabling plugin due to repeating failures: %s', key)
                self._disabled_plugins.add(key)

    def _update_runtime_statistics(self, loop_duration):
        """Fold one iteration's duration into the running average/last stats."""
        self._total_loops_duration += loop_duration
        self._runtime_stats['loop_counter'] += 1
        self._runtime_stats['average_loop_duration'] = \
            self._total_loops_duration / self._runtime_stats['loop_counter']
        self._runtime_stats['last_loop_duration'] = loop_duration

    def _build_loop_state(self):
        """Return the fresh state dict handed to sensors and motors this iteration."""
        return {
            'errors': [],
            'now': datetime.now(),
            'runtime': self._runtime_stats,
            'disabled_plugins': self._disabled_plugins,
            'termination': self._termination
        }

    def start_main_loop(self):
        """Run the loop until termination, then give last-chance motors a final call."""
        while self._termination is None:
            try:
                loop_start = datetime.now()
                state = self._build_loop_state()
                self._process_sensors(state)
                self._process_motors(state)
                self._disable_failing_plugins()

                # BUGFIX: compare against the number of *distinct* plugin keys.
                # A single plugin may act as both sensor and motor (load_plugins
                # appends the same record to both lists), in which case
                # len(sensors) + len(motors) overcounts unique keys and the old
                # equality test could never fire, spinning forever with every
                # plugin disabled.
                active_keys = {plugin.key for plugin in self._sensors}
                active_keys.update(plugin.key for plugin in self._motors)
                if len(self._disabled_plugins) >= len(active_keys):
                    logging.warning('All plugins have been disabled. Terminating application..')
                    break

                if state['errors']:
                    logging.warning('Current loop was interrupted by following exceptions: %s',
                                    repr(state['errors']))

                loop_duration = datetime.now() - loop_start
                self._update_runtime_statistics(loop_duration)
                # Throttle: keep every iteration at least MINIMAL_LOOP_DURATION long.
                if loop_duration < MINIMAL_LOOP_DURATION:
                    time.sleep((MINIMAL_LOOP_DURATION - loop_duration).total_seconds())
            except KeyboardInterrupt:
                self._termination = (None, None, "User interruption")

        logging.info("Initiating shutdown procedure...")
        terminal_state = self._build_loop_state()
        for plugin in self._motors:
            if plugin.key in self._disabled_plugins or not plugin.wants_last_chance:
                continue
            try:
                logging.debug('Executing last chance motor: %s', plugin.key)
                plugin.instance.on_trigger(terminal_state)
            except Exception as exception:
                self._runtime_stats['errors'][plugin.key].append(exception)
        logging.info("Shutdown complete.")
        logging.info(repr(self._runtime_stats))
def collect_all_plugins():
    """Discover every plugin under plugins/motors and plugins/sensors.

    Yields one PluginDetails record per discovered plugin; the 'last chance'
    flag comes from the plugin's [Core] config section and is true only for
    the literal string "true" (case-insensitive).
    """
    manager = PluginManager()
    manager.setPluginPlaces(['plugins/motors', 'plugins/sensors'])
    manager.collectPlugins()
    for found in manager.getAllPlugins():
        plugin_key = found.details.get('Core', 'key')
        last_chance = found.details.get('Core', 'last chance', fallback='').lower() == "true"
        yield PluginDetails(found.name, plugin_key, found.plugin_object,
                            last_chance, found.path)
def load_plugins(all_plugins):
    """Partition plugin records into (sensor_plugins, motor_plugins).

    A record may land in both lists when its instance is both a Sensor and a
    Motor.  Records whose key was already seen are logged and skipped.
    """
    seen_keys = set()
    motors = []
    sensors = []

    for candidate in all_plugins:
        logging.debug('Processing plugin %s (%s) <%s>...',
                      candidate.key, candidate.name, type(candidate.instance))
        if candidate.key in seen_keys:
            logging.warning('Attempt to load already loaded plugin. Duplicate: name="%s", key="%s", path "%s"',
                            candidate.name, candidate.key, candidate.path)
            continue
        if isinstance(candidate.instance, Motor):
            logging.debug("\tFound motor plugin.")
            motors.append(candidate)
        if isinstance(candidate.instance, Sensor):
            logging.debug("\tFound sensor plugin with key: %s", candidate.key)
            sensors.append(candidate)
        seen_keys.add(candidate.key)

    return sensors, motors
Duplicate: name=\"%s\", key=\"%s\", path \"%s\"', plugin.name, plugin.key, plugin.path) continue if isinstance(plugin.instance, Motor):", "key = plugin.details.get('Core', 'key') wants_last_chance = plugin.details.get('Core', 'last chance', fallback='').lower() == \"true\" instance", "Exception as exception: logging.debug('\"%s\" threw exception.', plugin.key, exc_info=exception) self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key, exception)) def _disable_failing_plugins(self):", "interruption\") except Exception as exception: logging.debug('\"%s\" threw exception.', plugin.key, exc_info=exception) self._runtime_stats['errors'][plugin.key].append(exception) state['errors'].append((plugin.key, exception))", "datetime.now() state = self._build_loop_state() self._process_sensors(state) self._process_motors(state) self._disable_failing_plugins() if len(self._disabled_plugins) == len(self._sensors) + len(self._motors):", "for plugin in self._sensors: if plugin.key in self._disabled_plugins: continue try: state[plugin.key] = plugin.instance.get_state()", "< MINIMAL_LOOP_DURATION: time.sleep((MINIMAL_LOOP_DURATION - loop_duration).total_seconds()) except KeyboardInterrupt: self._termination = (None, None, \"User interruption\")", "return { 'errors': [], 'now': datetime.now(), 'runtime': self._runtime_stats, 'disabled_plugins': self._disabled_plugins, 'termination': self._termination }", "logging.info(\"Shutdown complete.\") logging.info(repr(self._runtime_stats)) def collect_all_plugins(): plugin_manager = PluginManager() plugin_manager.setPluginPlaces(['plugins/motors', 'plugins/sensors']) plugin_manager.collectPlugins() for plugin", "'path']) ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN = 10 MINIMAL_LOOP_DURATION = timedelta(seconds=0.2) class CoreApplication: def __init__(self, sensors, motors):", "in application, ' + 'please open issue with exception details at https://github.com/sceeter89/command-center/issues', exc_info=e)", 
"in self._runtime_stats['errors']: if key in self._disabled_plugins: continue if len(self._runtime_stats['errors'][key]) > ALLOWED_UNHANDLED_EXCEPTIONS_PER_PLUGIN: logging.warning('Disabling plugin", "datetime.now(), 'runtime': self._runtime_stats, 'disabled_plugins': self._disabled_plugins, 'termination': self._termination } def start_main_loop(self): while self._termination is", "state['errors']: logging.warning('Current loop was interrupted by following exceptions: %s', repr(state['errors'])) loop_stop = datetime.now()", "if loop_duration < MINIMAL_LOOP_DURATION: time.sleep((MINIMAL_LOOP_DURATION - loop_duration).total_seconds()) except KeyboardInterrupt: self._termination = (None, None,", "if len(self._disabled_plugins) == len(self._sensors) + len(self._motors): logging.warning('All plugins have been disabled. Terminating application..')", "def __init__(self, sensors, motors): self._motors = motors self._sensors = sensors self._disabled_plugins = set()", "break if state['errors']: logging.warning('Current loop was interrupted by following exceptions: %s', repr(state['errors'])) loop_stop", "in self._motors: if plugin.key in self._disabled_plugins or not plugin.wants_last_chance: continue try: logging.debug('Executing last", "plugin.plugin_object path = plugin.path yield PluginDetails(name, key, instance, wants_last_chance, path) def load_plugins(all_plugins): used_plugin_keys", "= plugin.details.get('Core', 'key') wants_last_chance = plugin.details.get('Core', 'last chance', fallback='').lower() == \"true\" instance =", "== len(self._sensors) + len(self._motors): logging.warning('All plugins have been disabled. 
Terminating application..') break if", "collections import namedtuple, defaultdict import time import logging from datetime import datetime, timedelta", "try: state[plugin.key] = plugin.instance.get_state() except TerminateApplication as exception: self._termination = (plugin.key, type(plugin.instance), exception.reason)", "self._disabled_plugins: continue try: state[plugin.key] = plugin.instance.get_state() except TerminateApplication as exception: self._termination = (plugin.key," ]
[ "13, 15, 22, 29] def __call__(self, xs): assert xs.dim() == 4 xs =", "from torchvision import models from torchvision.transforms import transforms import util class VGGFeatureExtractor(nn.Module): def", "xs = self.normalize(xs) feats = [xs] for i, layer in enumerate(self._vgg): xs =", "6, 8, 11, 13, 15, 22, 29] def __call__(self, xs): assert xs.dim() ==", "xs / 255.0 xs = self.normalize(xs) feats = [xs] for i, layer in", "self._vgg.parameters(): parameter.requires_grad = False self.normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) self.keep_idc", "VGGFeatureExtractor(nn.Module): def __init__(self): super().__init__() self._vgg = models.vgg16(pretrained=True).features self._vgg.eval() for parameter in self._vgg.parameters(): parameter.requires_grad", "from torch import nn from torchvision import models from torchvision.transforms import transforms import", "22, 29] def __call__(self, xs): assert xs.dim() == 4 xs = util.denormalize(xs) xs", "11, 13, 15, 22, 29] def __call__(self, xs): assert xs.dim() == 4 xs", "xs.dim() == 4 xs = util.denormalize(xs) xs = xs / 255.0 xs =", "models.vgg16(pretrained=True).features self._vgg.eval() for parameter in self._vgg.parameters(): parameter.requires_grad = False self.normalize = transforms.Normalize(mean=[0.485, 0.456,", "xs = xs / 255.0 xs = self.normalize(xs) feats = [xs] for i,", "feats = [xs] for i, layer in enumerate(self._vgg): xs = layer(xs) if i", "== 4 xs = util.denormalize(xs) xs = xs / 255.0 xs = self.normalize(xs)", "= models.vgg16(pretrained=True).features self._vgg.eval() for parameter in self._vgg.parameters(): parameter.requires_grad = False self.normalize = transforms.Normalize(mean=[0.485,", "__call__(self, xs): assert xs.dim() == 4 xs = util.denormalize(xs) xs = xs /", "__init__(self): super().__init__() self._vgg = models.vgg16(pretrained=True).features self._vgg.eval() for parameter in self._vgg.parameters(): parameter.requires_grad = False", "models from 
torchvision.transforms import transforms import util class VGGFeatureExtractor(nn.Module): def __init__(self): super().__init__() self._vgg", "= False self.normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) self.keep_idc = [1,", "8, 11, 13, 15, 22, 29] def __call__(self, xs): assert xs.dim() == 4", "import util class VGGFeatureExtractor(nn.Module): def __init__(self): super().__init__() self._vgg = models.vgg16(pretrained=True).features self._vgg.eval() for parameter", "xs = util.denormalize(xs) xs = xs / 255.0 xs = self.normalize(xs) feats =", "util.denormalize(xs) xs = xs / 255.0 xs = self.normalize(xs) feats = [xs] for", "[xs] for i, layer in enumerate(self._vgg): xs = layer(xs) if i in self.keep_idc:", "i, layer in enumerate(self._vgg): xs = layer(xs) if i in self.keep_idc: feats.append(xs) return", "def __init__(self): super().__init__() self._vgg = models.vgg16(pretrained=True).features self._vgg.eval() for parameter in self._vgg.parameters(): parameter.requires_grad =", "self._vgg.eval() for parameter in self._vgg.parameters(): parameter.requires_grad = False self.normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],", "transforms import util class VGGFeatureExtractor(nn.Module): def __init__(self): super().__init__() self._vgg = models.vgg16(pretrained=True).features self._vgg.eval() for", "/ 255.0 xs = self.normalize(xs) feats = [xs] for i, layer in enumerate(self._vgg):", "0.225]) self.keep_idc = [1, 3, 6, 8, 11, 13, 15, 22, 29] def", "= [1, 3, 6, 8, 11, 13, 15, 22, 29] def __call__(self, xs):", "self.keep_idc = [1, 3, 6, 8, 11, 13, 15, 22, 29] def __call__(self,", "15, 22, 29] def __call__(self, xs): assert xs.dim() == 4 xs = util.denormalize(xs)", "super().__init__() self._vgg = models.vgg16(pretrained=True).features self._vgg.eval() for parameter in self._vgg.parameters(): parameter.requires_grad = False self.normalize", "class VGGFeatureExtractor(nn.Module): def __init__(self): super().__init__() self._vgg = 
models.vgg16(pretrained=True).features self._vgg.eval() for parameter in self._vgg.parameters():", "for parameter in self._vgg.parameters(): parameter.requires_grad = False self.normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229,", "29] def __call__(self, xs): assert xs.dim() == 4 xs = util.denormalize(xs) xs =", "= util.denormalize(xs) xs = xs / 255.0 xs = self.normalize(xs) feats = [xs]", "torch import nn from torchvision import models from torchvision.transforms import transforms import util", "parameter in self._vgg.parameters(): parameter.requires_grad = False self.normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224,", "False self.normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) self.keep_idc = [1, 3,", "255.0 xs = self.normalize(xs) feats = [xs] for i, layer in enumerate(self._vgg): xs", "transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) self.keep_idc = [1, 3, 6, 8, 11,", "nn from torchvision import models from torchvision.transforms import transforms import util class VGGFeatureExtractor(nn.Module):", "def __call__(self, xs): assert xs.dim() == 4 xs = util.denormalize(xs) xs = xs", "std=[0.229, 0.224, 0.225]) self.keep_idc = [1, 3, 6, 8, 11, 13, 15, 22,", "3, 6, 8, 11, 13, 15, 22, 29] def __call__(self, xs): assert xs.dim()", "4 xs = util.denormalize(xs) xs = xs / 255.0 xs = self.normalize(xs) feats", "= xs / 255.0 xs = self.normalize(xs) feats = [xs] for i, layer", "torchvision.transforms import transforms import util class VGGFeatureExtractor(nn.Module): def __init__(self): super().__init__() self._vgg = models.vgg16(pretrained=True).features", "for i, layer in enumerate(self._vgg): xs = layer(xs) if i in self.keep_idc: feats.append(xs)", "import nn from torchvision import models from torchvision.transforms import transforms import util class", "import transforms import util class VGGFeatureExtractor(nn.Module): def __init__(self): 
super().__init__() self._vgg = models.vgg16(pretrained=True).features self._vgg.eval()", "0.456, 0.406], std=[0.229, 0.224, 0.225]) self.keep_idc = [1, 3, 6, 8, 11, 13,", "[1, 3, 6, 8, 11, 13, 15, 22, 29] def __call__(self, xs): assert", "= transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) self.keep_idc = [1, 3, 6, 8,", "in self._vgg.parameters(): parameter.requires_grad = False self.normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])", "self.normalize(xs) feats = [xs] for i, layer in enumerate(self._vgg): xs = layer(xs) if", "xs): assert xs.dim() == 4 xs = util.denormalize(xs) xs = xs / 255.0", "layer in enumerate(self._vgg): xs = layer(xs) if i in self.keep_idc: feats.append(xs) return feats", "self._vgg = models.vgg16(pretrained=True).features self._vgg.eval() for parameter in self._vgg.parameters(): parameter.requires_grad = False self.normalize =", "import models from torchvision.transforms import transforms import util class VGGFeatureExtractor(nn.Module): def __init__(self): super().__init__()", "from torchvision.transforms import transforms import util class VGGFeatureExtractor(nn.Module): def __init__(self): super().__init__() self._vgg =", "self.normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) self.keep_idc = [1, 3, 6,", "0.224, 0.225]) self.keep_idc = [1, 3, 6, 8, 11, 13, 15, 22, 29]", "parameter.requires_grad = False self.normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) self.keep_idc =", "assert xs.dim() == 4 xs = util.denormalize(xs) xs = xs / 255.0 xs", "torchvision import models from torchvision.transforms import transforms import util class VGGFeatureExtractor(nn.Module): def __init__(self):", "= [xs] for i, layer in enumerate(self._vgg): xs = layer(xs) if i in", "util class VGGFeatureExtractor(nn.Module): def __init__(self): super().__init__() self._vgg = models.vgg16(pretrained=True).features self._vgg.eval() 
for parameter in", "0.406], std=[0.229, 0.224, 0.225]) self.keep_idc = [1, 3, 6, 8, 11, 13, 15,", "= self.normalize(xs) feats = [xs] for i, layer in enumerate(self._vgg): xs = layer(xs)" ]
[ "'log.txt') LOG_LEVEL = 'INFO' current_app = None @staticmethod def init_app(app): Log.current_app = app", "= app if not os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH) # 根据时间重命名log file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1,", "Log.current_app = app if not os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH) # 根据时间重命名log file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D',", "%(message)s' formatter = logging.Formatter(fmt) file_handler.setFormatter(formatter) # 设置logger的日志级别:大于等于该级别才会交给handler处理 app.logger.setLevel('DEBUG') app.logger.addHandler(file_handler) # DEBUG模式下不会走到handle_error app.register_error_handler(InternalServerError, handle_error)", "logging import handlers from werkzeug.exceptions import InternalServerError basedir = os.path.abspath(os.path.dirname(__file__)) def handle_error(error): Log.logger().error(error)", "= os.path.join(LOG_PATH, 'log.txt') LOG_LEVEL = 'INFO' current_app = None @staticmethod def init_app(app): Log.current_app", "= os.path.abspath(os.path.dirname(__file__)) def handle_error(error): Log.logger().error(error) return error class Log: LOG_PATH = os.path.join(basedir, 'logs')", "= 'INFO' current_app = None @staticmethod def init_app(app): Log.current_app = app if not", "handlers from werkzeug.exceptions import InternalServerError basedir = os.path.abspath(os.path.dirname(__file__)) def handle_error(error): Log.logger().error(error) return error", "os.path.abspath(os.path.dirname(__file__)) def handle_error(error): Log.logger().error(error) return error class Log: LOG_PATH = os.path.join(basedir, 'logs') LOG_NAME", "= logging.Formatter(fmt) file_handler.setFormatter(formatter) # 设置logger的日志级别:大于等于该级别才会交给handler处理 app.logger.setLevel('DEBUG') app.logger.addHandler(file_handler) # DEBUG模式下不会走到handle_error app.register_error_handler(InternalServerError, handle_error) @staticmethod def", "handle_error(error): Log.logger().error(error) return 
error class Log: LOG_PATH = os.path.join(basedir, 'logs') LOG_NAME = os.path.join(LOG_PATH,", "interval=1, backupCount=0, encoding='utf-8') file_handler.suffix = '%Y-%m-%d.log' # 单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) # file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL) fmt =", "None @staticmethod def init_app(app): Log.current_app = app if not os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH) # 根据时间重命名log", "backupCount=0, encoding='utf-8') file_handler.suffix = '%Y-%m-%d.log' # 单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) # file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL) fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s:", "file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL) fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s' formatter = logging.Formatter(fmt) file_handler.setFormatter(formatter) # 设置logger的日志级别:大于等于该级别才会交给handler处理 app.logger.setLevel('DEBUG')", "from logging import handlers from werkzeug.exceptions import InternalServerError basedir = os.path.abspath(os.path.dirname(__file__)) def handle_error(error):", "LOG_PATH = os.path.join(basedir, 'logs') LOG_NAME = os.path.join(LOG_PATH, 'log.txt') LOG_LEVEL = 'INFO' current_app =", "logging from logging import handlers from werkzeug.exceptions import InternalServerError basedir = os.path.abspath(os.path.dirname(__file__)) def", "basedir = os.path.abspath(os.path.dirname(__file__)) def handle_error(error): Log.logger().error(error) return error class Log: LOG_PATH = os.path.join(basedir,", "class Log: LOG_PATH = os.path.join(basedir, 'logs') LOG_NAME = os.path.join(LOG_PATH, 'log.txt') LOG_LEVEL = 'INFO'", "'%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s' formatter = logging.Formatter(fmt) file_handler.setFormatter(formatter) # 设置logger的日志级别:大于等于该级别才会交给handler处理 app.logger.setLevel('DEBUG') app.logger.addHandler(file_handler) # DEBUG模式下不会走到handle_error 
app.register_error_handler(InternalServerError,", "os.makedirs(Log.LOG_PATH) # 根据时间重命名log file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1, backupCount=0, encoding='utf-8') file_handler.suffix = '%Y-%m-%d.log'", "= '%Y-%m-%d.log' # 单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) # file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL) fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s' formatter =", "os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH) # 根据时间重命名log file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1, backupCount=0, encoding='utf-8') file_handler.suffix =", "import InternalServerError basedir = os.path.abspath(os.path.dirname(__file__)) def handle_error(error): Log.logger().error(error) return error class Log: LOG_PATH", "fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s' formatter = logging.Formatter(fmt) file_handler.setFormatter(formatter) # 设置logger的日志级别:大于等于该级别才会交给handler处理 app.logger.setLevel('DEBUG') app.logger.addHandler(file_handler) #", "file_handler.setFormatter(formatter) # 设置logger的日志级别:大于等于该级别才会交给handler处理 app.logger.setLevel('DEBUG') app.logger.addHandler(file_handler) # DEBUG模式下不会走到handle_error app.register_error_handler(InternalServerError, handle_error) @staticmethod def logger(): return", "= None @staticmethod def init_app(app): Log.current_app = app if not os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH) #", "not os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH) # 根据时间重命名log file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1, backupCount=0, encoding='utf-8') file_handler.suffix", "= logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1, backupCount=0, encoding='utf-8') file_handler.suffix = '%Y-%m-%d.log' # 单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) # file_handler用来写入文件", "when='D', 
interval=1, backupCount=0, encoding='utf-8') file_handler.suffix = '%Y-%m-%d.log' # 单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) # file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL) fmt", "# file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL) fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s' formatter = logging.Formatter(fmt) file_handler.setFormatter(formatter) # 设置logger的日志级别:大于等于该级别才会交给handler处理", "单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) # file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL) fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s' formatter = logging.Formatter(fmt) file_handler.setFormatter(formatter) #", "'%Y-%m-%d.log' # 单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) # file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL) fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s' formatter = logging.Formatter(fmt)", "def init_app(app): Log.current_app = app if not os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH) # 根据时间重命名log file_handler =", "根据时间重命名log file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1, backupCount=0, encoding='utf-8') file_handler.suffix = '%Y-%m-%d.log' # 单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler)", "InternalServerError basedir = os.path.abspath(os.path.dirname(__file__)) def handle_error(error): Log.logger().error(error) return error class Log: LOG_PATH =", "@staticmethod def init_app(app): Log.current_app = app if not os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH) # 根据时间重命名log file_handler", "# 设置logger的日志级别:大于等于该级别才会交给handler处理 app.logger.setLevel('DEBUG') app.logger.addHandler(file_handler) # DEBUG模式下不会走到handle_error app.register_error_handler(InternalServerError, handle_error) @staticmethod def logger(): return Log.current_app.logger", "from werkzeug.exceptions import InternalServerError basedir = 
os.path.abspath(os.path.dirname(__file__)) def handle_error(error): Log.logger().error(error) return error class", "error class Log: LOG_PATH = os.path.join(basedir, 'logs') LOG_NAME = os.path.join(LOG_PATH, 'log.txt') LOG_LEVEL =", "LOG_NAME = os.path.join(LOG_PATH, 'log.txt') LOG_LEVEL = 'INFO' current_app = None @staticmethod def init_app(app):", "encoding='utf-8') file_handler.suffix = '%Y-%m-%d.log' # 单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) # file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL) fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s'", "# 根据时间重命名log file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1, backupCount=0, encoding='utf-8') file_handler.suffix = '%Y-%m-%d.log' #", "file_handler.setLevel(Log.LOG_LEVEL) fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s' formatter = logging.Formatter(fmt) file_handler.setFormatter(formatter) # 设置logger的日志级别:大于等于该级别才会交给handler处理 app.logger.setLevel('DEBUG') app.logger.addHandler(file_handler)", "file_handler.suffix = '%Y-%m-%d.log' # 单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) # file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL) fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s' formatter", "'logs') LOG_NAME = os.path.join(LOG_PATH, 'log.txt') LOG_LEVEL = 'INFO' current_app = None @staticmethod def", "file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1, backupCount=0, encoding='utf-8') file_handler.suffix = '%Y-%m-%d.log' # 单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) #", "Log.logger().error(error) return error class Log: LOG_PATH = os.path.join(basedir, 'logs') LOG_NAME = os.path.join(LOG_PATH, 'log.txt')", "current_app = None @staticmethod def init_app(app): Log.current_app = app if not os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH)", "# 
单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) # file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL) fmt = '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s' formatter = logging.Formatter(fmt) file_handler.setFormatter(formatter)", "import logging from logging import handlers from werkzeug.exceptions import InternalServerError basedir = os.path.abspath(os.path.dirname(__file__))", "werkzeug.exceptions import InternalServerError basedir = os.path.abspath(os.path.dirname(__file__)) def handle_error(error): Log.logger().error(error) return error class Log:", "os.path.join(LOG_PATH, 'log.txt') LOG_LEVEL = 'INFO' current_app = None @staticmethod def init_app(app): Log.current_app =", "formatter = logging.Formatter(fmt) file_handler.setFormatter(formatter) # 设置logger的日志级别:大于等于该级别才会交给handler处理 app.logger.setLevel('DEBUG') app.logger.addHandler(file_handler) # DEBUG模式下不会走到handle_error app.register_error_handler(InternalServerError, handle_error) @staticmethod", "= os.path.join(basedir, 'logs') LOG_NAME = os.path.join(LOG_PATH, 'log.txt') LOG_LEVEL = 'INFO' current_app = None", "Log: LOG_PATH = os.path.join(basedir, 'logs') LOG_NAME = os.path.join(LOG_PATH, 'log.txt') LOG_LEVEL = 'INFO' current_app", "'INFO' current_app = None @staticmethod def init_app(app): Log.current_app = app if not os.path.exists(Log.LOG_PATH):", "import handlers from werkzeug.exceptions import InternalServerError basedir = os.path.abspath(os.path.dirname(__file__)) def handle_error(error): Log.logger().error(error) return", "os import logging from logging import handlers from werkzeug.exceptions import InternalServerError basedir =", "return error class Log: LOG_PATH = os.path.join(basedir, 'logs') LOG_NAME = os.path.join(LOG_PATH, 'log.txt') LOG_LEVEL", "import os import logging from logging import handlers from werkzeug.exceptions import InternalServerError basedir", "LOG_LEVEL = 'INFO' current_app = None @staticmethod def init_app(app): Log.current_app = 
app if", "app if not os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH) # 根据时间重命名log file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1, backupCount=0,", "os.path.join(basedir, 'logs') LOG_NAME = os.path.join(LOG_PATH, 'log.txt') LOG_LEVEL = 'INFO' current_app = None @staticmethod", "if not os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH) # 根据时间重命名log file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1, backupCount=0, encoding='utf-8')", "logging.Formatter(fmt) file_handler.setFormatter(formatter) # 设置logger的日志级别:大于等于该级别才会交给handler处理 app.logger.setLevel('DEBUG') app.logger.addHandler(file_handler) # DEBUG模式下不会走到handle_error app.register_error_handler(InternalServerError, handle_error) @staticmethod def logger():", "= '%(asctime)s-%(levelname)s-%(filename)s-%(funcName)s-%(lineno)s: %(message)s' formatter = logging.Formatter(fmt) file_handler.setFormatter(formatter) # 设置logger的日志级别:大于等于该级别才会交给handler处理 app.logger.setLevel('DEBUG') app.logger.addHandler(file_handler) # DEBUG模式下不会走到handle_error", "def handle_error(error): Log.logger().error(error) return error class Log: LOG_PATH = os.path.join(basedir, 'logs') LOG_NAME =", "logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME, when='D', interval=1, backupCount=0, encoding='utf-8') file_handler.suffix = '%Y-%m-%d.log' # 单独设置handler的日志级别:低于该级别则该handler不处理(一个logger可以有多个handler) # file_handler用来写入文件 file_handler.setLevel(Log.LOG_LEVEL)", "init_app(app): Log.current_app = app if not os.path.exists(Log.LOG_PATH): os.makedirs(Log.LOG_PATH) # 根据时间重命名log file_handler = logging.handlers.TimedRotatingFileHandler(Log.LOG_NAME," ]
[ "of requests within a certain timeframe requests = list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time + timestep)))", "{t2}; \"\"\" class DemandGenerator(object): def __init__(self, use_pattern=False): if use_pattern: self.table = \"request_pattern\" else:", "with each request customers = [Customer(request) for request in requests] # for r", "DemandGenerator(object): def __init__(self, use_pattern=False): if use_pattern: self.table = \"request_pattern\" else: self.table = \"request_backlog\"", "request in requests] # for r in requests: # print(\"Iterating R: \", r)", "requests: # print(\"Iterating R: \", r) # print(\"Cust: \", len(customers), requests) except: Session.rollback()", "WHERE request_datetime >= {t1} and request_datetime < {t2}; \"\"\" class DemandGenerator(object): def __init__(self,", "import Session # import request query = \"\"\" SELECT * FROM {table} WHERE", "* FROM {table} WHERE request_datetime >= {t1} and request_datetime < {t2}; \"\"\" class", "each request customers = [Customer(request) for request in requests] # for r in", "# for r in requests: # print(\"Iterating R: \", r) # print(\"Cust: \",", "timestep): try: # List of requests within a certain timeframe requests = list(Session.execute(query.format(table=self.table,", "= [Customer(request) for request in requests] # for r in requests: # print(\"Iterating", "use_pattern: self.table = \"request_pattern\" else: self.table = \"request_backlog\" def generate(self, current_time, timestep): try:", "t1=current_time, t2=current_time + timestep))) # List of customers associated with each request customers", "def generate(self, current_time, timestep): try: # List of requests within a certain timeframe", "__init__(self, use_pattern=False): if use_pattern: self.table = \"request_pattern\" else: self.table = \"request_backlog\" def generate(self,", "from simulator.models.customer.customer import Customer from db import Session # import request query =", "in requests] 
# for r in requests: # print(\"Iterating R: \", r) #", "db import Session # import request query = \"\"\" SELECT * FROM {table}", "from db import Session # import request query = \"\"\" SELECT * FROM", "= \"request_backlog\" def generate(self, current_time, timestep): try: # List of requests within a", "self.table = \"request_pattern\" else: self.table = \"request_backlog\" def generate(self, current_time, timestep): try: #", "current_time, timestep): try: # List of requests within a certain timeframe requests =", "list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time + timestep))) # List of customers associated with each request", "\"\"\" class DemandGenerator(object): def __init__(self, use_pattern=False): if use_pattern: self.table = \"request_pattern\" else: self.table", "customers associated with each request customers = [Customer(request) for request in requests] #", "<reponame>marina-haliem/Dynamic-RideSharing-Pooling-Simulator from simulator.models.customer.customer import Customer from db import Session # import request query", "request query = \"\"\" SELECT * FROM {table} WHERE request_datetime >= {t1} and", "t2=current_time + timestep))) # List of customers associated with each request customers =", "certain timeframe requests = list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time + timestep))) # List of customers", "request customers = [Customer(request) for request in requests] # for r in requests:", "timestep))) # List of customers associated with each request customers = [Customer(request) for", "r in requests: # print(\"Iterating R: \", r) # print(\"Cust: \", len(customers), requests)", "[Customer(request) for request in requests] # for r in requests: # print(\"Iterating R:", "{t1} and request_datetime < {t2}; \"\"\" class DemandGenerator(object): def __init__(self, use_pattern=False): if use_pattern:", "try: # List of requests within a certain timeframe requests = 
list(Session.execute(query.format(table=self.table, t1=current_time,", ">= {t1} and request_datetime < {t2}; \"\"\" class DemandGenerator(object): def __init__(self, use_pattern=False): if", "class DemandGenerator(object): def __init__(self, use_pattern=False): if use_pattern: self.table = \"request_pattern\" else: self.table =", "# print(\"Iterating R: \", r) # print(\"Cust: \", len(customers), requests) except: Session.rollback() raise", "import request query = \"\"\" SELECT * FROM {table} WHERE request_datetime >= {t1}", "print(\"Iterating R: \", r) # print(\"Cust: \", len(customers), requests) except: Session.rollback() raise finally:", "import Customer from db import Session # import request query = \"\"\" SELECT", "+ timestep))) # List of customers associated with each request customers = [Customer(request)", "self.table = \"request_backlog\" def generate(self, current_time, timestep): try: # List of requests within", "use_pattern=False): if use_pattern: self.table = \"request_pattern\" else: self.table = \"request_backlog\" def generate(self, current_time,", "# List of requests within a certain timeframe requests = list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time", "associated with each request customers = [Customer(request) for request in requests] # for", "\"\"\" SELECT * FROM {table} WHERE request_datetime >= {t1} and request_datetime < {t2};", "# List of customers associated with each request customers = [Customer(request) for request", "List of requests within a certain timeframe requests = list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time +", "requests = list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time + timestep))) # List of customers associated with", "for r in requests: # print(\"Iterating R: \", r) # print(\"Cust: \", len(customers),", "else: self.table = \"request_backlog\" def generate(self, current_time, timestep): try: # List of requests", 
"\"request_backlog\" def generate(self, current_time, timestep): try: # List of requests within a certain", "and request_datetime < {t2}; \"\"\" class DemandGenerator(object): def __init__(self, use_pattern=False): if use_pattern: self.table", "within a certain timeframe requests = list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time + timestep))) # List", "Session # import request query = \"\"\" SELECT * FROM {table} WHERE request_datetime", "timeframe requests = list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time + timestep))) # List of customers associated", "requests within a certain timeframe requests = list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time + timestep))) #", "\", r) # print(\"Cust: \", len(customers), requests) except: Session.rollback() raise finally: Session.remove() return", "Customer from db import Session # import request query = \"\"\" SELECT *", "query = \"\"\" SELECT * FROM {table} WHERE request_datetime >= {t1} and request_datetime", "if use_pattern: self.table = \"request_pattern\" else: self.table = \"request_backlog\" def generate(self, current_time, timestep):", "\"request_pattern\" else: self.table = \"request_backlog\" def generate(self, current_time, timestep): try: # List of", "{table} WHERE request_datetime >= {t1} and request_datetime < {t2}; \"\"\" class DemandGenerator(object): def", "= list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time + timestep))) # List of customers associated with each", "< {t2}; \"\"\" class DemandGenerator(object): def __init__(self, use_pattern=False): if use_pattern: self.table = \"request_pattern\"", "= \"request_pattern\" else: self.table = \"request_backlog\" def generate(self, current_time, timestep): try: # List", "a certain timeframe requests = list(Session.execute(query.format(table=self.table, t1=current_time, t2=current_time + timestep))) # List of", "def 
__init__(self, use_pattern=False): if use_pattern: self.table = \"request_pattern\" else: self.table = \"request_backlog\" def", "simulator.models.customer.customer import Customer from db import Session # import request query = \"\"\"", "R: \", r) # print(\"Cust: \", len(customers), requests) except: Session.rollback() raise finally: Session.remove()", "SELECT * FROM {table} WHERE request_datetime >= {t1} and request_datetime < {t2}; \"\"\"", "generate(self, current_time, timestep): try: # List of requests within a certain timeframe requests", "List of customers associated with each request customers = [Customer(request) for request in", "= \"\"\" SELECT * FROM {table} WHERE request_datetime >= {t1} and request_datetime <", "FROM {table} WHERE request_datetime >= {t1} and request_datetime < {t2}; \"\"\" class DemandGenerator(object):", "request_datetime >= {t1} and request_datetime < {t2}; \"\"\" class DemandGenerator(object): def __init__(self, use_pattern=False):", "requests] # for r in requests: # print(\"Iterating R: \", r) # print(\"Cust:", "customers = [Customer(request) for request in requests] # for r in requests: #", "of customers associated with each request customers = [Customer(request) for request in requests]", "r) # print(\"Cust: \", len(customers), requests) except: Session.rollback() raise finally: Session.remove() return customers", "for request in requests] # for r in requests: # print(\"Iterating R: \",", "in requests: # print(\"Iterating R: \", r) # print(\"Cust: \", len(customers), requests) except:", "request_datetime < {t2}; \"\"\" class DemandGenerator(object): def __init__(self, use_pattern=False): if use_pattern: self.table =", "# import request query = \"\"\" SELECT * FROM {table} WHERE request_datetime >=" ]
[ "trees = [] with open('data/' + inFile, 'rb') as fin: sentenceTokens = []", "10: if len(sentenceTokens) > 0: trees.append(tree) sents.append(sentenceTokens) tree = DependencyTree() sentenceTokens = []", "line[7] token = {} token['word'] = word token['POS'] = pos token['head'] = head", "DependencyTree def loadConll(inFile): sents = [] trees = [] with open('data/' + inFile,", "= [] with open('data/' + inFile, 'rb') as fin: sentenceTokens = [] tree", "= head token['depType'] = depType sentenceTokens.append(token) tree.add(head, depType) return sents, trees def writeConll(outFile,", "= trees[i] for j in range(len(sent)): fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" % (j+1, sent[j]['word'], sent[j]['POS'], sent[j]['POS'], tree.getHead(j+1),", "for line in fin: line = line.strip() line = line.split('\\t') if len(line) <", "'rb') as fin: sentenceTokens = [] tree = DependencyTree() for line in fin:", "trees.append(tree) sents.append(sentenceTokens) tree = DependencyTree() sentenceTokens = [] else: word = line[1] pos", "= [] trees = [] with open('data/' + inFile, 'rb') as fin: sentenceTokens", "sent[j]['word'], sent[j]['POS'], sent[j]['POS'], tree.getHead(j+1), tree.getLabel(j+1))) fout.write(\"\\n\") \"\"\" sents, trees = loadConll(\"train.conll\") print sents[1]", "writeConll(outFile, sentences, trees): with open(outFile, 'wb') as fout: for i in range(len(sentences)): sent", "def writeConll(outFile, sentences, trees): with open(outFile, 'wb') as fout: for i in range(len(sentences)):", "token['POS'] = pos token['head'] = head token['depType'] = depType sentenceTokens.append(token) tree.add(head, depType) return", "token = {} token['word'] = word token['POS'] = pos token['head'] = head token['depType']", "in fin: line = line.strip() line = line.split('\\t') if len(line) < 10: if", "= line[7] token = {} token['word'] = word token['POS'] = pos token['head'] =", "range(len(sentences)): sent = sentences[i] tree = trees[i] for j in range(len(sent)): 
fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" %", "head = int(line[6]) depType = line[7] token = {} token['word'] = word token['POS']", "line.strip() line = line.split('\\t') if len(line) < 10: if len(sentenceTokens) > 0: trees.append(tree)", "line[4] head = int(line[6]) depType = line[7] token = {} token['word'] = word", "token['depType'] = depType sentenceTokens.append(token) tree.add(head, depType) return sents, trees def writeConll(outFile, sentences, trees):", "fin: line = line.strip() line = line.split('\\t') if len(line) < 10: if len(sentenceTokens)", "line in fin: line = line.strip() line = line.split('\\t') if len(line) < 10:", "= line[1] pos = line[4] head = int(line[6]) depType = line[7] token =", "DependencyTree() for line in fin: line = line.strip() line = line.split('\\t') if len(line)", "for i in range(len(sentences)): sent = sentences[i] tree = trees[i] for j in", "inFile, 'rb') as fin: sentenceTokens = [] tree = DependencyTree() for line in", "i in range(len(sentences)): sent = sentences[i] tree = trees[i] for j in range(len(sent)):", "loadConll(inFile): sents = [] trees = [] with open('data/' + inFile, 'rb') as", "fin: sentenceTokens = [] tree = DependencyTree() for line in fin: line =", "0: trees.append(tree) sents.append(sentenceTokens) tree = DependencyTree() sentenceTokens = [] else: word = line[1]", "for j in range(len(sent)): fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" % (j+1, sent[j]['word'], sent[j]['POS'], sent[j]['POS'], tree.getHead(j+1), tree.getLabel(j+1))) fout.write(\"\\n\")", "tree = trees[i] for j in range(len(sent)): fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" % (j+1, sent[j]['word'], sent[j]['POS'], sent[j]['POS'],", "depType) return sents, trees def writeConll(outFile, sentences, trees): with open(outFile, 'wb') as fout:", "j in range(len(sent)): fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" % (j+1, sent[j]['word'], sent[j]['POS'], sent[j]['POS'], 
tree.getHead(j+1), tree.getLabel(j+1))) fout.write(\"\\n\") \"\"\"", "trees): with open(outFile, 'wb') as fout: for i in range(len(sentences)): sent = sentences[i]", "= pos token['head'] = head token['depType'] = depType sentenceTokens.append(token) tree.add(head, depType) return sents,", "sents = [] trees = [] with open('data/' + inFile, 'rb') as fin:", "tree = DependencyTree() for line in fin: line = line.strip() line = line.split('\\t')", "[] else: word = line[1] pos = line[4] head = int(line[6]) depType =", "= [] else: word = line[1] pos = line[4] head = int(line[6]) depType", "= DependencyTree() sentenceTokens = [] else: word = line[1] pos = line[4] head", "sentences[i] tree = trees[i] for j in range(len(sent)): fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" % (j+1, sent[j]['word'], sent[j]['POS'],", "with open('data/' + inFile, 'rb') as fin: sentenceTokens = [] tree = DependencyTree()", "line.split('\\t') if len(line) < 10: if len(sentenceTokens) > 0: trees.append(tree) sents.append(sentenceTokens) tree =", "= [] tree = DependencyTree() for line in fin: line = line.strip() line", "+ inFile, 'rb') as fin: sentenceTokens = [] tree = DependencyTree() for line", "{} token['word'] = word token['POS'] = pos token['head'] = head token['depType'] = depType", "return sents, trees def writeConll(outFile, sentences, trees): with open(outFile, 'wb') as fout: for", "fout: for i in range(len(sentences)): sent = sentences[i] tree = trees[i] for j", "[] tree = DependencyTree() for line in fin: line = line.strip() line =", "DependencyTree import DependencyTree def loadConll(inFile): sents = [] trees = [] with open('data/'", "depType sentenceTokens.append(token) tree.add(head, depType) return sents, trees def writeConll(outFile, sentences, trees): with open(outFile,", "line = line.strip() line = line.split('\\t') if len(line) < 10: if len(sentenceTokens) >", "len(sentenceTokens) > 0: trees.append(tree) sents.append(sentenceTokens) tree = DependencyTree() 
sentenceTokens = [] else: word", "sentenceTokens.append(token) tree.add(head, depType) return sents, trees def writeConll(outFile, sentences, trees): with open(outFile, 'wb')", "[] trees = [] with open('data/' + inFile, 'rb') as fin: sentenceTokens =", "sents.append(sentenceTokens) tree = DependencyTree() sentenceTokens = [] else: word = line[1] pos =", "open('data/' + inFile, 'rb') as fin: sentenceTokens = [] tree = DependencyTree() for", "word = line[1] pos = line[4] head = int(line[6]) depType = line[7] token", "fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" % (j+1, sent[j]['word'], sent[j]['POS'], sent[j]['POS'], tree.getHead(j+1), tree.getLabel(j+1))) fout.write(\"\\n\") \"\"\" sents, trees =", "token['word'] = word token['POS'] = pos token['head'] = head token['depType'] = depType sentenceTokens.append(token)", "= sentences[i] tree = trees[i] for j in range(len(sent)): fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" % (j+1, sent[j]['word'],", "trees def writeConll(outFile, sentences, trees): with open(outFile, 'wb') as fout: for i in", "= line[4] head = int(line[6]) depType = line[7] token = {} token['word'] =", "= line.strip() line = line.split('\\t') if len(line) < 10: if len(sentenceTokens) > 0:", "pos = line[4] head = int(line[6]) depType = line[7] token = {} token['word']", "int(line[6]) depType = line[7] token = {} token['word'] = word token['POS'] = pos", "% (j+1, sent[j]['word'], sent[j]['POS'], sent[j]['POS'], tree.getHead(j+1), tree.getLabel(j+1))) fout.write(\"\\n\") \"\"\" sents, trees = loadConll(\"train.conll\")", "= depType sentenceTokens.append(token) tree.add(head, depType) return sents, trees def writeConll(outFile, sentences, trees): with", "sentenceTokens = [] tree = DependencyTree() for line in fin: line = line.strip()", "if len(sentenceTokens) > 0: trees.append(tree) sents.append(sentenceTokens) tree = DependencyTree() sentenceTokens = [] else:", "sentenceTokens = [] else: word = line[1] pos = line[4] head = 
int(line[6])", "sentences, trees): with open(outFile, 'wb') as fout: for i in range(len(sentences)): sent =", "else: word = line[1] pos = line[4] head = int(line[6]) depType = line[7]", "tree.add(head, depType) return sents, trees def writeConll(outFile, sentences, trees): with open(outFile, 'wb') as", "open(outFile, 'wb') as fout: for i in range(len(sentences)): sent = sentences[i] tree =", "in range(len(sentences)): sent = sentences[i] tree = trees[i] for j in range(len(sent)): fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\"", "token['head'] = head token['depType'] = depType sentenceTokens.append(token) tree.add(head, depType) return sents, trees def", "= {} token['word'] = word token['POS'] = pos token['head'] = head token['depType'] =", "with open(outFile, 'wb') as fout: for i in range(len(sentences)): sent = sentences[i] tree", "tree = DependencyTree() sentenceTokens = [] else: word = line[1] pos = line[4]", "= word token['POS'] = pos token['head'] = head token['depType'] = depType sentenceTokens.append(token) tree.add(head,", "pos token['head'] = head token['depType'] = depType sentenceTokens.append(token) tree.add(head, depType) return sents, trees", "'wb') as fout: for i in range(len(sentences)): sent = sentences[i] tree = trees[i]", "import DependencyTree def loadConll(inFile): sents = [] trees = [] with open('data/' +", "def loadConll(inFile): sents = [] trees = [] with open('data/' + inFile, 'rb')", "[] with open('data/' + inFile, 'rb') as fin: sentenceTokens = [] tree =", "> 0: trees.append(tree) sents.append(sentenceTokens) tree = DependencyTree() sentenceTokens = [] else: word =", "< 10: if len(sentenceTokens) > 0: trees.append(tree) sents.append(sentenceTokens) tree = DependencyTree() sentenceTokens =", "(j+1, sent[j]['word'], sent[j]['POS'], sent[j]['POS'], tree.getHead(j+1), tree.getLabel(j+1))) fout.write(\"\\n\") \"\"\" sents, trees = loadConll(\"train.conll\") print", "in range(len(sent)): 
fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" % (j+1, sent[j]['word'], sent[j]['POS'], sent[j]['POS'], tree.getHead(j+1), tree.getLabel(j+1))) fout.write(\"\\n\") \"\"\" sents,", "sent = sentences[i] tree = trees[i] for j in range(len(sent)): fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" % (j+1,", "sent[j]['POS'], tree.getHead(j+1), tree.getLabel(j+1))) fout.write(\"\\n\") \"\"\" sents, trees = loadConll(\"train.conll\") print sents[1] trees[1].print_tree() \"\"\"", "depType = line[7] token = {} token['word'] = word token['POS'] = pos token['head']", "= int(line[6]) depType = line[7] token = {} token['word'] = word token['POS'] =", "as fin: sentenceTokens = [] tree = DependencyTree() for line in fin: line", "sents, trees def writeConll(outFile, sentences, trees): with open(outFile, 'wb') as fout: for i", "range(len(sent)): fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" % (j+1, sent[j]['word'], sent[j]['POS'], sent[j]['POS'], tree.getHead(j+1), tree.getLabel(j+1))) fout.write(\"\\n\") \"\"\" sents, trees", "if len(line) < 10: if len(sentenceTokens) > 0: trees.append(tree) sents.append(sentenceTokens) tree = DependencyTree()", "trees[i] for j in range(len(sent)): fout.write(\"%d\\t%s\\t_\\t%s\\t%s\\t_\\t%d\\t%s\\t_\\t_\\n\" % (j+1, sent[j]['word'], sent[j]['POS'], sent[j]['POS'], tree.getHead(j+1), tree.getLabel(j+1)))", "from DependencyTree import DependencyTree def loadConll(inFile): sents = [] trees = [] with", "<reponame>jay-z007/Natural-Language-Processing from DependencyTree import DependencyTree def loadConll(inFile): sents = [] trees = []", "as fout: for i in range(len(sentences)): sent = sentences[i] tree = trees[i] for", "head token['depType'] = depType sentenceTokens.append(token) tree.add(head, depType) return sents, trees def writeConll(outFile, sentences,", "= DependencyTree() for line in fin: line = line.strip() line = line.split('\\t') if", "len(line) < 10: if len(sentenceTokens) > 0: trees.append(tree) 
sents.append(sentenceTokens) tree = DependencyTree() sentenceTokens", "DependencyTree() sentenceTokens = [] else: word = line[1] pos = line[4] head =", "line[1] pos = line[4] head = int(line[6]) depType = line[7] token = {}", "sent[j]['POS'], sent[j]['POS'], tree.getHead(j+1), tree.getLabel(j+1))) fout.write(\"\\n\") \"\"\" sents, trees = loadConll(\"train.conll\") print sents[1] trees[1].print_tree()", "word token['POS'] = pos token['head'] = head token['depType'] = depType sentenceTokens.append(token) tree.add(head, depType)", "= line.split('\\t') if len(line) < 10: if len(sentenceTokens) > 0: trees.append(tree) sents.append(sentenceTokens) tree", "line = line.split('\\t') if len(line) < 10: if len(sentenceTokens) > 0: trees.append(tree) sents.append(sentenceTokens)" ]
[ "<filename>vimeo/auth/__init__.py #! /usr/bin/env python # encoding: utf-8 from __future__ import absolute_import class GrantFailed(Exception):", "#! /usr/bin/env python # encoding: utf-8 from __future__ import absolute_import class GrantFailed(Exception): pass" ]
[ "distributed in the hope that it will be useful, # but WITHOUT ANY", "either version 3 of the License, or # (at your option) any later", "print('var eh = {{{}}};'.format(','.join('\"{} {}\":{{bot:{},top:{},l:{}}}'.format(k[0], k[1], v[0], v[1], v[2]) for k, v in", "by # the Free Software Foundation, either version 3 of the License, or", "version 3 of the License, or # (at your option) any later version.", "in utils.get_node_coords().items()))) print('var eh = {{{}}};'.format(','.join('\"{} {}\":{{bot:{},top:{},l:{}}}'.format(k[0], k[1], v[0], v[1], v[2]) for k,", "web frontend. import utils edge_lengths = utils.load_edge_lengths() print('var el = {{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0], k[1],", "published by # the Free Software Foundation, either version 3 of the License,", "it and/or modify # it under the terms of the GNU General Public", "print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2]) for k, v in utils.get_node_coords().items()))) print('var", "dwinelle-tools is free software: you can redistribute it and/or modify # it under", "# it under the terms of the GNU General Public License as published", "terms of the GNU General Public License as published by # the Free", "file is part of dwinelle-tools. # dwinelle-tools is free software: you can redistribute", "GNU General Public License as published by # the Free Software Foundation, either", "it under the terms of the GNU General Public License as published by", "be used to generate data3d.js for the web frontend. import utils edge_lengths =", "can be used to generate data3d.js for the web frontend. import utils edge_lengths", "<http://www.gnu.org/licenses/>. 
# This can be used to generate data3d.js for the web frontend.", "free software: you can redistribute it and/or modify # it under the terms", "utils.get_node_coords().items()))) print('var eh = {{{}}};'.format(','.join('\"{} {}\":{{bot:{},top:{},l:{}}}'.format(k[0], k[1], v[0], v[1], v[2]) for k, v", "# GNU General Public License for more details. # You should have received", "you can redistribute it and/or modify # it under the terms of the", "the License, or # (at your option) any later version. # dwinelle-tools is", "copy of the GNU General Public License # along with dwinelle-tools. If not,", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General", "utils edge_lengths = utils.load_edge_lengths() print('var el = {{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0], k[1], v) for k,", "of the GNU General Public License as published by # the Free Software", "License for more details. # You should have received a copy of the", "redistribute it and/or modify # it under the terms of the GNU General", "print('var el = {{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0], k[1], v) for k, v in edge_lengths.items()))) print('var", "Public License as published by # the Free Software Foundation, either version 3", "modify # it under the terms of the GNU General Public License as", "used to generate data3d.js for the web frontend. import utils edge_lengths = utils.load_edge_lengths()", "a copy of the GNU General Public License # along with dwinelle-tools. If", "details. # You should have received a copy of the GNU General Public", "the Free Software Foundation, either version 3 of the License, or # (at", "should have received a copy of the GNU General Public License # along", "PARTICULAR PURPOSE. See the # GNU General Public License for more details. #", "See the # GNU General Public License for more details. 
# You should", "k[1], v) for k, v in edge_lengths.items()))) print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1],", "utils.load_edge_lengths() print('var el = {{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0], k[1], v) for k, v in edge_lengths.items())))", "edge_lengths.items()))) print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2]) for k, v in utils.get_node_coords().items())))", "for the web frontend. import utils edge_lengths = utils.load_edge_lengths() print('var el = {{{}}};'.format(','.join('\"{}", "is free software: you can redistribute it and/or modify # it under the", "without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "= {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2]) for k, v in utils.get_node_coords().items()))) print('var eh =", "# along with dwinelle-tools. If not, see <http://www.gnu.org/licenses/>. # This can be used", "version. # dwinelle-tools is distributed in the hope that it will be useful,", "hope that it will be useful, # but WITHOUT ANY WARRANTY; without even", "see <http://www.gnu.org/licenses/>. # This can be used to generate data3d.js for the web", "for k, v in edge_lengths.items()))) print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2]) for", "data3d.js for the web frontend. import utils edge_lengths = utils.load_edge_lengths() print('var el =", "dwinelle-tools. # dwinelle-tools is free software: you can redistribute it and/or modify #", "your option) any later version. 
# dwinelle-tools is distributed in the hope that", "the GNU General Public License as published by # the Free Software Foundation,", "k, v in edge_lengths.items()))) print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2]) for k,", "v[2]) for k, v in utils.get_node_coords().items()))) print('var eh = {{{}}};'.format(','.join('\"{} {}\":{{bot:{},top:{},l:{}}}'.format(k[0], k[1], v[0],", "coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2]) for k, v in utils.get_node_coords().items()))) print('var eh", "not, see <http://www.gnu.org/licenses/>. # This can be used to generate data3d.js for the", "warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #", "You should have received a copy of the GNU General Public License #", "# the Free Software Foundation, either version 3 of the License, or #", "Public License # along with dwinelle-tools. If not, see <http://www.gnu.org/licenses/>. # This can", "with dwinelle-tools. If not, see <http://www.gnu.org/licenses/>. # This can be used to generate", "the GNU General Public License # along with dwinelle-tools. If not, see <http://www.gnu.org/licenses/>.", "v) for k, v in edge_lengths.items()))) print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2])", "Foundation, either version 3 of the License, or # (at your option) any", "WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A", "is part of dwinelle-tools. # dwinelle-tools is free software: you can redistribute it", "useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of #", "General Public License # along with dwinelle-tools. If not, see <http://www.gnu.org/licenses/>. # This", "that it will be useful, # but WITHOUT ANY WARRANTY; without even the", "PURPOSE. See the # GNU General Public License for more details. # You", "the # GNU General Public License for more details. 
# You should have", "License as published by # the Free Software Foundation, either version 3 of", "the terms of the GNU General Public License as published by # the", "# dwinelle-tools is free software: you can redistribute it and/or modify # it", "to generate data3d.js for the web frontend. import utils edge_lengths = utils.load_edge_lengths() print('var", "as published by # the Free Software Foundation, either version 3 of the", "#!/usr/bin/env python3 # This file is part of dwinelle-tools. # dwinelle-tools is free", "more details. # You should have received a copy of the GNU General", "even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "License, or # (at your option) any later version. # dwinelle-tools is distributed", "implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR", "Public License for more details. # You should have received a copy of", "it will be useful, # but WITHOUT ANY WARRANTY; without even the implied", "option) any later version. # dwinelle-tools is distributed in the hope that it", "WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS", "eh = {{{}}};'.format(','.join('\"{} {}\":{{bot:{},top:{},l:{}}}'.format(k[0], k[1], v[0], v[1], v[2]) for k, v in utils.load_edge_heights().items())))", "the hope that it will be useful, # but WITHOUT ANY WARRANTY; without", "# (at your option) any later version. # dwinelle-tools is distributed in the", "or # (at your option) any later version. # dwinelle-tools is distributed in", "of dwinelle-tools. # dwinelle-tools is free software: you can redistribute it and/or modify", "python3 # This file is part of dwinelle-tools. # dwinelle-tools is free software:", "v in edge_lengths.items()))) print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2]) for k, v", "FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for more", "# This file is part of dwinelle-tools. # dwinelle-tools is free software: you", "received a copy of the GNU General Public License # along with dwinelle-tools.", "If not, see <http://www.gnu.org/licenses/>. # This can be used to generate data3d.js for", "for more details. # You should have received a copy of the GNU", "This file is part of dwinelle-tools. # dwinelle-tools is free software: you can", "be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of", "have received a copy of the GNU General Public License # along with", "GNU General Public License for more details. # You should have received a", "the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See", "License # along with dwinelle-tools. If not, see <http://www.gnu.org/licenses/>. # This can be", "# You should have received a copy of the GNU General Public License", "any later version. # dwinelle-tools is distributed in the hope that it will", "el = {{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0], k[1], v) for k, v in edge_lengths.items()))) print('var coords", "{{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0], k[1], v) for k, v in edge_lengths.items()))) print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k,", "is distributed in the hope that it will be useful, # but WITHOUT", "# dwinelle-tools is distributed in the hope that it will be useful, #", "along with dwinelle-tools. If not, see <http://www.gnu.org/licenses/>. # This can be used to", "the web frontend. import utils edge_lengths = utils.load_edge_lengths() print('var el = {{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0],", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public", "in the hope that it will be useful, # but WITHOUT ANY WARRANTY;", "in edge_lengths.items()))) print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2]) for k, v in", "of the License, or # (at your option) any later version. # dwinelle-tools", "General Public License for more details. # You should have received a copy", "FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for", "# This can be used to generate data3d.js for the web frontend. import", "edge_lengths = utils.load_edge_lengths() print('var el = {{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0], k[1], v) for k, v", "{{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0], v[1], v[2]) for k, v in utils.get_node_coords().items()))) print('var eh = {{{}}};'.format(','.join('\"{}", "and/or modify # it under the terms of the GNU General Public License", "v[1], v[2]) for k, v in utils.get_node_coords().items()))) print('var eh = {{{}}};'.format(','.join('\"{} {}\":{{bot:{},top:{},l:{}}}'.format(k[0], k[1],", "= utils.load_edge_lengths() print('var el = {{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0], k[1], v) for k, v in", "part of dwinelle-tools. # dwinelle-tools is free software: you can redistribute it and/or", "generate data3d.js for the web frontend. import utils edge_lengths = utils.load_edge_lengths() print('var el", "3 of the License, or # (at your option) any later version. #", "A PARTICULAR PURPOSE. See the # GNU General Public License for more details.", "or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License", "GNU General Public License # along with dwinelle-tools. If not, see <http://www.gnu.org/licenses/>. #", "k, v in utils.get_node_coords().items()))) print('var eh = {{{}}};'.format(','.join('\"{} {}\":{{bot:{},top:{},l:{}}}'.format(k[0], k[1], v[0], v[1], v[2])", "<filename>dwinelle/video/gen_3d.py #!/usr/bin/env python3 # This file is part of dwinelle-tools. 
# dwinelle-tools is", "will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty", "frontend. import utils edge_lengths = utils.load_edge_lengths() print('var el = {{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0], k[1], v)", "General Public License as published by # the Free Software Foundation, either version", "software: you can redistribute it and/or modify # it under the terms of", "= {{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0], k[1], v) for k, v in edge_lengths.items()))) print('var coords =", "{}\":{}'.format(k[0], k[1], v) for k, v in edge_lengths.items()))) print('var coords = {{{}}};'.format(','.join('{}:{{x:{},y:{},z:{}}}'.format(k, v[0],", "import utils edge_lengths = utils.load_edge_lengths() print('var el = {{{}}};'.format(','.join('\"{} {}\":{}'.format(k[0], k[1], v) for", "of the GNU General Public License # along with dwinelle-tools. If not, see", "for k, v in utils.get_node_coords().items()))) print('var eh = {{{}}};'.format(','.join('\"{} {}\":{{bot:{},top:{},l:{}}}'.format(k[0], k[1], v[0], v[1],", "v in utils.get_node_coords().items()))) print('var eh = {{{}}};'.format(','.join('\"{} {}\":{{bot:{},top:{},l:{}}}'.format(k[0], k[1], v[0], v[1], v[2]) for", "of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU", "later version. # dwinelle-tools is distributed in the hope that it will be", "This can be used to generate data3d.js for the web frontend. import utils", "under the terms of the GNU General Public License as published by #", "# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY", "dwinelle-tools. If not, see <http://www.gnu.org/licenses/>. 
# This can be used to generate data3d.js", "dwinelle-tools is distributed in the hope that it will be useful, # but", "can redistribute it and/or modify # it under the terms of the GNU", "Free Software Foundation, either version 3 of the License, or # (at your", "Software Foundation, either version 3 of the License, or # (at your option)", "v[0], v[1], v[2]) for k, v in utils.get_node_coords().items()))) print('var eh = {{{}}};'.format(','.join('\"{} {}\":{{bot:{},top:{},l:{}}}'.format(k[0],", "(at your option) any later version. # dwinelle-tools is distributed in the hope", "but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or" ]
[ "= stockInput + \" price update!\" if oldprice < targetprice: if newprice >=", "= price.replace(\",\", \"\") print(\"The price is: \" + price) newprice = float(price.replace(\",\", \"\"))", "from flask import Flask, render_template, request, url_for from threading import Thread app =", "message) time.sleep(30) kwargs = { 'stockInput':request.args.get('ticker'), 'targetprice':request.args.get('target'), 'email':request.args.get('email') } print(request.args) thread = Thread(target=do_work,", "newprice >= targetprice: body = stockInput.upper() + \" rose to \" + str(newprice)", "f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) time.sleep(30) kwargs = { 'stockInput':request.args.get('ticker'), 'targetprice':request.args.get('target'), 'email':request.args.get('email') }", "fell to \" + str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email,", "\"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice == targetprice: body", "do_work(stockInput, targetprice, email): targetprice = float(targetprice) while True: URL = \"https://finance.yahoo.com/quote/\" + stockInput.upper()", "'email':request.args.get('email') } print(request.args) thread = Thread(target=do_work, kwargs=kwargs) thread.start() return render_template(\"site.html\") if __name__ ==", "= BeautifulSoup(htmlFound, 'html') price = retrieved.find(\"span\", class_ = \"Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)\").text", "= float(price.replace(\",\", \"\")) newtargetprice = price.replace(\",\", \"\") print(\"The price is: \" + price)", "newprice = float(price.replace(\",\", \"\")) server = smtplib.SMTP(\"smtp.gmail.com\", 587) server.ehlo() server.starttls() server.ehlo() server.login(\"email\", \"password\")", "str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice ==", "+ str(newprice) + \"!\" message = f\"Subject: 
{head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) time.sleep(30) kwargs", "url_for from threading import Thread app = Flask(__name__) @app.route('/') def progstart(): return render_template(\"site.html\")", "= float(targetprice) while True: URL = \"https://finance.yahoo.com/quote/\" + stockInput.upper() + \"?p=\" + stockInput.upper()", "= requests.get(URL).text retrieved = BeautifulSoup(htmlFound, 'html') price = retrieved.find(\"span\", class_ = \"Trsdu(0.3s) Fw(b)", "} print(request.args) thread = Thread(target=do_work, kwargs=kwargs) thread.start() return render_template(\"site.html\") if __name__ == \"__main__\":", "\"Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)\").text oldprice = float(price.replace(\",\", \"\")) newtargetprice = price.replace(\",\", \"\")", "server.sendmail(\"<EMAIL>\", email, message) if oldprice == targetprice: body = stockInput.upper() + \" has", "True: URL = \"https://finance.yahoo.com/quote/\" + stockInput.upper() + \"?p=\" + stockInput.upper() + \"&.tsrc=fin-srch\" htmlFound", "oldprice > targetprice: if newprice <= targetprice: body = stockInput.upper() + \" fell", "def start_task(): def do_work(stockInput, targetprice, email): targetprice = float(targetprice) while True: URL =", "body = stockInput.upper() + \" rose to \" + str(newprice) + \"!\" message", "\" price update!\" if oldprice < targetprice: if newprice >= targetprice: body =", "URL = \"https://finance.yahoo.com/quote/\" + stockInput.upper() + \"?p=\" + stockInput.upper() + \"&.tsrc=fin-srch\" htmlFound =", "price = retrieved.find(\"span\", class_ = \"Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)\").text oldprice = float(price.replace(\",\",", "+ str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice", "reached $\" + str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message)", "+ \" has reached $\" + str(newprice) + \"!\" message = f\"Subject: 
{head}\\n\\n{body}\"", "email, message) time.sleep(30) kwargs = { 'stockInput':request.args.get('ticker'), 'targetprice':request.args.get('target'), 'email':request.args.get('email') } print(request.args) thread =", "time.sleep(30) kwargs = { 'stockInput':request.args.get('ticker'), 'targetprice':request.args.get('target'), 'email':request.args.get('email') } print(request.args) thread = Thread(target=do_work, kwargs=kwargs)", "class_ = \"Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)\").text oldprice = float(price.replace(\",\", \"\")) newtargetprice =", "targetprice: body = stockInput.upper() + \" has reached $\" + str(newprice) + \"!\"", "stockInput.upper() + \" has reached $\" + str(newprice) + \"!\" message = f\"Subject:", "+ \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice > targetprice:", "targetprice: if newprice >= targetprice: body = stockInput.upper() + \" rose to \"", "while True: URL = \"https://finance.yahoo.com/quote/\" + stockInput.upper() + \"?p=\" + stockInput.upper() + \"&.tsrc=fin-srch\"", "stockInput.upper() + \"?p=\" + stockInput.upper() + \"&.tsrc=fin-srch\" htmlFound = requests.get(URL).text retrieved = BeautifulSoup(htmlFound,", "smtplib.SMTP(\"smtp.gmail.com\", 587) server.ehlo() server.starttls() server.ehlo() server.login(\"email\", \"password\") head = stockInput + \" price", "{head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice == targetprice: body = stockInput.upper() + \"", "= Flask(__name__) @app.route('/') def progstart(): return render_template(\"site.html\") @app.route('/start_task') def start_task(): def do_work(stockInput, targetprice,", "float(targetprice) while True: URL = \"https://finance.yahoo.com/quote/\" + stockInput.upper() + \"?p=\" + stockInput.upper() +", "threading import Thread app = Flask(__name__) @app.route('/') def progstart(): return render_template(\"site.html\") @app.route('/start_task') def", "email): targetprice = 
float(targetprice) while True: URL = \"https://finance.yahoo.com/quote/\" + stockInput.upper() + \"?p=\"", "= float(price.replace(\",\", \"\")) server = smtplib.SMTP(\"smtp.gmail.com\", 587) server.ehlo() server.starttls() server.ehlo() server.login(\"email\", \"password\") head", "\"\")) newtargetprice = price.replace(\",\", \"\") print(\"The price is: \" + price) newprice =", "price) newprice = float(price.replace(\",\", \"\")) server = smtplib.SMTP(\"smtp.gmail.com\", 587) server.ehlo() server.starttls() server.ehlo() server.login(\"email\",", "from threading import Thread app = Flask(__name__) @app.route('/') def progstart(): return render_template(\"site.html\") @app.route('/start_task')", "+ \"&.tsrc=fin-srch\" htmlFound = requests.get(URL).text retrieved = BeautifulSoup(htmlFound, 'html') price = retrieved.find(\"span\", class_", "body = stockInput.upper() + \" fell to \" + str(newprice) + \"!\" message", "f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice == targetprice: body = stockInput.upper() +", "\" fell to \" + str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\",", "{ 'stockInput':request.args.get('ticker'), 'targetprice':request.args.get('target'), 'email':request.args.get('email') } print(request.args) thread = Thread(target=do_work, kwargs=kwargs) thread.start() return render_template(\"site.html\")", "= \"https://finance.yahoo.com/quote/\" + stockInput.upper() + \"?p=\" + stockInput.upper() + \"&.tsrc=fin-srch\" htmlFound = requests.get(URL).text", "f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice > targetprice: if newprice <= targetprice:", "= stockInput.upper() + \" has reached $\" + str(newprice) + \"!\" message =", "head = stockInput + \" price update!\" if oldprice < targetprice: if newprice", "< targetprice: if newprice >= targetprice: body = stockInput.upper() + \" rose to", "$\" + str(newprice) + \"!\" message = 
f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) time.sleep(30)", "rose to \" + str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email,", ">= targetprice: body = stockInput.upper() + \" rose to \" + str(newprice) +", "+ \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice == targetprice:", "print(request.args) thread = Thread(target=do_work, kwargs=kwargs) thread.start() return render_template(\"site.html\") if __name__ == \"__main__\": app.run(debug=True)", "requests.get(URL).text retrieved = BeautifulSoup(htmlFound, 'html') price = retrieved.find(\"span\", class_ = \"Trsdu(0.3s) Fw(b) Fz(36px)", "targetprice: if newprice <= targetprice: body = stockInput.upper() + \" fell to \"", "'targetprice':request.args.get('target'), 'email':request.args.get('email') } print(request.args) thread = Thread(target=do_work, kwargs=kwargs) thread.start() return render_template(\"site.html\") if __name__", "@app.route('/') def progstart(): return render_template(\"site.html\") @app.route('/start_task') def start_task(): def do_work(stockInput, targetprice, email): targetprice", "stockInput.upper() + \" fell to \" + str(newprice) + \"!\" message = f\"Subject:", "> targetprice: if newprice <= targetprice: body = stockInput.upper() + \" fell to", "Fw(b) Fz(36px) Mb(-4px) D(ib)\").text oldprice = float(price.replace(\",\", \"\")) newtargetprice = price.replace(\",\", \"\") print(\"The", "server.login(\"email\", \"password\") head = stockInput + \" price update!\" if oldprice < targetprice:", "== targetprice: body = stockInput.upper() + \" has reached $\" + str(newprice) +", "has reached $\" + str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email,", "= f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) time.sleep(30) kwargs = { 'stockInput':request.args.get('ticker'), 
'targetprice':request.args.get('target'), 'email':request.args.get('email')", "if oldprice == targetprice: body = stockInput.upper() + \" has reached $\" +", "str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice >", "retrieved.find(\"span\", class_ = \"Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)\").text oldprice = float(price.replace(\",\", \"\")) newtargetprice", "if newprice <= targetprice: body = stockInput.upper() + \" fell to \" +", "request, url_for from threading import Thread app = Flask(__name__) @app.route('/') def progstart(): return", "\" + str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if", "+ \" fell to \" + str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\"", "+ stockInput.upper() + \"&.tsrc=fin-srch\" htmlFound = requests.get(URL).text retrieved = BeautifulSoup(htmlFound, 'html') price =", "email, message) if oldprice > targetprice: if newprice <= targetprice: body = stockInput.upper()", "+ \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) time.sleep(30) kwargs = {", "{head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) time.sleep(30) kwargs = { 'stockInput':request.args.get('ticker'), 'targetprice':request.args.get('target'), 'email':request.args.get('email') } print(request.args)", "stockInput + \" price update!\" if oldprice < targetprice: if newprice >= targetprice:", "= retrieved.find(\"span\", class_ = \"Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)\").text oldprice = float(price.replace(\",\", \"\"))", "price is: \" + price) newprice = float(price.replace(\",\", \"\")) server = smtplib.SMTP(\"smtp.gmail.com\", 587)", "= f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice == targetprice: body = stockInput.upper()", "retrieved = BeautifulSoup(htmlFound, 'html') price = retrieved.find(\"span\", class_ = 
\"Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px)", "from bs4 import BeautifulSoup import requests, smtplib, time from flask import Flask, render_template,", "D(ib)\").text oldprice = float(price.replace(\",\", \"\")) newtargetprice = price.replace(\",\", \"\") print(\"The price is: \"", "message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice > targetprice: if newprice", "\"https://finance.yahoo.com/quote/\" + stockInput.upper() + \"?p=\" + stockInput.upper() + \"&.tsrc=fin-srch\" htmlFound = requests.get(URL).text retrieved", "print(\"The price is: \" + price) newprice = float(price.replace(\",\", \"\")) server = smtplib.SMTP(\"smtp.gmail.com\",", "{head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice > targetprice: if newprice <= targetprice: body", "time from flask import Flask, render_template, request, url_for from threading import Thread app", "Flask, render_template, request, url_for from threading import Thread app = Flask(__name__) @app.route('/') def", "Fz(36px) Mb(-4px) D(ib)\").text oldprice = float(price.replace(\",\", \"\")) newtargetprice = price.replace(\",\", \"\") print(\"The price", "message) if oldprice > targetprice: if newprice <= targetprice: body = stockInput.upper() +", "oldprice = float(price.replace(\",\", \"\")) newtargetprice = price.replace(\",\", \"\") print(\"The price is: \" +", "return render_template(\"site.html\") @app.route('/start_task') def start_task(): def do_work(stockInput, targetprice, email): targetprice = float(targetprice) while", "\"\")) server = smtplib.SMTP(\"smtp.gmail.com\", 587) server.ehlo() server.starttls() server.ehlo() server.login(\"email\", \"password\") head = stockInput", "= { 'stockInput':request.args.get('ticker'), 'targetprice':request.args.get('target'), 'email':request.args.get('email') } print(request.args) thread = Thread(target=do_work, kwargs=kwargs) thread.start() return", "= \"Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)\").text oldprice = 
float(price.replace(\",\", \"\")) newtargetprice = price.replace(\",\",", "targetprice: body = stockInput.upper() + \" rose to \" + str(newprice) + \"!\"", "+ price) newprice = float(price.replace(\",\", \"\")) server = smtplib.SMTP(\"smtp.gmail.com\", 587) server.ehlo() server.starttls() server.ehlo()", "float(price.replace(\",\", \"\")) newtargetprice = price.replace(\",\", \"\") print(\"The price is: \" + price) newprice", "oldprice == targetprice: body = stockInput.upper() + \" has reached $\" + str(newprice)", "\"?p=\" + stockInput.upper() + \"&.tsrc=fin-srch\" htmlFound = requests.get(URL).text retrieved = BeautifulSoup(htmlFound, 'html') price", "server.sendmail(\"<EMAIL>\", email, message) if oldprice > targetprice: if newprice <= targetprice: body =", "\" + price) newprice = float(price.replace(\",\", \"\")) server = smtplib.SMTP(\"smtp.gmail.com\", 587) server.ehlo() server.starttls()", "server.sendmail(\"<EMAIL>\", email, message) time.sleep(30) kwargs = { 'stockInput':request.args.get('ticker'), 'targetprice':request.args.get('target'), 'email':request.args.get('email') } print(request.args) thread", "message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) time.sleep(30) kwargs = { 'stockInput':request.args.get('ticker'), 'targetprice':request.args.get('target'),", "import requests, smtplib, time from flask import Flask, render_template, request, url_for from threading", "server.ehlo() server.login(\"email\", \"password\") head = stockInput + \" price update!\" if oldprice <", "render_template(\"site.html\") @app.route('/start_task') def start_task(): def do_work(stockInput, targetprice, email): targetprice = float(targetprice) while True:", "if oldprice > targetprice: if newprice <= targetprice: body = stockInput.upper() + \"", "+ \" price update!\" if oldprice < targetprice: if newprice >= targetprice: body", "smtplib, time from flask import Flask, render_template, request, url_for from threading import Thread", 
"float(price.replace(\",\", \"\")) server = smtplib.SMTP(\"smtp.gmail.com\", 587) server.ehlo() server.starttls() server.ehlo() server.login(\"email\", \"password\") head =", "newtargetprice = price.replace(\",\", \"\") print(\"The price is: \" + price) newprice = float(price.replace(\",\",", "+ stockInput.upper() + \"?p=\" + stockInput.upper() + \"&.tsrc=fin-srch\" htmlFound = requests.get(URL).text retrieved =", "is: \" + price) newprice = float(price.replace(\",\", \"\")) server = smtplib.SMTP(\"smtp.gmail.com\", 587) server.ehlo()", "message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice == targetprice: body =", "server.ehlo() server.starttls() server.ehlo() server.login(\"email\", \"password\") head = stockInput + \" price update!\" if", "BeautifulSoup(htmlFound, 'html') price = retrieved.find(\"span\", class_ = \"Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)\").text oldprice", "server = smtplib.SMTP(\"smtp.gmail.com\", 587) server.ehlo() server.starttls() server.ehlo() server.login(\"email\", \"password\") head = stockInput +", "app = Flask(__name__) @app.route('/') def progstart(): return render_template(\"site.html\") @app.route('/start_task') def start_task(): def do_work(stockInput,", "price.replace(\",\", \"\") print(\"The price is: \" + price) newprice = float(price.replace(\",\", \"\")) server", "targetprice: body = stockInput.upper() + \" fell to \" + str(newprice) + \"!\"", "\" has reached $\" + str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\",", "to \" + str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message)", "\"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) time.sleep(30) kwargs = { 'stockInput':request.args.get('ticker'),", "\"\") print(\"The price is: \" + price) newprice = float(price.replace(\",\", \"\")) server =", "flask import Flask, render_template, request, 
url_for from threading import Thread app = Flask(__name__)", "import Flask, render_template, request, url_for from threading import Thread app = Flask(__name__) @app.route('/')", "\" rose to \" + str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\",", "def progstart(): return render_template(\"site.html\") @app.route('/start_task') def start_task(): def do_work(stockInput, targetprice, email): targetprice =", "start_task(): def do_work(stockInput, targetprice, email): targetprice = float(targetprice) while True: URL = \"https://finance.yahoo.com/quote/\"", "if oldprice < targetprice: if newprice >= targetprice: body = stockInput.upper() + \"", "server.starttls() server.ehlo() server.login(\"email\", \"password\") head = stockInput + \" price update!\" if oldprice", "\"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice > targetprice: if", "Flask(__name__) @app.route('/') def progstart(): return render_template(\"site.html\") @app.route('/start_task') def start_task(): def do_work(stockInput, targetprice, email):", "if newprice >= targetprice: body = stockInput.upper() + \" rose to \" +", "targetprice, email): targetprice = float(targetprice) while True: URL = \"https://finance.yahoo.com/quote/\" + stockInput.upper() +", "bs4 import BeautifulSoup import requests, smtplib, time from flask import Flask, render_template, request,", "progstart(): return render_template(\"site.html\") @app.route('/start_task') def start_task(): def do_work(stockInput, targetprice, email): targetprice = float(targetprice)", "targetprice = float(targetprice) while True: URL = \"https://finance.yahoo.com/quote/\" + stockInput.upper() + \"?p=\" +", "price update!\" if oldprice < targetprice: if newprice >= targetprice: body = stockInput.upper()", "render_template, request, url_for from threading import Thread app = Flask(__name__) @app.route('/') def progstart():", "stockInput.upper() + \" rose to \" 
+ str(newprice) + \"!\" message = f\"Subject:", "+ \"?p=\" + stockInput.upper() + \"&.tsrc=fin-srch\" htmlFound = requests.get(URL).text retrieved = BeautifulSoup(htmlFound, 'html')", "Mb(-4px) D(ib)\").text oldprice = float(price.replace(\",\", \"\")) newtargetprice = price.replace(\",\", \"\") print(\"The price is:", "BeautifulSoup import requests, smtplib, time from flask import Flask, render_template, request, url_for from", "import Thread app = Flask(__name__) @app.route('/') def progstart(): return render_template(\"site.html\") @app.route('/start_task') def start_task():", "oldprice < targetprice: if newprice >= targetprice: body = stockInput.upper() + \" rose", "body = stockInput.upper() + \" has reached $\" + str(newprice) + \"!\" message", "update!\" if oldprice < targetprice: if newprice >= targetprice: body = stockInput.upper() +", "kwargs = { 'stockInput':request.args.get('ticker'), 'targetprice':request.args.get('target'), 'email':request.args.get('email') } print(request.args) thread = Thread(target=do_work, kwargs=kwargs) thread.start()", "= smtplib.SMTP(\"smtp.gmail.com\", 587) server.ehlo() server.starttls() server.ehlo() server.login(\"email\", \"password\") head = stockInput + \"", "+ \" rose to \" + str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\"", "\"password\") head = stockInput + \" price update!\" if oldprice < targetprice: if", "'html') price = retrieved.find(\"span\", class_ = \"Trsdu(0.3s) Fw(b) Fz(36px) Mb(-4px) D(ib)\").text oldprice =", "stockInput.upper() + \"&.tsrc=fin-srch\" htmlFound = requests.get(URL).text retrieved = BeautifulSoup(htmlFound, 'html') price = retrieved.find(\"span\",", "email, message) if oldprice == targetprice: body = stockInput.upper() + \" has reached", "def do_work(stockInput, targetprice, email): targetprice = float(targetprice) while True: URL = \"https://finance.yahoo.com/quote/\" +", "\"&.tsrc=fin-srch\" htmlFound = requests.get(URL).text retrieved = BeautifulSoup(htmlFound, 'html') 
price = retrieved.find(\"span\", class_ =", "<= targetprice: body = stockInput.upper() + \" fell to \" + str(newprice) +", "str(newprice) + \"!\" message = f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) time.sleep(30) kwargs =", "requests, smtplib, time from flask import Flask, render_template, request, url_for from threading import", "import BeautifulSoup import requests, smtplib, time from flask import Flask, render_template, request, url_for", "Thread app = Flask(__name__) @app.route('/') def progstart(): return render_template(\"site.html\") @app.route('/start_task') def start_task(): def", "587) server.ehlo() server.starttls() server.ehlo() server.login(\"email\", \"password\") head = stockInput + \" price update!\"", "@app.route('/start_task') def start_task(): def do_work(stockInput, targetprice, email): targetprice = float(targetprice) while True: URL", "= stockInput.upper() + \" fell to \" + str(newprice) + \"!\" message =", "htmlFound = requests.get(URL).text retrieved = BeautifulSoup(htmlFound, 'html') price = retrieved.find(\"span\", class_ = \"Trsdu(0.3s)", "= f\"Subject: {head}\\n\\n{body}\" server.sendmail(\"<EMAIL>\", email, message) if oldprice > targetprice: if newprice <=", "'stockInput':request.args.get('ticker'), 'targetprice':request.args.get('target'), 'email':request.args.get('email') } print(request.args) thread = Thread(target=do_work, kwargs=kwargs) thread.start() return render_template(\"site.html\") if", "= stockInput.upper() + \" rose to \" + str(newprice) + \"!\" message =", "newprice <= targetprice: body = stockInput.upper() + \" fell to \" + str(newprice)", "message) if oldprice == targetprice: body = stockInput.upper() + \" has reached $\"" ]
[ "subscribed_response_msg, ticket_challenge_msg, unsubscribed_response_msg, welcome_msg) NO_MORE_EVENTS = object() NO_IDENTITY = object() logger = logging.getLogger(__name__)", "self.realm = realm self.identity = NO_IDENTITY self._custom_state = {} self._said_goodbye = False self._subscriptions", "sub_id = self.subscription_id_for_topic(topic_uri) self._subscriptions[topic_uri] = sub_id self._subscriptions_ids[sub_id] = topic_uri return sub_id async def", "= True self.identity = identity async def abort(self, uri=None, message=None): await self.connection.send_msg(abort_msg(uri, message))", "True self.identity = identity async def abort(self, uri=None, message=None): await self.connection.send_msg(abort_msg(uri, message)) self._said_goodbye", "= connection self.id = generate_global_id() self.auth_id = auth_id self.auth_methods = auth_methods self.is_open =", "= self._subscriptions_ids.pop(sub_id) if not topic_uri: raise NoSuchSubscription() del self._subscriptions[topic_uri] async def mark_subscribed(self, request,", "async def close(self, uri=None, message=None): if self.is_open and not self._said_goodbye: await self.connection.send_msg(goodbye_msg(uri, message))", "NO_IDENTITY self._custom_state = {} self._said_goodbye = False self._subscriptions = {} self._subscriptions_ids = {}", "def __setitem__(self, key: str, value: Any) -> None: self._custom_state[key] = value def __delitem__(self,", "self.auth_id = auth_id self.auth_methods = auth_methods self.is_open = False self.realm = realm self.identity", "def send_raw(self, msg: Iterable): await self.connection.send_msg(msg) async def send_event(self, topic, args=(), kwargs=None, trust_level=None):", "\"\"\"Represents a WAMP session happening over a connection. 
The session is available to", "self._authenticated = True await self.connection.send_msg(welcome_msg(self.id)) self.is_open = True self.identity = identity async def", "logger.debug(\"An event for %s was not sent to %s, as session \" \"isn't", "message=None): if self.is_open and not self._said_goodbye: await self.connection.send_msg(goodbye_msg(uri, message)) self.is_open = False class", "self.identity = identity async def abort(self, uri=None, message=None): await self.connection.send_msg(abort_msg(uri, message)) self._said_goodbye =", "topic, args=(), kwargs=None, trust_level=None): if topic not in self._subscriptions: logger.debug(\"An event for %s", "= auth_id self.auth_methods = auth_methods self.is_open = False self.realm = realm self.identity =", "= sub_id self._subscriptions_ids[sub_id] = topic_uri return sub_id async def unregister_subscription(self, sub_id: int): topic_uri", "unsubscribed_response_msg, welcome_msg) NO_MORE_EVENTS = object() NO_IDENTITY = object() logger = logging.getLogger(__name__) class AbstractAsyncQueue(ABC):", "logger = logging.getLogger(__name__) class AbstractAsyncQueue(ABC): @abstractmethod async def get(self) -> Any: pass @abstractmethod", "request): await self.connection.send_msg( unsubscribed_response_msg(request) ) @staticmethod def subscription_id_for_topic(topic): return hash(topic) & 0xFFFFFFFF async", "WAMPSession: def __init__( self, connection, realm, tasks: AsyncTaskGroup, auth_id=None, auth_methods=() ): \"\"\"Represents a", "happening over a connection. 
The session is available to RPC and event topic", "False def __getitem__(self, key: str) -> Any: return self._custom_state[key] def __setitem__(self, key: str,", "return subscription_id = self._subscriptions[topic] msg = event_msg( subscription_id=subscription_id, publication_id=generate_global_id(), args=args, kwargs=kwargs, trust_level=trust_level )", "request, subscription_id: int): await self.connection.send_msg( subscribed_response_msg(request, subscription_id) ) async def mark_unsubscribed(self, request): await", "def task_done(self) -> None: pass def put_nowait(self, item: Any): pass class WAMPSession: def", "item: Any): pass class WAMPSession: def __init__( self, connection, realm, tasks: AsyncTaskGroup, auth_id=None,", "self.id, auth_id=self.auth_id, auth_provider=auth_role, auth_role=auth_provider, nonce=scram_nonce() ) await self.connection.send_msg(cra_challenge_msg(challenge_string)) async def register_subscription(self, topic_uri: str)", "task_done(self) -> None: pass def put_nowait(self, item: Any): pass class WAMPSession: def __init__(", "str) -> Any: return self._custom_state[key] def __setitem__(self, key: str, value: Any) -> None:", "await self.connection.send_msg(msg) async def send_event(self, topic, args=(), kwargs=None, trust_level=None): if topic not in", "request_cra_auth(self, auth_role: str, auth_provider: str): challenge_string = cra_challenge_string( self.id, auth_id=self.auth_id, auth_provider=auth_role, auth_role=auth_provider, nonce=scram_nonce()", "msg = event_msg( subscription_id=subscription_id, publication_id=generate_global_id(), args=args, kwargs=kwargs, trust_level=trust_level ) await self.connection.send_msg(msg) async def", "__getitem__(self, key: str) -> Any: return self._custom_state[key] def __setitem__(self, key: str, value: Any)", "import AsyncTaskGroup from serverwamp.protocol import (abort_msg, cra_challenge_msg, cra_challenge_string, event_msg, generate_global_id, goodbye_msg, scram_nonce, subscribed_response_msg,", 
"tasks self._authenticated = False def __getitem__(self, key: str) -> Any: return self._custom_state[key] def", "async def mark_unsubscribed(self, request): await self.connection.send_msg( unsubscribed_response_msg(request) ) @staticmethod def subscription_id_for_topic(topic): return hash(topic)", "len(self._custom_state) def __iter__(self) -> Iterator[str]: return iter(self._custom_state) async def spawn_task(self, fn, *fn_args, **fn_kwargs):", "cra_challenge_string( self.id, auth_id=self.auth_id, auth_provider=auth_role, auth_role=auth_provider, nonce=scram_nonce() ) await self.connection.send_msg(cra_challenge_msg(challenge_string)) async def register_subscription(self, topic_uri:", "to RPC and event topic routes. The session can be used to store", "class WAMPSession: def __init__( self, connection, realm, tasks: AsyncTaskGroup, auth_id=None, auth_methods=() ): \"\"\"Represents", "self._tasks = tasks self._authenticated = False def __getitem__(self, key: str) -> Any: return", "self, connection, realm, tasks: AsyncTaskGroup, auth_id=None, auth_methods=() ): \"\"\"Represents a WAMP session happening", "not self._authenticated: self._authenticated = True await self.connection.send_msg(welcome_msg(self.id)) self.is_open = True self.identity = identity", "put_nowait(self, item: Any): pass class WAMPSession: def __init__( self, connection, realm, tasks: AsyncTaskGroup,", "async def send_raw(self, msg: Iterable): await self.connection.send_msg(msg) async def send_event(self, topic, args=(), kwargs=None,", "Iterable, Iterator from serverwamp.adapters.async_base import AsyncTaskGroup from serverwamp.protocol import (abort_msg, cra_challenge_msg, cra_challenge_string, event_msg,", "by later effects: session['customer_id'] = 345 \"\"\" self.connection = connection self.id = generate_global_id()", "self.subscription_id_for_topic(topic_uri) self._subscriptions[topic_uri] = sub_id self._subscriptions_ids[sub_id] = topic_uri return sub_id async def 
unregister_subscription(self, sub_id:", "self._subscriptions_ids.pop(sub_id) if not topic_uri: raise NoSuchSubscription() del self._subscriptions[topic_uri] async def mark_subscribed(self, request, subscription_id:", "store information to be retrieved or changed by later effects: session['customer_id'] = 345", "topic_uri = self._subscriptions_ids.pop(sub_id) if not topic_uri: raise NoSuchSubscription() del self._subscriptions[topic_uri] async def mark_subscribed(self,", "def get(self) -> Any: pass @abstractmethod def task_done(self) -> None: pass def put_nowait(self,", "abc import ABC, abstractmethod from typing import Any, Iterable, Iterator from serverwamp.adapters.async_base import", "object() logger = logging.getLogger(__name__) class AbstractAsyncQueue(ABC): @abstractmethod async def get(self) -> Any: pass", "self._said_goodbye = True await self.close(uri, message) async def close(self, uri=None, message=None): if self.is_open", "subscribed_response_msg(request, subscription_id) ) async def mark_unsubscribed(self, request): await self.connection.send_msg( unsubscribed_response_msg(request) ) @staticmethod def", "return self._custom_state[key] def __setitem__(self, key: str, value: Any) -> None: self._custom_state[key] = value", "(abort_msg, cra_challenge_msg, cra_challenge_string, event_msg, generate_global_id, goodbye_msg, scram_nonce, subscribed_response_msg, ticket_challenge_msg, unsubscribed_response_msg, welcome_msg) NO_MORE_EVENTS =", "WAMP session happening over a connection. 
The session is available to RPC and", "self.connection.send_msg(cra_challenge_msg(challenge_string)) async def register_subscription(self, topic_uri: str) -> int: sub_id = self.subscription_id_for_topic(topic_uri) self._subscriptions[topic_uri] =", "subscription_id = self._subscriptions[topic] msg = event_msg( subscription_id=subscription_id, publication_id=generate_global_id(), args=args, kwargs=kwargs, trust_level=trust_level ) await", "message=None): await self.connection.send_msg(abort_msg(uri, message)) self._said_goodbye = True await self.close(uri, message) async def close(self,", "self.connection.send_msg(ticket_challenge_msg()) async def request_cra_auth(self, auth_role: str, auth_provider: str): challenge_string = cra_challenge_string( self.id, auth_id=self.auth_id,", "The session is available to RPC and event topic routes. The session can", "if not self._authenticated: self._authenticated = True await self.connection.send_msg(welcome_msg(self.id)) self.is_open = True self.identity =", "ABC, abstractmethod from typing import Any, Iterable, Iterator from serverwamp.adapters.async_base import AsyncTaskGroup from", "import logging from abc import ABC, abstractmethod from typing import Any, Iterable, Iterator", "async def get(self) -> Any: pass @abstractmethod def task_done(self) -> None: pass def", "int: return len(self._custom_state) def __iter__(self) -> Iterator[str]: return iter(self._custom_state) async def spawn_task(self, fn,", "return sub_id async def unregister_subscription(self, sub_id: int): topic_uri = self._subscriptions_ids.pop(sub_id) if not topic_uri:", "str) -> None: del self._custom_state[key] def __len__(self) -> int: return len(self._custom_state) def __iter__(self)", "None): if not self._authenticated: self._authenticated = True await self.connection.send_msg(welcome_msg(self.id)) self.is_open = True self.identity", "register_subscription(self, topic_uri: str) -> int: sub_id = self.subscription_id_for_topic(topic_uri) 
self._subscriptions[topic_uri] = sub_id self._subscriptions_ids[sub_id] =", "for %s was not sent to %s, as session \" \"isn't subscribed.\", topic,", "**fn_kwargs) async def send_raw(self, msg: Iterable): await self.connection.send_msg(msg) async def send_event(self, topic, args=(),", "serverwamp.protocol import (abort_msg, cra_challenge_msg, cra_challenge_string, event_msg, generate_global_id, goodbye_msg, scram_nonce, subscribed_response_msg, ticket_challenge_msg, unsubscribed_response_msg, welcome_msg)", "a WAMP session happening over a connection. The session is available to RPC", "and event topic routes. The session can be used to store information to", "True await self.connection.send_msg(welcome_msg(self.id)) self.is_open = True self.identity = identity async def abort(self, uri=None,", "event_msg( subscription_id=subscription_id, publication_id=generate_global_id(), args=args, kwargs=kwargs, trust_level=trust_level ) await self.connection.send_msg(msg) async def request_ticket_authentication(self): await", "serverwamp.adapters.async_base import AsyncTaskGroup from serverwamp.protocol import (abort_msg, cra_challenge_msg, cra_challenge_string, event_msg, generate_global_id, goodbye_msg, scram_nonce,", "is available to RPC and event topic routes. 
The session can be used", "self._custom_state = {} self._said_goodbye = False self._subscriptions = {} self._subscriptions_ids = {} self._tasks", "& 0xFFFFFFFF async def mark_authenticated(self, identity: Any = None): if not self._authenticated: self._authenticated", "def __getitem__(self, key: str) -> Any: return self._custom_state[key] def __setitem__(self, key: str, value:", "generate_global_id, goodbye_msg, scram_nonce, subscribed_response_msg, ticket_challenge_msg, unsubscribed_response_msg, welcome_msg) NO_MORE_EVENTS = object() NO_IDENTITY = object()", "session \" \"isn't subscribed.\", topic, self.id) return subscription_id = self._subscriptions[topic] msg = event_msg(", "Any: return self._custom_state[key] def __setitem__(self, key: str, value: Any) -> None: self._custom_state[key] =", "= self.subscription_id_for_topic(topic_uri) self._subscriptions[topic_uri] = sub_id self._subscriptions_ids[sub_id] = topic_uri return sub_id async def unregister_subscription(self,", "): \"\"\"Represents a WAMP session happening over a connection. 
The session is available", "event for %s was not sent to %s, as session \" \"isn't subscribed.\",", "*fn_args, **fn_kwargs) async def send_raw(self, msg: Iterable): await self.connection.send_msg(msg) async def send_event(self, topic,", "= cra_challenge_string( self.id, auth_id=self.auth_id, auth_provider=auth_role, auth_role=auth_provider, nonce=scram_nonce() ) await self.connection.send_msg(cra_challenge_msg(challenge_string)) async def register_subscription(self,", "-> Iterator[str]: return iter(self._custom_state) async def spawn_task(self, fn, *fn_args, **fn_kwargs): await self._tasks.spawn(fn, *fn_args,", "auth_provider=auth_role, auth_role=auth_provider, nonce=scram_nonce() ) await self.connection.send_msg(cra_challenge_msg(challenge_string)) async def register_subscription(self, topic_uri: str) -> int:", "await self._tasks.spawn(fn, *fn_args, **fn_kwargs) async def send_raw(self, msg: Iterable): await self.connection.send_msg(msg) async def", "@abstractmethod def task_done(self) -> None: pass def put_nowait(self, item: Any): pass class WAMPSession:", "identity: Any = None): if not self._authenticated: self._authenticated = True await self.connection.send_msg(welcome_msg(self.id)) self.is_open", "as session \" \"isn't subscribed.\", topic, self.id) return subscription_id = self._subscriptions[topic] msg =", ") await self.connection.send_msg(cra_challenge_msg(challenge_string)) async def register_subscription(self, topic_uri: str) -> int: sub_id = self.subscription_id_for_topic(topic_uri)", "await self.close(uri, message) async def close(self, uri=None, message=None): if self.is_open and not self._said_goodbye:", "kwargs=None, trust_level=None): if topic not in self._subscriptions: logger.debug(\"An event for %s was not", "= logging.getLogger(__name__) class AbstractAsyncQueue(ABC): @abstractmethod async def get(self) -> Any: pass @abstractmethod def", "= {} self._subscriptions_ids = {} self._tasks = tasks self._authenticated = False def __getitem__(self,", 
"sent to %s, as session \" \"isn't subscribed.\", topic, self.id) return subscription_id =", "await self.connection.send_msg(abort_msg(uri, message)) self._said_goodbye = True await self.close(uri, message) async def close(self, uri=None,", "message) async def close(self, uri=None, message=None): if self.is_open and not self._said_goodbye: await self.connection.send_msg(goodbye_msg(uri,", "= event_msg( subscription_id=subscription_id, publication_id=generate_global_id(), args=args, kwargs=kwargs, trust_level=trust_level ) await self.connection.send_msg(msg) async def request_ticket_authentication(self):", "request_ticket_authentication(self): await self.connection.send_msg(ticket_challenge_msg()) async def request_cra_auth(self, auth_role: str, auth_provider: str): challenge_string = cra_challenge_string(", "hash(topic) & 0xFFFFFFFF async def mark_authenticated(self, identity: Any = None): if not self._authenticated:", "int): topic_uri = self._subscriptions_ids.pop(sub_id) if not topic_uri: raise NoSuchSubscription() del self._subscriptions[topic_uri] async def", "async def request_cra_auth(self, auth_role: str, auth_provider: str): challenge_string = cra_challenge_string( self.id, auth_id=self.auth_id, auth_provider=auth_role,", "abstractmethod from typing import Any, Iterable, Iterator from serverwamp.adapters.async_base import AsyncTaskGroup from serverwamp.protocol", "def close(self, uri=None, message=None): if self.is_open and not self._said_goodbye: await self.connection.send_msg(goodbye_msg(uri, message)) self.is_open", "args=(), kwargs=None, trust_level=None): if topic not in self._subscriptions: logger.debug(\"An event for %s was", "self.is_open = True self.identity = identity async def abort(self, uri=None, message=None): await self.connection.send_msg(abort_msg(uri,", "await self.connection.send_msg(ticket_challenge_msg()) async def request_cra_auth(self, auth_role: str, auth_provider: str): challenge_string = cra_challenge_string( self.id,", "from 
serverwamp.protocol import (abort_msg, cra_challenge_msg, cra_challenge_string, event_msg, generate_global_id, goodbye_msg, scram_nonce, subscribed_response_msg, ticket_challenge_msg, unsubscribed_response_msg,", "await self.connection.send_msg(cra_challenge_msg(challenge_string)) async def register_subscription(self, topic_uri: str) -> int: sub_id = self.subscription_id_for_topic(topic_uri) self._subscriptions[topic_uri]", "def spawn_task(self, fn, *fn_args, **fn_kwargs): await self._tasks.spawn(fn, *fn_args, **fn_kwargs) async def send_raw(self, msg:", "= {} self._said_goodbye = False self._subscriptions = {} self._subscriptions_ids = {} self._tasks =", "import Any, Iterable, Iterator from serverwamp.adapters.async_base import AsyncTaskGroup from serverwamp.protocol import (abort_msg, cra_challenge_msg,", "not sent to %s, as session \" \"isn't subscribed.\", topic, self.id) return subscription_id", "class AbstractAsyncQueue(ABC): @abstractmethod async def get(self) -> Any: pass @abstractmethod def task_done(self) ->", "Any): pass class WAMPSession: def __init__( self, connection, realm, tasks: AsyncTaskGroup, auth_id=None, auth_methods=()", "nonce=scram_nonce() ) await self.connection.send_msg(cra_challenge_msg(challenge_string)) async def register_subscription(self, topic_uri: str) -> int: sub_id =", "self._custom_state[key] = value def __delitem__(self, key: str) -> None: del self._custom_state[key] def __len__(self)", "self.connection = connection self.id = generate_global_id() self.auth_id = auth_id self.auth_methods = auth_methods self.is_open", "topic not in self._subscriptions: logger.debug(\"An event for %s was not sent to %s,", "The session can be used to store information to be retrieved or changed", "if self.is_open and not self._said_goodbye: await self.connection.send_msg(goodbye_msg(uri, message)) self.is_open = False class NoSuchSubscription(Exception):", "not in self._subscriptions: logger.debug(\"An event for %s was not sent to %s, as", "typing 
import Any, Iterable, Iterator from serverwamp.adapters.async_base import AsyncTaskGroup from serverwamp.protocol import (abort_msg,", "self.id) return subscription_id = self._subscriptions[topic] msg = event_msg( subscription_id=subscription_id, publication_id=generate_global_id(), args=args, kwargs=kwargs, trust_level=trust_level", "available to RPC and event topic routes. The session can be used to", "def request_ticket_authentication(self): await self.connection.send_msg(ticket_challenge_msg()) async def request_cra_auth(self, auth_role: str, auth_provider: str): challenge_string =", "async def spawn_task(self, fn, *fn_args, **fn_kwargs): await self._tasks.spawn(fn, *fn_args, **fn_kwargs) async def send_raw(self,", "publication_id=generate_global_id(), args=args, kwargs=kwargs, trust_level=trust_level ) await self.connection.send_msg(msg) async def request_ticket_authentication(self): await self.connection.send_msg(ticket_challenge_msg()) async", "in self._subscriptions: logger.debug(\"An event for %s was not sent to %s, as session", "mark_authenticated(self, identity: Any = None): if not self._authenticated: self._authenticated = True await self.connection.send_msg(welcome_msg(self.id))", "connection. The session is available to RPC and event topic routes. 
The session", "used to store information to be retrieved or changed by later effects: session['customer_id']", "cra_challenge_string, event_msg, generate_global_id, goodbye_msg, scram_nonce, subscribed_response_msg, ticket_challenge_msg, unsubscribed_response_msg, welcome_msg) NO_MORE_EVENTS = object() NO_IDENTITY", "def register_subscription(self, topic_uri: str) -> int: sub_id = self.subscription_id_for_topic(topic_uri) self._subscriptions[topic_uri] = sub_id self._subscriptions_ids[sub_id]", "self._subscriptions[topic_uri] = sub_id self._subscriptions_ids[sub_id] = topic_uri return sub_id async def unregister_subscription(self, sub_id: int):", "{} self._said_goodbye = False self._subscriptions = {} self._subscriptions_ids = {} self._tasks = tasks", "False self._subscriptions = {} self._subscriptions_ids = {} self._tasks = tasks self._authenticated = False", "auth_role: str, auth_provider: str): challenge_string = cra_challenge_string( self.id, auth_id=self.auth_id, auth_provider=auth_role, auth_role=auth_provider, nonce=scram_nonce() )", "NoSuchSubscription() del self._subscriptions[topic_uri] async def mark_subscribed(self, request, subscription_id: int): await self.connection.send_msg( subscribed_response_msg(request, subscription_id)", "self.close(uri, message) async def close(self, uri=None, message=None): if self.is_open and not self._said_goodbye: await", "mark_unsubscribed(self, request): await self.connection.send_msg( unsubscribed_response_msg(request) ) @staticmethod def subscription_id_for_topic(topic): return hash(topic) & 0xFFFFFFFF", "Any: pass @abstractmethod def task_done(self) -> None: pass def put_nowait(self, item: Any): pass", "effects: session['customer_id'] = 345 \"\"\" self.connection = connection self.id = generate_global_id() self.auth_id =", "self.connection.send_msg( subscribed_response_msg(request, subscription_id) ) async def mark_unsubscribed(self, request): await self.connection.send_msg( unsubscribed_response_msg(request) ) 
@staticmethod", "= None): if not self._authenticated: self._authenticated = True await self.connection.send_msg(welcome_msg(self.id)) self.is_open = True", "__iter__(self) -> Iterator[str]: return iter(self._custom_state) async def spawn_task(self, fn, *fn_args, **fn_kwargs): await self._tasks.spawn(fn,", "AbstractAsyncQueue(ABC): @abstractmethod async def get(self) -> Any: pass @abstractmethod def task_done(self) -> None:", "-> int: return len(self._custom_state) def __iter__(self) -> Iterator[str]: return iter(self._custom_state) async def spawn_task(self,", "NO_IDENTITY = object() logger = logging.getLogger(__name__) class AbstractAsyncQueue(ABC): @abstractmethod async def get(self) ->", "self.connection.send_msg(msg) async def send_event(self, topic, args=(), kwargs=None, trust_level=None): if topic not in self._subscriptions:", "del self._subscriptions[topic_uri] async def mark_subscribed(self, request, subscription_id: int): await self.connection.send_msg( subscribed_response_msg(request, subscription_id) )", "async def unregister_subscription(self, sub_id: int): topic_uri = self._subscriptions_ids.pop(sub_id) if not topic_uri: raise NoSuchSubscription()", "str) -> int: sub_id = self.subscription_id_for_topic(topic_uri) self._subscriptions[topic_uri] = sub_id self._subscriptions_ids[sub_id] = topic_uri return", "Any = None): if not self._authenticated: self._authenticated = True await self.connection.send_msg(welcome_msg(self.id)) self.is_open =", "def put_nowait(self, item: Any): pass class WAMPSession: def __init__( self, connection, realm, tasks:", "NO_MORE_EVENTS = object() NO_IDENTITY = object() logger = logging.getLogger(__name__) class AbstractAsyncQueue(ABC): @abstractmethod async", "over a connection. 
The session is available to RPC and event topic routes.", "identity async def abort(self, uri=None, message=None): await self.connection.send_msg(abort_msg(uri, message)) self._said_goodbye = True await", "challenge_string = cra_challenge_string( self.id, auth_id=self.auth_id, auth_provider=auth_role, auth_role=auth_provider, nonce=scram_nonce() ) await self.connection.send_msg(cra_challenge_msg(challenge_string)) async def", "spawn_task(self, fn, *fn_args, **fn_kwargs): await self._tasks.spawn(fn, *fn_args, **fn_kwargs) async def send_raw(self, msg: Iterable):", "uri=None, message=None): await self.connection.send_msg(abort_msg(uri, message)) self._said_goodbye = True await self.close(uri, message) async def", "trust_level=None): if topic not in self._subscriptions: logger.debug(\"An event for %s was not sent", "= self._subscriptions[topic] msg = event_msg( subscription_id=subscription_id, publication_id=generate_global_id(), args=args, kwargs=kwargs, trust_level=trust_level ) await self.connection.send_msg(msg)", "value: Any) -> None: self._custom_state[key] = value def __delitem__(self, key: str) -> None:", "await self.connection.send_msg( subscribed_response_msg(request, subscription_id) ) async def mark_unsubscribed(self, request): await self.connection.send_msg( unsubscribed_response_msg(request) )", "self._said_goodbye = False self._subscriptions = {} self._subscriptions_ids = {} self._tasks = tasks self._authenticated", "uri=None, message=None): if self.is_open and not self._said_goodbye: await self.connection.send_msg(goodbye_msg(uri, message)) self.is_open = False", "AsyncTaskGroup from serverwamp.protocol import (abort_msg, cra_challenge_msg, cra_challenge_string, event_msg, generate_global_id, goodbye_msg, scram_nonce, subscribed_response_msg, ticket_challenge_msg,", "subscribed.\", topic, self.id) return subscription_id = self._subscriptions[topic] msg = event_msg( subscription_id=subscription_id, publication_id=generate_global_id(), args=args,", 
"connection, realm, tasks: AsyncTaskGroup, auth_id=None, auth_methods=() ): \"\"\"Represents a WAMP session happening over", "topic_uri: raise NoSuchSubscription() del self._subscriptions[topic_uri] async def mark_subscribed(self, request, subscription_id: int): await self.connection.send_msg(", "Any, Iterable, Iterator from serverwamp.adapters.async_base import AsyncTaskGroup from serverwamp.protocol import (abort_msg, cra_challenge_msg, cra_challenge_string,", "\"\"\" self.connection = connection self.id = generate_global_id() self.auth_id = auth_id self.auth_methods = auth_methods", "<filename>serverwamp/session.py import logging from abc import ABC, abstractmethod from typing import Any, Iterable,", "routes. The session can be used to store information to be retrieved or", "logging.getLogger(__name__) class AbstractAsyncQueue(ABC): @abstractmethod async def get(self) -> Any: pass @abstractmethod def task_done(self)", "= 345 \"\"\" self.connection = connection self.id = generate_global_id() self.auth_id = auth_id self.auth_methods", "iter(self._custom_state) async def spawn_task(self, fn, *fn_args, **fn_kwargs): await self._tasks.spawn(fn, *fn_args, **fn_kwargs) async def", "topic routes. The session can be used to store information to be retrieved", "auth_id=None, auth_methods=() ): \"\"\"Represents a WAMP session happening over a connection. 
The session", "def __iter__(self) -> Iterator[str]: return iter(self._custom_state) async def spawn_task(self, fn, *fn_args, **fn_kwargs): await", "0xFFFFFFFF async def mark_authenticated(self, identity: Any = None): if not self._authenticated: self._authenticated =", "subscription_id: int): await self.connection.send_msg( subscribed_response_msg(request, subscription_id) ) async def mark_unsubscribed(self, request): await self.connection.send_msg(", "self._authenticated: self._authenticated = True await self.connection.send_msg(welcome_msg(self.id)) self.is_open = True self.identity = identity async", "self._subscriptions = {} self._subscriptions_ids = {} self._tasks = tasks self._authenticated = False def", "None: del self._custom_state[key] def __len__(self) -> int: return len(self._custom_state) def __iter__(self) -> Iterator[str]:", "sub_id self._subscriptions_ids[sub_id] = topic_uri return sub_id async def unregister_subscription(self, sub_id: int): topic_uri =", "async def mark_authenticated(self, identity: Any = None): if not self._authenticated: self._authenticated = True", "str, value: Any) -> None: self._custom_state[key] = value def __delitem__(self, key: str) ->", "{} self._tasks = tasks self._authenticated = False def __getitem__(self, key: str) -> Any:", "self.connection.send_msg(msg) async def request_ticket_authentication(self): await self.connection.send_msg(ticket_challenge_msg()) async def request_cra_auth(self, auth_role: str, auth_provider: str):", "auth_id=self.auth_id, auth_provider=auth_role, auth_role=auth_provider, nonce=scram_nonce() ) await self.connection.send_msg(cra_challenge_msg(challenge_string)) async def register_subscription(self, topic_uri: str) ->", "changed by later effects: session['customer_id'] = 345 \"\"\" self.connection = connection self.id =", "session is available to RPC and event topic routes. 
The session can be", "be retrieved or changed by later effects: session['customer_id'] = 345 \"\"\" self.connection =", "-> None: self._custom_state[key] = value def __delitem__(self, key: str) -> None: del self._custom_state[key]", "AsyncTaskGroup, auth_id=None, auth_methods=() ): \"\"\"Represents a WAMP session happening over a connection. The", "mark_subscribed(self, request, subscription_id: int): await self.connection.send_msg( subscribed_response_msg(request, subscription_id) ) async def mark_unsubscribed(self, request):", "-> Any: pass @abstractmethod def task_done(self) -> None: pass def put_nowait(self, item: Any):", ") async def mark_unsubscribed(self, request): await self.connection.send_msg( unsubscribed_response_msg(request) ) @staticmethod def subscription_id_for_topic(topic): return", "import (abort_msg, cra_challenge_msg, cra_challenge_string, event_msg, generate_global_id, goodbye_msg, scram_nonce, subscribed_response_msg, ticket_challenge_msg, unsubscribed_response_msg, welcome_msg) NO_MORE_EVENTS", "was not sent to %s, as session \" \"isn't subscribed.\", topic, self.id) return", "pass def put_nowait(self, item: Any): pass class WAMPSession: def __init__( self, connection, realm,", "be used to store information to be retrieved or changed by later effects:", "if not topic_uri: raise NoSuchSubscription() del self._subscriptions[topic_uri] async def mark_subscribed(self, request, subscription_id: int):", "def send_event(self, topic, args=(), kwargs=None, trust_level=None): if topic not in self._subscriptions: logger.debug(\"An event", "key: str, value: Any) -> None: self._custom_state[key] = value def __delitem__(self, key: str)", "subscription_id_for_topic(topic): return hash(topic) & 0xFFFFFFFF async def mark_authenticated(self, identity: Any = None): if", "await self.connection.send_msg(msg) async def request_ticket_authentication(self): await self.connection.send_msg(ticket_challenge_msg()) async def request_cra_auth(self, auth_role: str, 
auth_provider:", "= value def __delitem__(self, key: str) -> None: del self._custom_state[key] def __len__(self) ->", "auth_methods=() ): \"\"\"Represents a WAMP session happening over a connection. The session is", "= auth_methods self.is_open = False self.realm = realm self.identity = NO_IDENTITY self._custom_state =", "session can be used to store information to be retrieved or changed by", "@staticmethod def subscription_id_for_topic(topic): return hash(topic) & 0xFFFFFFFF async def mark_authenticated(self, identity: Any =", "async def register_subscription(self, topic_uri: str) -> int: sub_id = self.subscription_id_for_topic(topic_uri) self._subscriptions[topic_uri] = sub_id", "event_msg, generate_global_id, goodbye_msg, scram_nonce, subscribed_response_msg, ticket_challenge_msg, unsubscribed_response_msg, welcome_msg) NO_MORE_EVENTS = object() NO_IDENTITY =", "async def abort(self, uri=None, message=None): await self.connection.send_msg(abort_msg(uri, message)) self._said_goodbye = True await self.close(uri,", "to %s, as session \" \"isn't subscribed.\", topic, self.id) return subscription_id = self._subscriptions[topic]", "await self.connection.send_msg(welcome_msg(self.id)) self.is_open = True self.identity = identity async def abort(self, uri=None, message=None):", "not topic_uri: raise NoSuchSubscription() del self._subscriptions[topic_uri] async def mark_subscribed(self, request, subscription_id: int): await", "del self._custom_state[key] def __len__(self) -> int: return len(self._custom_state) def __iter__(self) -> Iterator[str]: return", "*fn_args, **fn_kwargs): await self._tasks.spawn(fn, *fn_args, **fn_kwargs) async def send_raw(self, msg: Iterable): await self.connection.send_msg(msg)", "= {} self._tasks = tasks self._authenticated = False def __getitem__(self, key: str) ->", "False self.realm = realm self.identity = NO_IDENTITY self._custom_state = {} self._said_goodbye = False", "-> Any: return self._custom_state[key] def __setitem__(self, key: 
str, value: Any) -> None: self._custom_state[key]", "Any) -> None: self._custom_state[key] = value def __delitem__(self, key: str) -> None: del", "= realm self.identity = NO_IDENTITY self._custom_state = {} self._said_goodbye = False self._subscriptions =", "welcome_msg) NO_MORE_EVENTS = object() NO_IDENTITY = object() logger = logging.getLogger(__name__) class AbstractAsyncQueue(ABC): @abstractmethod", "return len(self._custom_state) def __iter__(self) -> Iterator[str]: return iter(self._custom_state) async def spawn_task(self, fn, *fn_args,", "msg: Iterable): await self.connection.send_msg(msg) async def send_event(self, topic, args=(), kwargs=None, trust_level=None): if topic", "auth_role=auth_provider, nonce=scram_nonce() ) await self.connection.send_msg(cra_challenge_msg(challenge_string)) async def register_subscription(self, topic_uri: str) -> int: sub_id", "sub_id async def unregister_subscription(self, sub_id: int): topic_uri = self._subscriptions_ids.pop(sub_id) if not topic_uri: raise", "object() NO_IDENTITY = object() logger = logging.getLogger(__name__) class AbstractAsyncQueue(ABC): @abstractmethod async def get(self)", "ticket_challenge_msg, unsubscribed_response_msg, welcome_msg) NO_MORE_EVENTS = object() NO_IDENTITY = object() logger = logging.getLogger(__name__) class", "def mark_authenticated(self, identity: Any = None): if not self._authenticated: self._authenticated = True await", "= generate_global_id() self.auth_id = auth_id self.auth_methods = auth_methods self.is_open = False self.realm =", "self.identity = NO_IDENTITY self._custom_state = {} self._said_goodbye = False self._subscriptions = {} self._subscriptions_ids", "tasks: AsyncTaskGroup, auth_id=None, auth_methods=() ): \"\"\"Represents a WAMP session happening over a connection.", "**fn_kwargs): await self._tasks.spawn(fn, *fn_args, **fn_kwargs) async def send_raw(self, msg: Iterable): await self.connection.send_msg(msg) async", "retrieved or changed by later effects: 
session['customer_id'] = 345 \"\"\" self.connection = connection", "async def request_ticket_authentication(self): await self.connection.send_msg(ticket_challenge_msg()) async def request_cra_auth(self, auth_role: str, auth_provider: str): challenge_string", "def unregister_subscription(self, sub_id: int): topic_uri = self._subscriptions_ids.pop(sub_id) if not topic_uri: raise NoSuchSubscription() del", "__setitem__(self, key: str, value: Any) -> None: self._custom_state[key] = value def __delitem__(self, key:", "%s was not sent to %s, as session \" \"isn't subscribed.\", topic, self.id)", "-> int: sub_id = self.subscription_id_for_topic(topic_uri) self._subscriptions[topic_uri] = sub_id self._subscriptions_ids[sub_id] = topic_uri return sub_id", "unsubscribed_response_msg(request) ) @staticmethod def subscription_id_for_topic(topic): return hash(topic) & 0xFFFFFFFF async def mark_authenticated(self, identity:", "pass class WAMPSession: def __init__( self, connection, realm, tasks: AsyncTaskGroup, auth_id=None, auth_methods=() ):", "a connection. The session is available to RPC and event topic routes. The", "event topic routes. The session can be used to store information to be", "auth_methods self.is_open = False self.realm = realm self.identity = NO_IDENTITY self._custom_state = {}", "topic_uri return sub_id async def unregister_subscription(self, sub_id: int): topic_uri = self._subscriptions_ids.pop(sub_id) if not", "RPC and event topic routes. 
The session can be used to store information", "True await self.close(uri, message) async def close(self, uri=None, message=None): if self.is_open and not", "def subscription_id_for_topic(topic): return hash(topic) & 0xFFFFFFFF async def mark_authenticated(self, identity: Any = None):", "= True await self.close(uri, message) async def close(self, uri=None, message=None): if self.is_open and", "return hash(topic) & 0xFFFFFFFF async def mark_authenticated(self, identity: Any = None): if not", "= False self.realm = realm self.identity = NO_IDENTITY self._custom_state = {} self._said_goodbye =", "{} self._subscriptions_ids = {} self._tasks = tasks self._authenticated = False def __getitem__(self, key:", "to be retrieved or changed by later effects: session['customer_id'] = 345 \"\"\" self.connection", "str, auth_provider: str): challenge_string = cra_challenge_string( self.id, auth_id=self.auth_id, auth_provider=auth_role, auth_role=auth_provider, nonce=scram_nonce() ) await", "self._custom_state[key] def __setitem__(self, key: str, value: Any) -> None: self._custom_state[key] = value def", "can be used to store information to be retrieved or changed by later", "= False self._subscriptions = {} self._subscriptions_ids = {} self._tasks = tasks self._authenticated =", "None: pass def put_nowait(self, item: Any): pass class WAMPSession: def __init__( self, connection,", "self.connection.send_msg(welcome_msg(self.id)) self.is_open = True self.identity = identity async def abort(self, uri=None, message=None): await", "or changed by later effects: session['customer_id'] = 345 \"\"\" self.connection = connection self.id", "= False def __getitem__(self, key: str) -> Any: return self._custom_state[key] def __setitem__(self, key:", "self._subscriptions[topic] msg = event_msg( subscription_id=subscription_id, publication_id=generate_global_id(), args=args, kwargs=kwargs, trust_level=trust_level ) await self.connection.send_msg(msg) async", "%s, as session \" \"isn't 
subscribed.\", topic, self.id) return subscription_id = self._subscriptions[topic] msg", "close(self, uri=None, message=None): if self.is_open and not self._said_goodbye: await self.connection.send_msg(goodbye_msg(uri, message)) self.is_open =", "Iterable): await self.connection.send_msg(msg) async def send_event(self, topic, args=(), kwargs=None, trust_level=None): if topic not", ") await self.connection.send_msg(msg) async def request_ticket_authentication(self): await self.connection.send_msg(ticket_challenge_msg()) async def request_cra_auth(self, auth_role: str,", "self._authenticated = False def __getitem__(self, key: str) -> Any: return self._custom_state[key] def __setitem__(self,", "Iterator from serverwamp.adapters.async_base import AsyncTaskGroup from serverwamp.protocol import (abort_msg, cra_challenge_msg, cra_challenge_string, event_msg, generate_global_id,", "subscription_id=subscription_id, publication_id=generate_global_id(), args=args, kwargs=kwargs, trust_level=trust_level ) await self.connection.send_msg(msg) async def request_ticket_authentication(self): await self.connection.send_msg(ticket_challenge_msg())", "later effects: session['customer_id'] = 345 \"\"\" self.connection = connection self.id = generate_global_id() self.auth_id", "connection self.id = generate_global_id() self.auth_id = auth_id self.auth_methods = auth_methods self.is_open = False", "None: self._custom_state[key] = value def __delitem__(self, key: str) -> None: del self._custom_state[key] def", "if topic not in self._subscriptions: logger.debug(\"An event for %s was not sent to", "from typing import Any, Iterable, Iterator from serverwamp.adapters.async_base import AsyncTaskGroup from serverwamp.protocol import", "args=args, kwargs=kwargs, trust_level=trust_level ) await self.connection.send_msg(msg) async def request_ticket_authentication(self): await self.connection.send_msg(ticket_challenge_msg()) async def", "fn, *fn_args, **fn_kwargs): await self._tasks.spawn(fn, 
*fn_args, **fn_kwargs) async def send_raw(self, msg: Iterable): await", "value def __delitem__(self, key: str) -> None: del self._custom_state[key] def __len__(self) -> int:", "= NO_IDENTITY self._custom_state = {} self._said_goodbye = False self._subscriptions = {} self._subscriptions_ids =", "kwargs=kwargs, trust_level=trust_level ) await self.connection.send_msg(msg) async def request_ticket_authentication(self): await self.connection.send_msg(ticket_challenge_msg()) async def request_cra_auth(self,", "from abc import ABC, abstractmethod from typing import Any, Iterable, Iterator from serverwamp.adapters.async_base", "realm self.identity = NO_IDENTITY self._custom_state = {} self._said_goodbye = False self._subscriptions = {}", "self._tasks.spawn(fn, *fn_args, **fn_kwargs) async def send_raw(self, msg: Iterable): await self.connection.send_msg(msg) async def send_event(self,", "def __len__(self) -> int: return len(self._custom_state) def __iter__(self) -> Iterator[str]: return iter(self._custom_state) async", "@abstractmethod async def get(self) -> Any: pass @abstractmethod def task_done(self) -> None: pass", "sub_id: int): topic_uri = self._subscriptions_ids.pop(sub_id) if not topic_uri: raise NoSuchSubscription() del self._subscriptions[topic_uri] async", "raise NoSuchSubscription() del self._subscriptions[topic_uri] async def mark_subscribed(self, request, subscription_id: int): await self.connection.send_msg( subscribed_response_msg(request,", "self._subscriptions: logger.debug(\"An event for %s was not sent to %s, as session \"", "session['customer_id'] = 345 \"\"\" self.connection = connection self.id = generate_global_id() self.auth_id = auth_id", "auth_id self.auth_methods = auth_methods self.is_open = False self.realm = realm self.identity = NO_IDENTITY", "get(self) -> Any: pass @abstractmethod def task_done(self) -> None: pass def put_nowait(self, item:", "-> None: del self._custom_state[key] def __len__(self) -> int: return len(self._custom_state) def 
__iter__(self) ->", "pass @abstractmethod def task_done(self) -> None: pass def put_nowait(self, item: Any): pass class", "topic, self.id) return subscription_id = self._subscriptions[topic] msg = event_msg( subscription_id=subscription_id, publication_id=generate_global_id(), args=args, kwargs=kwargs,", "Iterator[str]: return iter(self._custom_state) async def spawn_task(self, fn, *fn_args, **fn_kwargs): await self._tasks.spawn(fn, *fn_args, **fn_kwargs)", "send_event(self, topic, args=(), kwargs=None, trust_level=None): if topic not in self._subscriptions: logger.debug(\"An event for", "generate_global_id() self.auth_id = auth_id self.auth_methods = auth_methods self.is_open = False self.realm = realm", "int): await self.connection.send_msg( subscribed_response_msg(request, subscription_id) ) async def mark_unsubscribed(self, request): await self.connection.send_msg( unsubscribed_response_msg(request)", "self._subscriptions_ids[sub_id] = topic_uri return sub_id async def unregister_subscription(self, sub_id: int): topic_uri = self._subscriptions_ids.pop(sub_id)", "subscription_id) ) async def mark_unsubscribed(self, request): await self.connection.send_msg( unsubscribed_response_msg(request) ) @staticmethod def subscription_id_for_topic(topic):", "realm, tasks: AsyncTaskGroup, auth_id=None, auth_methods=() ): \"\"\"Represents a WAMP session happening over a", "self.auth_methods = auth_methods self.is_open = False self.realm = realm self.identity = NO_IDENTITY self._custom_state", "trust_level=trust_level ) await self.connection.send_msg(msg) async def request_ticket_authentication(self): await self.connection.send_msg(ticket_challenge_msg()) async def request_cra_auth(self, auth_role:", "key: str) -> None: del self._custom_state[key] def __len__(self) -> int: return len(self._custom_state) def", "key: str) -> Any: return self._custom_state[key] def __setitem__(self, key: str, value: Any) ->", "send_raw(self, msg: Iterable): await self.connection.send_msg(msg) 
async def send_event(self, topic, args=(), kwargs=None, trust_level=None): if", "to store information to be retrieved or changed by later effects: session['customer_id'] =", "information to be retrieved or changed by later effects: session['customer_id'] = 345 \"\"\"", "auth_provider: str): challenge_string = cra_challenge_string( self.id, auth_id=self.auth_id, auth_provider=auth_role, auth_role=auth_provider, nonce=scram_nonce() ) await self.connection.send_msg(cra_challenge_msg(challenge_string))", "self.is_open = False self.realm = realm self.identity = NO_IDENTITY self._custom_state = {} self._said_goodbye", "= tasks self._authenticated = False def __getitem__(self, key: str) -> Any: return self._custom_state[key]", "abort(self, uri=None, message=None): await self.connection.send_msg(abort_msg(uri, message)) self._said_goodbye = True await self.close(uri, message) async", "def abort(self, uri=None, message=None): await self.connection.send_msg(abort_msg(uri, message)) self._said_goodbye = True await self.close(uri, message)", "= True await self.connection.send_msg(welcome_msg(self.id)) self.is_open = True self.identity = identity async def abort(self,", "__delitem__(self, key: str) -> None: del self._custom_state[key] def __len__(self) -> int: return len(self._custom_state)", "= identity async def abort(self, uri=None, message=None): await self.connection.send_msg(abort_msg(uri, message)) self._said_goodbye = True", "345 \"\"\" self.connection = connection self.id = generate_global_id() self.auth_id = auth_id self.auth_methods =", "self._subscriptions[topic_uri] async def mark_subscribed(self, request, subscription_id: int): await self.connection.send_msg( subscribed_response_msg(request, subscription_id) ) async", "from serverwamp.adapters.async_base import AsyncTaskGroup from serverwamp.protocol import (abort_msg, cra_challenge_msg, cra_challenge_string, event_msg, generate_global_id, goodbye_msg,", "= object() NO_IDENTITY = object() logger = 
logging.getLogger(__name__) class AbstractAsyncQueue(ABC): @abstractmethod async def", "message)) self._said_goodbye = True await self.close(uri, message) async def close(self, uri=None, message=None): if", "topic_uri: str) -> int: sub_id = self.subscription_id_for_topic(topic_uri) self._subscriptions[topic_uri] = sub_id self._subscriptions_ids[sub_id] = topic_uri", "-> None: pass def put_nowait(self, item: Any): pass class WAMPSession: def __init__( self,", "scram_nonce, subscribed_response_msg, ticket_challenge_msg, unsubscribed_response_msg, welcome_msg) NO_MORE_EVENTS = object() NO_IDENTITY = object() logger =", "goodbye_msg, scram_nonce, subscribed_response_msg, ticket_challenge_msg, unsubscribed_response_msg, welcome_msg) NO_MORE_EVENTS = object() NO_IDENTITY = object() logger", "\" \"isn't subscribed.\", topic, self.id) return subscription_id = self._subscriptions[topic] msg = event_msg( subscription_id=subscription_id,", "__init__( self, connection, realm, tasks: AsyncTaskGroup, auth_id=None, auth_methods=() ): \"\"\"Represents a WAMP session", "self._subscriptions_ids = {} self._tasks = tasks self._authenticated = False def __getitem__(self, key: str)", "self.connection.send_msg( unsubscribed_response_msg(request) ) @staticmethod def subscription_id_for_topic(topic): return hash(topic) & 0xFFFFFFFF async def mark_authenticated(self,", "= object() logger = logging.getLogger(__name__) class AbstractAsyncQueue(ABC): @abstractmethod async def get(self) -> Any:", "self.connection.send_msg(abort_msg(uri, message)) self._said_goodbye = True await self.close(uri, message) async def close(self, uri=None, message=None):", "def mark_unsubscribed(self, request): await self.connection.send_msg( unsubscribed_response_msg(request) ) @staticmethod def subscription_id_for_topic(topic): return hash(topic) &", "\"isn't subscribed.\", topic, self.id) return subscription_id = self._subscriptions[topic] msg = event_msg( subscription_id=subscription_id, 
publication_id=generate_global_id(),", "def mark_subscribed(self, request, subscription_id: int): await self.connection.send_msg( subscribed_response_msg(request, subscription_id) ) async def mark_unsubscribed(self,", "unregister_subscription(self, sub_id: int): topic_uri = self._subscriptions_ids.pop(sub_id) if not topic_uri: raise NoSuchSubscription() del self._subscriptions[topic_uri]", "async def mark_subscribed(self, request, subscription_id: int): await self.connection.send_msg( subscribed_response_msg(request, subscription_id) ) async def", "= topic_uri return sub_id async def unregister_subscription(self, sub_id: int): topic_uri = self._subscriptions_ids.pop(sub_id) if", "await self.connection.send_msg( unsubscribed_response_msg(request) ) @staticmethod def subscription_id_for_topic(topic): return hash(topic) & 0xFFFFFFFF async def", "self.is_open and not self._said_goodbye: await self.connection.send_msg(goodbye_msg(uri, message)) self.is_open = False class NoSuchSubscription(Exception): pass", "def __delitem__(self, key: str) -> None: del self._custom_state[key] def __len__(self) -> int: return", "return iter(self._custom_state) async def spawn_task(self, fn, *fn_args, **fn_kwargs): await self._tasks.spawn(fn, *fn_args, **fn_kwargs) async", "int: sub_id = self.subscription_id_for_topic(topic_uri) self._subscriptions[topic_uri] = sub_id self._subscriptions_ids[sub_id] = topic_uri return sub_id async", "__len__(self) -> int: return len(self._custom_state) def __iter__(self) -> Iterator[str]: return iter(self._custom_state) async def", "str): challenge_string = cra_challenge_string( self.id, auth_id=self.auth_id, auth_provider=auth_role, auth_role=auth_provider, nonce=scram_nonce() ) await self.connection.send_msg(cra_challenge_msg(challenge_string)) async", "logging from abc import ABC, abstractmethod from typing import Any, Iterable, Iterator from", "import ABC, abstractmethod from typing import Any, Iterable, Iterator from serverwamp.adapters.async_base 
import AsyncTaskGroup", "session happening over a connection. The session is available to RPC and event", "cra_challenge_msg, cra_challenge_string, event_msg, generate_global_id, goodbye_msg, scram_nonce, subscribed_response_msg, ticket_challenge_msg, unsubscribed_response_msg, welcome_msg) NO_MORE_EVENTS = object()", "def request_cra_auth(self, auth_role: str, auth_provider: str): challenge_string = cra_challenge_string( self.id, auth_id=self.auth_id, auth_provider=auth_role, auth_role=auth_provider,", ") @staticmethod def subscription_id_for_topic(topic): return hash(topic) & 0xFFFFFFFF async def mark_authenticated(self, identity: Any", "async def send_event(self, topic, args=(), kwargs=None, trust_level=None): if topic not in self._subscriptions: logger.debug(\"An", "self._custom_state[key] def __len__(self) -> int: return len(self._custom_state) def __iter__(self) -> Iterator[str]: return iter(self._custom_state)", "def __init__( self, connection, realm, tasks: AsyncTaskGroup, auth_id=None, auth_methods=() ): \"\"\"Represents a WAMP", "self.id = generate_global_id() self.auth_id = auth_id self.auth_methods = auth_methods self.is_open = False self.realm" ]
[ "freq='15T'), inplace=True) pv.drop(columns='Period Beginning (UTC -08:00)', inplace=True) pv.columns = ['gen'] #%% Save to", "pvdata = pd.read_csv('solar_PV_15min_kWh.csv') pv = pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040, freq='15T'), inplace=True) pv.drop(columns='Period Beginning", "\"\"\" Some data cleansing for the solar PV data. \"\"\" #%% import numpy", "solar PV data. \"\"\" #%% import numpy as np import pandas as pd", "#%% import numpy as np import pandas as pd # 5 years of", "= pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040, freq='15T'), inplace=True) pv.drop(columns='Period Beginning (UTC -08:00)', inplace=True) pv.columns", "\"\"\" #%% import numpy as np import pandas as pd # 5 years", "data pvdata = pd.read_csv('solar_PV_15min_kWh.csv') pv = pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040, freq='15T'), inplace=True) pv.drop(columns='Period", "as pd # 5 years of PV data pvdata = pd.read_csv('solar_PV_15min_kWh.csv') pv =", "data cleansing for the solar PV data. \"\"\" #%% import numpy as np", "PV data. \"\"\" #%% import numpy as np import pandas as pd #", "5 years of PV data pvdata = pd.read_csv('solar_PV_15min_kWh.csv') pv = pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01 00:00',", "pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040, freq='15T'), inplace=True) pv.drop(columns='Period Beginning (UTC -08:00)', inplace=True) pv.columns =", "pandas as pd # 5 years of PV data pvdata = pd.read_csv('solar_PV_15min_kWh.csv') pv", "00:00', periods=35040, freq='15T'), inplace=True) pv.drop(columns='Period Beginning (UTC -08:00)', inplace=True) pv.columns = ['gen'] #%%", "pd # 5 years of PV data pvdata = pd.read_csv('solar_PV_15min_kWh.csv') pv = pvdata[:8760*4]", "Some data cleansing for the solar PV data. 
\"\"\" #%% import numpy as", "pv.drop(columns='Period Beginning (UTC -08:00)', inplace=True) pv.columns = ['gen'] #%% Save to CSV pv.to_csv(\"pv_gen.csv\")", "import numpy as np import pandas as pd # 5 years of PV", "the solar PV data. \"\"\" #%% import numpy as np import pandas as", "# 5 years of PV data pvdata = pd.read_csv('solar_PV_15min_kWh.csv') pv = pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01", "data. \"\"\" #%% import numpy as np import pandas as pd # 5", "PV data pvdata = pd.read_csv('solar_PV_15min_kWh.csv') pv = pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040, freq='15T'), inplace=True)", "<reponame>kmoy14-stanford/AA222FinalProject \"\"\" Some data cleansing for the solar PV data. \"\"\" #%% import", "import pandas as pd # 5 years of PV data pvdata = pd.read_csv('solar_PV_15min_kWh.csv')", "periods=35040, freq='15T'), inplace=True) pv.drop(columns='Period Beginning (UTC -08:00)', inplace=True) pv.columns = ['gen'] #%% Save", "Beginning (UTC -08:00)', inplace=True) pv.columns = ['gen'] #%% Save to CSV pv.to_csv(\"pv_gen.csv\") #", "for the solar PV data. \"\"\" #%% import numpy as np import pandas", "(UTC -08:00)', inplace=True) pv.columns = ['gen'] #%% Save to CSV pv.to_csv(\"pv_gen.csv\") # %%", "of PV data pvdata = pd.read_csv('solar_PV_15min_kWh.csv') pv = pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040, freq='15T'),", "pv = pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040, freq='15T'), inplace=True) pv.drop(columns='Period Beginning (UTC -08:00)', inplace=True)", "cleansing for the solar PV data. 
\"\"\" #%% import numpy as np import", "np import pandas as pd # 5 years of PV data pvdata =", "pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040, freq='15T'), inplace=True) pv.drop(columns='Period Beginning (UTC -08:00)', inplace=True) pv.columns = ['gen']", "pd.read_csv('solar_PV_15min_kWh.csv') pv = pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040, freq='15T'), inplace=True) pv.drop(columns='Period Beginning (UTC -08:00)',", "inplace=True) pv.drop(columns='Period Beginning (UTC -08:00)', inplace=True) pv.columns = ['gen'] #%% Save to CSV", "= pd.read_csv('solar_PV_15min_kWh.csv') pv = pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040, freq='15T'), inplace=True) pv.drop(columns='Period Beginning (UTC", "as np import pandas as pd # 5 years of PV data pvdata", "numpy as np import pandas as pd # 5 years of PV data", "years of PV data pvdata = pd.read_csv('solar_PV_15min_kWh.csv') pv = pvdata[:8760*4] pv.set_index(pd.date_range(start='2021-01-01 00:00', periods=35040," ]
[ "confusion_matrix import numpy as np from keras.models import Sequential import tensorflow as tf", "Model from keras.layers.normalization import BatchNormalization import numpy as np from keras.models import load_model", "my_model = load_model('VO_2_classification_model.h5') test_datagen = ImageDataGenerator(rescale=1. / 255) validation_generator = test_datagen.flow_from_directory( validation_data_dir, target_size=(img_width,", "/ 255) validation_generator = test_datagen.flow_from_directory( validation_data_dir, target_size=(img_width, img_height), shuffle=False, batch_size=batch_size) Y_pred = my_model.predict_generator(validation_generator,len(validation_generator),verbose=1)", "['3view', 'others'] print(classification_report(y_true, y_pred, target_names=target_names)) ''' loss, acc = my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1) print('test", "from keras.models import Model from keras.layers.normalization import BatchNormalization import numpy as np from", "224,224 my_model = load_model('VO_2_classification_model.h5') test_datagen = ImageDataGenerator(rescale=1. / 255) validation_generator = test_datagen.flow_from_directory( validation_data_dir,", "axis=1) y_true = validation_generator.classes print('Confusion Matrix') print(confusion_matrix(validation_generator.classes, y_pred)) print('Classification Report') target_names = ['3view',", "= test_datagen.flow_from_directory( validation_data_dir, target_size=(img_width, img_height), shuffle=False, batch_size=batch_size) Y_pred = my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred = np.argmax(Y_pred,", "import backend as K from keras.applications.vgg19 import VGG19 from keras.models import Model from", "print(validation_steps) img_width, img_height = 224,224 my_model = load_model('VO_2_classification_model.h5') test_datagen = ImageDataGenerator(rescale=1. 
/ 255)", "keras.layers import Activation, Dropout, Flatten, Dense from keras import backend as K from", "as K from keras.applications.vgg19 import VGG19 from keras.models import Model from keras.layers.normalization import", "#16 test_count =sum([len(files) for r, d, files in os.walk(validation_data_dir)]) nb_validation_samples =test_count batch_size =8", "validation_steps= nb_validation_samples/batch_size print(test_count) print(validation_steps) img_width, img_height = 224,224 my_model = load_model('VO_2_classification_model.h5') test_datagen =", "verbose=1) print('test acc = %.3f'%(acc)) print('test loss = %.3f'%(loss)) ''' ''' y_pred_keras =", "positive rate') plt.ylabel('True positive rate') plt.title('ROC curve') plt.legend(loc='best') plt.show() print(auc_keras) ''' ''' #fpr,", "import matplotlib.pyplot as plt from sklearn.metrics import auc from sklearn.metrics import roc_curve import", "= load_model('VO_2_classification_model.h5') test_datagen = ImageDataGenerator(rescale=1. / 255) validation_generator = test_datagen.flow_from_directory( validation_data_dir, target_size=(img_width, img_height),", "import tensorflow as tf from sklearn import metrics import matplotlib.pyplot as plt from", "sklearn.metrics import auc #validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test 494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16", "from keras.layers import Activation, Dropout, Flatten, Dense from keras import backend as K", "(area = {:.3f})'.format(auc_keras)) plt.xlabel('False positive rate') plt.ylabel('True positive rate') plt.title('ROC curve') plt.legend(loc='best') plt.show()", "from keras.preprocessing.image import ImageDataGenerator from keras.layers import Conv2D, MaxPooling2D from keras.layers import Activation,", "import roc_curve import os from sklearn.metrics import 
auc #validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir =", "Sequential import tensorflow as tf from sklearn import metrics import matplotlib.pyplot as plt", "keras.applications.vgg19 import VGG19 from keras.models import Model from keras.layers.normalization import BatchNormalization import numpy", "rate') plt.ylabel('True positive rate') plt.title('ROC curve') plt.legend(loc='best') plt.show() print(auc_keras) ''' ''' #fpr, tpr,", "acc = %.3f'%(acc)) print('test loss = %.3f'%(loss)) ''' ''' y_pred_keras = Y_pred.ravel() fpr_keras,", "np from keras.models import load_model from sklearn.metrics import classification_report, confusion_matrix import numpy as", "validation_generator = test_datagen.flow_from_directory( validation_data_dir, target_size=(img_width, img_height), shuffle=False, batch_size=batch_size) Y_pred = my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred =", "from keras.layers.normalization import BatchNormalization import numpy as np from keras.models import load_model from", "as tf from sklearn import metrics import matplotlib.pyplot as plt from sklearn.metrics import", "= Y_pred.ravel() fpr_keras, tpr_keras, thresholds_keras = roc_curve(validation_generator.classes, y_pred_keras) auc_keras = auc(fpr_keras,tpr_keras) print(auc_keras) plt.figure(1)", "= r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test 494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16 test_count =sum([len(files) for r,", "print('Confusion Matrix') print(confusion_matrix(validation_generator.classes, y_pred)) print('Classification Report') target_names = ['3view', 'others'] print(classification_report(y_true, y_pred, target_names=target_names))", "'others'] print(classification_report(y_true, y_pred, 
target_names=target_names)) ''' loss, acc = my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1) print('test acc", "Dense from keras import backend as K from keras.applications.vgg19 import VGG19 from keras.models", "Conv2D, MaxPooling2D from keras.layers import Activation, Dropout, Flatten, Dense from keras import backend", "img_width, img_height = 224,224 my_model = load_model('VO_2_classification_model.h5') test_datagen = ImageDataGenerator(rescale=1. / 255) validation_generator", "''' #fpr, tpr, thresholds = metrics.roc_curve(y_true,Y_pred, pos_label=2) plt.plot(fpr_keras,tpr_keras,marker = 'o') plt.show() #AUC =", "auc_keras = auc(fpr_keras,tpr_keras) print(auc_keras) plt.figure(1) plt.plot([0, 1], [0, 1], 'k--') plt.plot(fpr_keras, tpr_keras, label='ROC", "''' ''' #fpr, tpr, thresholds = metrics.roc_curve(y_true,Y_pred, pos_label=2) plt.plot(fpr_keras,tpr_keras,marker = 'o') plt.show() #AUC", "target_names=target_names)) ''' loss, acc = my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1) print('test acc = %.3f'%(acc)) print('test", "''' loss, acc = my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1) print('test acc = %.3f'%(acc)) print('test loss", "plt.title('ROC curve') plt.legend(loc='best') plt.show() print(auc_keras) ''' ''' #fpr, tpr, thresholds = metrics.roc_curve(y_true,Y_pred, pos_label=2)", "keras.layers import Conv2D, MaxPooling2D from keras.layers import Activation, Dropout, Flatten, Dense from keras", "tpr, thresholds = metrics.roc_curve(y_true,Y_pred, pos_label=2) plt.plot(fpr_keras,tpr_keras,marker = 'o') plt.show() #AUC = auc(fpr, tpr)", "r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test 494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16 test_count =sum([len(files) for r, d, files in os.walk(validation_data_dir)])", "= 
my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1) print('test acc = %.3f'%(acc)) print('test loss = %.3f'%(loss)) '''", "np.argmax(Y_pred, axis=1) y_true = validation_generator.classes print('Confusion Matrix') print(confusion_matrix(validation_generator.classes, y_pred)) print('Classification Report') target_names =", "print(confusion_matrix(validation_generator.classes, y_pred)) print('Classification Report') target_names = ['3view', 'others'] print(classification_report(y_true, y_pred, target_names=target_names)) ''' loss,", "y_true = validation_generator.classes print('Confusion Matrix') print(confusion_matrix(validation_generator.classes, y_pred)) print('Classification Report') target_names = ['3view', 'others']", "= %.3f'%(loss)) ''' ''' y_pred_keras = Y_pred.ravel() fpr_keras, tpr_keras, thresholds_keras = roc_curve(validation_generator.classes, y_pred_keras)", "loss, acc = my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1) print('test acc = %.3f'%(acc)) print('test loss =", "classification_report, confusion_matrix import numpy as np from keras.models import Sequential import tensorflow as", "tpr_keras, label='ROC (area = {:.3f})'.format(auc_keras)) plt.xlabel('False positive rate') plt.ylabel('True positive rate') plt.title('ROC curve')", "print(test_count) print(validation_steps) img_width, img_height = 224,224 my_model = load_model('VO_2_classification_model.h5') test_datagen = ImageDataGenerator(rescale=1. 
/", "y_pred_keras = Y_pred.ravel() fpr_keras, tpr_keras, thresholds_keras = roc_curve(validation_generator.classes, y_pred_keras) auc_keras = auc(fpr_keras,tpr_keras) print(auc_keras)", "%.3f'%(acc)) print('test loss = %.3f'%(loss)) ''' ''' y_pred_keras = Y_pred.ravel() fpr_keras, tpr_keras, thresholds_keras", "keras import backend as K from keras.applications.vgg19 import VGG19 from keras.models import Model", "= np.argmax(Y_pred, axis=1) y_true = validation_generator.classes print('Confusion Matrix') print(confusion_matrix(validation_generator.classes, y_pred)) print('Classification Report') target_names", "=sum([len(files) for r, d, files in os.walk(validation_data_dir)]) nb_validation_samples =test_count batch_size =8 validation_steps= nb_validation_samples/batch_size", "y_pred, target_names=target_names)) ''' loss, acc = my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1) print('test acc = %.3f'%(acc))", "batch_size=batch_size) Y_pred = my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred = np.argmax(Y_pred, axis=1) y_true = validation_generator.classes print('Confusion Matrix')", "1], 'k--') plt.plot(fpr_keras, tpr_keras, label='ROC (area = {:.3f})'.format(auc_keras)) plt.xlabel('False positive rate') plt.ylabel('True positive", "'k--') plt.plot(fpr_keras, tpr_keras, label='ROC (area = {:.3f})'.format(auc_keras)) plt.xlabel('False positive rate') plt.ylabel('True positive rate')", "np from keras.models import Sequential import tensorflow as tf from sklearn import metrics", "matplotlib.pyplot as plt from sklearn.metrics import auc from sklearn.metrics import roc_curve import os", "import auc from sklearn.metrics import roc_curve import os from sklearn.metrics import auc #validation_data_dir", "plt.ylabel('True positive rate') plt.title('ROC curve') plt.legend(loc='best') plt.show() print(auc_keras) ''' ''' #fpr, tpr, thresholds", "from sklearn.metrics import classification_report, 
confusion_matrix import numpy as np from keras.models import Sequential", "r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test 494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16 test_count =sum([len(files) for r, d,", "keras.models import Model from keras.layers.normalization import BatchNormalization import numpy as np from keras.models", "as np from keras.models import load_model from sklearn.metrics import classification_report, confusion_matrix import numpy", "[0, 1], 'k--') plt.plot(fpr_keras, tpr_keras, label='ROC (area = {:.3f})'.format(auc_keras)) plt.xlabel('False positive rate') plt.ylabel('True", "y_pred = np.argmax(Y_pred, axis=1) y_true = validation_generator.classes print('Confusion Matrix') print(confusion_matrix(validation_generator.classes, y_pred)) print('Classification Report')", "import metrics import matplotlib.pyplot as plt from sklearn.metrics import auc from sklearn.metrics import", "backend as K from keras.applications.vgg19 import VGG19 from keras.models import Model from keras.layers.normalization", "= auc(fpr_keras,tpr_keras) print(auc_keras) plt.figure(1) plt.plot([0, 1], [0, 1], 'k--') plt.plot(fpr_keras, tpr_keras, label='ROC (area", "my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred = np.argmax(Y_pred, axis=1) y_true = validation_generator.classes print('Confusion Matrix') print(confusion_matrix(validation_generator.classes, y_pred)) print('Classification", "numpy as np from keras.models import load_model from sklearn.metrics import classification_report, confusion_matrix import", "as plt from sklearn.metrics import auc from sklearn.metrics import roc_curve import os from", "ImageDataGenerator from keras.layers import Conv2D, MaxPooling2D from keras.layers import Activation, Dropout, Flatten, Dense", 
"roc_curve(validation_generator.classes, y_pred_keras) auc_keras = auc(fpr_keras,tpr_keras) print(auc_keras) plt.figure(1) plt.plot([0, 1], [0, 1], 'k--') plt.plot(fpr_keras,", "tf from sklearn import metrics import matplotlib.pyplot as plt from sklearn.metrics import auc", "roc_curve import os from sklearn.metrics import auc #validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5'", "= ['3view', 'others'] print(classification_report(y_true, y_pred, target_names=target_names)) ''' loss, acc = my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1)", "auc from sklearn.metrics import roc_curve import os from sklearn.metrics import auc #validation_data_dir =", "#fpr, tpr, thresholds = metrics.roc_curve(y_true,Y_pred, pos_label=2) plt.plot(fpr_keras,tpr_keras,marker = 'o') plt.show() #AUC = auc(fpr,", "metrics import matplotlib.pyplot as plt from sklearn.metrics import auc from sklearn.metrics import roc_curve", "1], [0, 1], 'k--') plt.plot(fpr_keras, tpr_keras, label='ROC (area = {:.3f})'.format(auc_keras)) plt.xlabel('False positive rate')", "thresholds_keras = roc_curve(validation_generator.classes, y_pred_keras) auc_keras = auc(fpr_keras,tpr_keras) print(auc_keras) plt.figure(1) plt.plot([0, 1], [0, 1],", "plt.legend(loc='best') plt.show() print(auc_keras) ''' ''' #fpr, tpr, thresholds = metrics.roc_curve(y_true,Y_pred, pos_label=2) plt.plot(fpr_keras,tpr_keras,marker =", "= {:.3f})'.format(auc_keras)) plt.xlabel('False positive rate') plt.ylabel('True positive rate') plt.title('ROC curve') plt.legend(loc='best') plt.show() print(auc_keras)", "keras.models import load_model from sklearn.metrics import classification_report, confusion_matrix import numpy as np from", "import classification_report, confusion_matrix import numpy as np from keras.models import Sequential import tensorflow", 
"my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1) print('test acc = %.3f'%(acc)) print('test loss = %.3f'%(loss)) ''' '''", "import numpy as np from keras.models import load_model from sklearn.metrics import classification_report, confusion_matrix", "nb_validation_samples/batch_size print(test_count) print(validation_steps) img_width, img_height = 224,224 my_model = load_model('VO_2_classification_model.h5') test_datagen = ImageDataGenerator(rescale=1.", "Activation, Dropout, Flatten, Dense from keras import backend as K from keras.applications.vgg19 import", "plt.plot([0, 1], [0, 1], 'k--') plt.plot(fpr_keras, tpr_keras, label='ROC (area = {:.3f})'.format(auc_keras)) plt.xlabel('False positive", "plt.xlabel('False positive rate') plt.ylabel('True positive rate') plt.title('ROC curve') plt.legend(loc='best') plt.show() print(auc_keras) ''' '''", "target_size=(img_width, img_height), shuffle=False, batch_size=batch_size) Y_pred = my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred = np.argmax(Y_pred, axis=1) y_true =", "load_model from sklearn.metrics import classification_report, confusion_matrix import numpy as np from keras.models import", "test_count =sum([len(files) for r, d, files in os.walk(validation_data_dir)]) nb_validation_samples =test_count batch_size =8 validation_steps=", "batch_size =8 validation_steps= nb_validation_samples/batch_size print(test_count) print(validation_steps) img_width, img_height = 224,224 my_model = load_model('VO_2_classification_model.h5')", "as np from keras.models import Sequential import tensorflow as tf from sklearn import", "d, files in os.walk(validation_data_dir)]) nb_validation_samples =test_count batch_size =8 validation_steps= nb_validation_samples/batch_size print(test_count) print(validation_steps) img_width,", "validation_data_dir, target_size=(img_width, img_height), shuffle=False, batch_size=batch_size) Y_pred = 
my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred = np.argmax(Y_pred, axis=1) y_true", "sklearn import metrics import matplotlib.pyplot as plt from sklearn.metrics import auc from sklearn.metrics", "loss = %.3f'%(loss)) ''' ''' y_pred_keras = Y_pred.ravel() fpr_keras, tpr_keras, thresholds_keras = roc_curve(validation_generator.classes,", "%.3f'%(loss)) ''' ''' y_pred_keras = Y_pred.ravel() fpr_keras, tpr_keras, thresholds_keras = roc_curve(validation_generator.classes, y_pred_keras) auc_keras", "positive rate') plt.title('ROC curve') plt.legend(loc='best') plt.show() print(auc_keras) ''' ''' #fpr, tpr, thresholds =", "tpr_keras, thresholds_keras = roc_curve(validation_generator.classes, y_pred_keras) auc_keras = auc(fpr_keras,tpr_keras) print(auc_keras) plt.figure(1) plt.plot([0, 1], [0,", "Matrix') print(confusion_matrix(validation_generator.classes, y_pred)) print('Classification Report') target_names = ['3view', 'others'] print(classification_report(y_true, y_pred, target_names=target_names)) '''", "auc #validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test 494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16 test_count =sum([len(files)", "sklearn.metrics import roc_curve import os from sklearn.metrics import auc #validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir", "keras.models import Sequential import tensorflow as tf from sklearn import metrics import matplotlib.pyplot", "auc(fpr_keras,tpr_keras) print(auc_keras) plt.figure(1) plt.plot([0, 1], [0, 1], 'k--') plt.plot(fpr_keras, tpr_keras, label='ROC (area =", "from keras.applications.vgg19 import VGG19 from keras.models import Model from keras.layers.normalization import BatchNormalization import", "BatchNormalization import 
numpy as np from keras.models import load_model from sklearn.metrics import classification_report,", "test_datagen.flow_from_directory( validation_data_dir, target_size=(img_width, img_height), shuffle=False, batch_size=batch_size) Y_pred = my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred = np.argmax(Y_pred, axis=1)", "= roc_curve(validation_generator.classes, y_pred_keras) auc_keras = auc(fpr_keras,tpr_keras) print(auc_keras) plt.figure(1) plt.plot([0, 1], [0, 1], 'k--')", "import Model from keras.layers.normalization import BatchNormalization import numpy as np from keras.models import", "print('Classification Report') target_names = ['3view', 'others'] print(classification_report(y_true, y_pred, target_names=target_names)) ''' loss, acc =", "fpr_keras, tpr_keras, thresholds_keras = roc_curve(validation_generator.classes, y_pred_keras) auc_keras = auc(fpr_keras,tpr_keras) print(auc_keras) plt.figure(1) plt.plot([0, 1],", "y_pred_keras) auc_keras = auc(fpr_keras,tpr_keras) print(auc_keras) plt.figure(1) plt.plot([0, 1], [0, 1], 'k--') plt.plot(fpr_keras, tpr_keras,", "K from keras.applications.vgg19 import VGG19 from keras.models import Model from keras.layers.normalization import BatchNormalization", "import Conv2D, MaxPooling2D from keras.layers import Activation, Dropout, Flatten, Dense from keras import", "Flatten, Dense from keras import backend as K from keras.applications.vgg19 import VGG19 from", "import auc #validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test 494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16 test_count", "VGG19 from keras.models import Model from keras.layers.normalization import BatchNormalization import numpy as np", "494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16 test_count 
=sum([len(files) for r, d, files in os.walk(validation_data_dir)]) nb_validation_samples =test_count", "''' ''' y_pred_keras = Y_pred.ravel() fpr_keras, tpr_keras, thresholds_keras = roc_curve(validation_generator.classes, y_pred_keras) auc_keras =", "''' y_pred_keras = Y_pred.ravel() fpr_keras, tpr_keras, thresholds_keras = roc_curve(validation_generator.classes, y_pred_keras) auc_keras = auc(fpr_keras,tpr_keras)", "import load_model from sklearn.metrics import classification_report, confusion_matrix import numpy as np from keras.models", "= %.3f'%(acc)) print('test loss = %.3f'%(loss)) ''' ''' y_pred_keras = Y_pred.ravel() fpr_keras, tpr_keras,", "#C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16 test_count =sum([len(files) for r, d, files in os.walk(validation_data_dir)]) nb_validation_samples =test_count batch_size", "<reponame>redkfa/PDF_classification from keras.preprocessing.image import ImageDataGenerator from keras.layers import Conv2D, MaxPooling2D from keras.layers import", "keras.preprocessing.image import ImageDataGenerator from keras.layers import Conv2D, MaxPooling2D from keras.layers import Activation, Dropout,", "os.walk(validation_data_dir)]) nb_validation_samples =test_count batch_size =8 validation_steps= nb_validation_samples/batch_size print(test_count) print(validation_steps) img_width, img_height = 224,224", "validation_generator.classes print('Confusion Matrix') print(confusion_matrix(validation_generator.classes, y_pred)) print('Classification Report') target_names = ['3view', 'others'] print(classification_report(y_true, y_pred,", "keras.layers.normalization import BatchNormalization import numpy as np from keras.models import load_model from sklearn.metrics", "sklearn.metrics import classification_report, confusion_matrix import numpy as np from keras.models import Sequential import", "from sklearn.metrics import roc_curve import os from sklearn.metrics import auc #validation_data_dir = 
r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test'", "in os.walk(validation_data_dir)]) nb_validation_samples =test_count batch_size =8 validation_steps= nb_validation_samples/batch_size print(test_count) print(validation_steps) img_width, img_height =", "shuffle=False, batch_size=batch_size) Y_pred = my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred = np.argmax(Y_pred, axis=1) y_true = validation_generator.classes print('Confusion", "acc = my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1) print('test acc = %.3f'%(acc)) print('test loss = %.3f'%(loss))", "label='ROC (area = {:.3f})'.format(auc_keras)) plt.xlabel('False positive rate') plt.ylabel('True positive rate') plt.title('ROC curve') plt.legend(loc='best')", "print(auc_keras) ''' ''' #fpr, tpr, thresholds = metrics.roc_curve(y_true,Y_pred, pos_label=2) plt.plot(fpr_keras,tpr_keras,marker = 'o') plt.show()", "test_datagen = ImageDataGenerator(rescale=1. / 255) validation_generator = test_datagen.flow_from_directory( validation_data_dir, target_size=(img_width, img_height), shuffle=False, batch_size=batch_size)", "from sklearn import metrics import matplotlib.pyplot as plt from sklearn.metrics import auc from", "plt from sklearn.metrics import auc from sklearn.metrics import roc_curve import os from sklearn.metrics", "load_model('VO_2_classification_model.h5') test_datagen = ImageDataGenerator(rescale=1. 
/ 255) validation_generator = test_datagen.flow_from_directory( validation_data_dir, target_size=(img_width, img_height), shuffle=False,", "validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test 494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16 test_count =sum([len(files) for r, d, files", "from keras.models import Sequential import tensorflow as tf from sklearn import metrics import", "import numpy as np from keras.models import Sequential import tensorflow as tf from", "= r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test 494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16 test_count =sum([len(files) for r, d, files in", "curve') plt.legend(loc='best') plt.show() print(auc_keras) ''' ''' #fpr, tpr, thresholds = metrics.roc_curve(y_true,Y_pred, pos_label=2) plt.plot(fpr_keras,tpr_keras,marker", "Y_pred.ravel() fpr_keras, tpr_keras, thresholds_keras = roc_curve(validation_generator.classes, y_pred_keras) auc_keras = auc(fpr_keras,tpr_keras) print(auc_keras) plt.figure(1) plt.plot([0,", "target_names = ['3view', 'others'] print(classification_report(y_true, y_pred, target_names=target_names)) ''' loss, acc = my_model.evaluate_generator(validation_generator, steps=len(validation_generator),", "Y_pred = my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred = np.argmax(Y_pred, axis=1) y_true = validation_generator.classes print('Confusion Matrix') print(confusion_matrix(validation_generator.classes,", "numpy as np from keras.models import Sequential import tensorflow as tf from sklearn", "plt.show() print(auc_keras) ''' ''' #fpr, tpr, thresholds = metrics.roc_curve(y_true,Y_pred, pos_label=2) plt.plot(fpr_keras,tpr_keras,marker = 'o')", "Report') target_names = ['3view', 'others'] print(classification_report(y_true, y_pred, 
target_names=target_names)) ''' loss, acc = my_model.evaluate_generator(validation_generator,", "ImageDataGenerator(rescale=1. / 255) validation_generator = test_datagen.flow_from_directory( validation_data_dir, target_size=(img_width, img_height), shuffle=False, batch_size=batch_size) Y_pred =", "= ImageDataGenerator(rescale=1. / 255) validation_generator = test_datagen.flow_from_directory( validation_data_dir, target_size=(img_width, img_height), shuffle=False, batch_size=batch_size) Y_pred", "sklearn.metrics import auc from sklearn.metrics import roc_curve import os from sklearn.metrics import auc", "y_pred)) print('Classification Report') target_names = ['3view', 'others'] print(classification_report(y_true, y_pred, target_names=target_names)) ''' loss, acc", "print('test loss = %.3f'%(loss)) ''' ''' y_pred_keras = Y_pred.ravel() fpr_keras, tpr_keras, thresholds_keras =", "img_height), shuffle=False, batch_size=batch_size) Y_pred = my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred = np.argmax(Y_pred, axis=1) y_true = validation_generator.classes", "r, d, files in os.walk(validation_data_dir)]) nb_validation_samples =test_count batch_size =8 validation_steps= nb_validation_samples/batch_size print(test_count) print(validation_steps)", "for r, d, files in os.walk(validation_data_dir)]) nb_validation_samples =test_count batch_size =8 validation_steps= nb_validation_samples/batch_size print(test_count)", "nb_validation_samples =test_count batch_size =8 validation_steps= nb_validation_samples/batch_size print(test_count) print(validation_steps) img_width, img_height = 224,224 my_model", "= validation_generator.classes print('Confusion Matrix') print(confusion_matrix(validation_generator.classes, y_pred)) print('Classification Report') target_names = ['3view', 'others'] print(classification_report(y_true,", "{:.3f})'.format(auc_keras)) plt.xlabel('False positive rate') plt.ylabel('True positive rate') plt.title('ROC curve') 
plt.legend(loc='best') plt.show() print(auc_keras) '''", "#validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test 494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16 test_count =sum([len(files) for", "= 224,224 my_model = load_model('VO_2_classification_model.h5') test_datagen = ImageDataGenerator(rescale=1. / 255) validation_generator = test_datagen.flow_from_directory(", "thresholds = metrics.roc_curve(y_true,Y_pred, pos_label=2) plt.plot(fpr_keras,tpr_keras,marker = 'o') plt.show() #AUC = auc(fpr, tpr) '''", "#C:\\Users\\randy\\Downloads\\betterdataset\\test 494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2 #16 test_count =sum([len(files) for r, d, files in os.walk(validation_data_dir)]) nb_validation_samples", "from sklearn.metrics import auc #validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test 494# #C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test2", "Dropout, Flatten, Dense from keras import backend as K from keras.applications.vgg19 import VGG19", "os from sklearn.metrics import auc #validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test 494#", "from keras import backend as K from keras.applications.vgg19 import VGG19 from keras.models import", "MaxPooling2D from keras.layers import Activation, Dropout, Flatten, Dense from keras import backend as", "img_height = 224,224 my_model = load_model('VO_2_classification_model.h5') test_datagen = ImageDataGenerator(rescale=1. 
/ 255) validation_generator =", "steps=len(validation_generator), verbose=1) print('test acc = %.3f'%(acc)) print('test loss = %.3f'%(loss)) ''' ''' y_pred_keras", "import ImageDataGenerator from keras.layers import Conv2D, MaxPooling2D from keras.layers import Activation, Dropout, Flatten,", "import Activation, Dropout, Flatten, Dense from keras import backend as K from keras.applications.vgg19", "from keras.models import load_model from sklearn.metrics import classification_report, confusion_matrix import numpy as np", "import Sequential import tensorflow as tf from sklearn import metrics import matplotlib.pyplot as", "255) validation_generator = test_datagen.flow_from_directory( validation_data_dir, target_size=(img_width, img_height), shuffle=False, batch_size=batch_size) Y_pred = my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred", "files in os.walk(validation_data_dir)]) nb_validation_samples =test_count batch_size =8 validation_steps= nb_validation_samples/batch_size print(test_count) print(validation_steps) img_width, img_height", "print(classification_report(y_true, y_pred, target_names=target_names)) ''' loss, acc = my_model.evaluate_generator(validation_generator, steps=len(validation_generator), verbose=1) print('test acc =", "print('test acc = %.3f'%(acc)) print('test loss = %.3f'%(loss)) ''' ''' y_pred_keras = Y_pred.ravel()", "plt.plot(fpr_keras, tpr_keras, label='ROC (area = {:.3f})'.format(auc_keras)) plt.xlabel('False positive rate') plt.ylabel('True positive rate') plt.title('ROC", "=test_count batch_size =8 validation_steps= nb_validation_samples/batch_size print(test_count) print(validation_steps) img_width, img_height = 224,224 my_model =", "rate') plt.title('ROC curve') plt.legend(loc='best') plt.show() print(auc_keras) ''' ''' #fpr, tpr, thresholds = metrics.roc_curve(y_true,Y_pred,", "import VGG19 from keras.models import Model from keras.layers.normalization import BatchNormalization import numpy 
as", "from keras.layers import Conv2D, MaxPooling2D from keras.layers import Activation, Dropout, Flatten, Dense from", "from sklearn.metrics import auc from sklearn.metrics import roc_curve import os from sklearn.metrics import", "=8 validation_steps= nb_validation_samples/batch_size print(test_count) print(validation_steps) img_width, img_height = 224,224 my_model = load_model('VO_2_classification_model.h5') test_datagen", "print(auc_keras) plt.figure(1) plt.plot([0, 1], [0, 1], 'k--') plt.plot(fpr_keras, tpr_keras, label='ROC (area = {:.3f})'.format(auc_keras))", "plt.figure(1) plt.plot([0, 1], [0, 1], 'k--') plt.plot(fpr_keras, tpr_keras, label='ROC (area = {:.3f})'.format(auc_keras)) plt.xlabel('False", "tensorflow as tf from sklearn import metrics import matplotlib.pyplot as plt from sklearn.metrics", "import os from sklearn.metrics import auc #validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test' validation_data_dir = r'C:\\Users\\randy\\PycharmProjects\\PJ1\\classifiaction\\test5' #C:\\Users\\randy\\Downloads\\betterdataset\\test", "= my_model.predict_generator(validation_generator,len(validation_generator),verbose=1) y_pred = np.argmax(Y_pred, axis=1) y_true = validation_generator.classes print('Confusion Matrix') print(confusion_matrix(validation_generator.classes, y_pred))", "import BatchNormalization import numpy as np from keras.models import load_model from sklearn.metrics import" ]
[ "patched_git: patched_git.Repo().head.object.hexsha = \"a1\" * 20 patched_git.Repo().head.object.author.email = \"<EMAIL>\" submission = SubmissionBuilder(\"t\", \"b\",", "== [\"anything\"], submission def test_version_details(converted_tests): \"\"\"Should contain version details from git head commit\"\"\"", "\"<EMAIL>\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"version\", {}).get(\"hash\") == (\"a1\" * 20),", "patched_git.Repo().head.object.author.email = \"<EMAIL>\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"version\", {}).get(\"hash\") == (\"a1\"", "assert submission.get(\"results\") == [\"anything\"], submission def test_version_details(converted_tests): \"\"\"Should contain version details from git", "assert submission.get(\"version\", {}).get(\"hash\") == (\"a1\" * 20), submission assert submission.get(\"version\", {}).get(\"author\") == (\"<EMAIL>\"),", "mig3_client import SubmissionBuilder def test_minimum_viable_submission(converted_tests): \"\"\"Should produce something\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build()", "== \"b\", submission def test_tests(): \"\"\"Should contain test results used to initialize the", "test results used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", [\"anything\"]).build() assert", "contain target configuration ID used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\",", "20 patched_git.Repo().head.object.author.email = \"<EMAIL>\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"version\", {}).get(\"hash\") ==", "submission.get(\"target\") == \"t\", submission def test_build_number(converted_tests): \"\"\"Should contain build number used to initialize", "produce something\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission is not None def", "to 
initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"target\") == \"t\",", "converted_tests).build() assert submission.get(\"version\", {}).get(\"hash\") == (\"a1\" * 20), submission assert submission.get(\"version\", {}).get(\"author\") ==", "mock.patch(\"mig3_client.git\") as patched_git: patched_git.Repo().head.object.hexsha = \"a1\" * 20 patched_git.Repo().head.object.author.email = \"<EMAIL>\" submission =", "initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"number\") == \"b\", submission", "the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", [\"anything\"]).build() assert submission.get(\"results\") == [\"anything\"], submission def", "SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"version\", {}).get(\"hash\") == (\"a1\" * 20), submission assert submission.get(\"version\",", "[\"anything\"]).build() assert submission.get(\"results\") == [\"anything\"], submission def test_version_details(converted_tests): \"\"\"Should contain version details from", "not None def test_configuration_id(converted_tests): \"\"\"Should contain target configuration ID used to initialize the", "used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"target\") ==", "submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"target\") == \"t\", submission def test_build_number(converted_tests): \"\"\"Should", "git head commit\"\"\" with mock.patch(\"mig3_client.git\") as patched_git: patched_git.Repo().head.object.hexsha = \"a1\" * 20 patched_git.Repo().head.object.author.email", "assert submission.get(\"target\") == \"t\", submission def test_build_number(converted_tests): \"\"\"Should contain build number used to", "submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() 
assert submission.get(\"number\") == \"b\", submission def test_tests(): \"\"\"Should", "as patched_git: patched_git.Repo().head.object.hexsha = \"a1\" * 20 patched_git.Repo().head.object.author.email = \"<EMAIL>\" submission = SubmissionBuilder(\"t\",", "SubmissionBuilder(\"t\", \"b\", [\"anything\"]).build() assert submission.get(\"results\") == [\"anything\"], submission def test_version_details(converted_tests): \"\"\"Should contain version", "converted_tests).build() assert submission is not None def test_configuration_id(converted_tests): \"\"\"Should contain target configuration ID", "\"b\", [\"anything\"]).build() assert submission.get(\"results\") == [\"anything\"], submission def test_version_details(converted_tests): \"\"\"Should contain version details", "\"\"\"Should contain target configuration ID used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\",", "\"b\", converted_tests).build() assert submission.get(\"target\") == \"t\", submission def test_build_number(converted_tests): \"\"\"Should contain build number", "import SubmissionBuilder def test_minimum_viable_submission(converted_tests): \"\"\"Should produce something\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert", "submission.get(\"version\", {}).get(\"hash\") == (\"a1\" * 20), submission assert submission.get(\"version\", {}).get(\"author\") == (\"<EMAIL>\"), submission", "utf-8 -*- import mock from mig3_client import SubmissionBuilder def test_minimum_viable_submission(converted_tests): \"\"\"Should produce something\"\"\"", "SubmissionBuilder def test_minimum_viable_submission(converted_tests): \"\"\"Should produce something\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission", "to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"number\") == \"b\",", "= \"<EMAIL>\" submission = SubmissionBuilder(\"t\", \"b\", 
converted_tests).build() assert submission.get(\"version\", {}).get(\"hash\") == (\"a1\" *", "SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"number\") == \"b\", submission def test_tests(): \"\"\"Should contain test", "def test_configuration_id(converted_tests): \"\"\"Should contain target configuration ID used to initialize the builder\"\"\" submission", "\"b\", converted_tests).build() assert submission.get(\"number\") == \"b\", submission def test_tests(): \"\"\"Should contain test results", "def test_minimum_viable_submission(converted_tests): \"\"\"Should produce something\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission is", "submission = SubmissionBuilder(\"t\", \"b\", [\"anything\"]).build() assert submission.get(\"results\") == [\"anything\"], submission def test_version_details(converted_tests): \"\"\"Should", "submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission is not None def test_configuration_id(converted_tests): \"\"\"Should", "initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"target\") == \"t\", submission", "submission def test_version_details(converted_tests): \"\"\"Should contain version details from git head commit\"\"\" with mock.patch(\"mig3_client.git\")", "= \"a1\" * 20 patched_git.Repo().head.object.author.email = \"<EMAIL>\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert", "head commit\"\"\" with mock.patch(\"mig3_client.git\") as patched_git: patched_git.Repo().head.object.hexsha = \"a1\" * 20 patched_git.Repo().head.object.author.email =", "used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", [\"anything\"]).build() assert submission.get(\"results\") ==", "contain version details from git head commit\"\"\" with mock.patch(\"mig3_client.git\") as patched_git: patched_git.Repo().head.object.hexsha 
=", "number used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"number\")", "test_version_details(converted_tests): \"\"\"Should contain version details from git head commit\"\"\" with mock.patch(\"mig3_client.git\") as patched_git:", "builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"number\") == \"b\", submission def test_tests():", "from mig3_client import SubmissionBuilder def test_minimum_viable_submission(converted_tests): \"\"\"Should produce something\"\"\" submission = SubmissionBuilder(\"t\", \"b\",", "contain build number used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build()", "test_minimum_viable_submission(converted_tests): \"\"\"Should produce something\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission is not", "def test_build_number(converted_tests): \"\"\"Should contain build number used to initialize the builder\"\"\" submission =", "version details from git head commit\"\"\" with mock.patch(\"mig3_client.git\") as patched_git: patched_git.Repo().head.object.hexsha = \"a1\"", "the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"number\") == \"b\", submission def", "-*- import mock from mig3_client import SubmissionBuilder def test_minimum_viable_submission(converted_tests): \"\"\"Should produce something\"\"\" submission", "mock from mig3_client import SubmissionBuilder def test_minimum_viable_submission(converted_tests): \"\"\"Should produce something\"\"\" submission = SubmissionBuilder(\"t\",", "is not None def test_configuration_id(converted_tests): \"\"\"Should contain target configuration ID used to initialize", "\"b\", converted_tests).build() assert submission.get(\"version\", {}).get(\"hash\") == (\"a1\" * 20), submission assert 
submission.get(\"version\", {}).get(\"author\")", "submission.get(\"number\") == \"b\", submission def test_tests(): \"\"\"Should contain test results used to initialize", "converted_tests).build() assert submission.get(\"target\") == \"t\", submission def test_build_number(converted_tests): \"\"\"Should contain build number used", "= SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission is not None def test_configuration_id(converted_tests): \"\"\"Should contain", "submission.get(\"results\") == [\"anything\"], submission def test_version_details(converted_tests): \"\"\"Should contain version details from git head", "<gh_stars>1-10 # -*- coding: utf-8 -*- import mock from mig3_client import SubmissionBuilder def", "-*- coding: utf-8 -*- import mock from mig3_client import SubmissionBuilder def test_minimum_viable_submission(converted_tests): \"\"\"Should", "details from git head commit\"\"\" with mock.patch(\"mig3_client.git\") as patched_git: patched_git.Repo().head.object.hexsha = \"a1\" *", "initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", [\"anything\"]).build() assert submission.get(\"results\") == [\"anything\"], submission", "converted_tests).build() assert submission.get(\"number\") == \"b\", submission def test_tests(): \"\"\"Should contain test results used", "submission is not None def test_configuration_id(converted_tests): \"\"\"Should contain target configuration ID used to", "results used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", [\"anything\"]).build() assert submission.get(\"results\")", "\"b\", submission def test_tests(): \"\"\"Should contain test results used to initialize the builder\"\"\"", "test_tests(): \"\"\"Should contain test results used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\",", "from git head commit\"\"\" with mock.patch(\"mig3_client.git\") as patched_git: patched_git.Repo().head.object.hexsha = \"a1\" * 20", "= 
SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"version\", {}).get(\"hash\") == (\"a1\" * 20), submission assert", "test_build_number(converted_tests): \"\"\"Should contain build number used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\",", "something\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission is not None def test_configuration_id(converted_tests):", "contain test results used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", [\"anything\"]).build()", "submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"version\", {}).get(\"hash\") == (\"a1\" * 20), submission", "\"\"\"Should contain test results used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\",", "\"\"\"Should contain build number used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\",", "commit\"\"\" with mock.patch(\"mig3_client.git\") as patched_git: patched_git.Repo().head.object.hexsha = \"a1\" * 20 patched_git.Repo().head.object.author.email = \"<EMAIL>\"", "test_configuration_id(converted_tests): \"\"\"Should contain target configuration ID used to initialize the builder\"\"\" submission =", "builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"target\") == \"t\", submission def test_build_number(converted_tests):", "== \"t\", submission def test_build_number(converted_tests): \"\"\"Should contain build number used to initialize the", "= SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"number\") == \"b\", submission def test_tests(): \"\"\"Should contain", "builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", [\"anything\"]).build() assert submission.get(\"results\") == [\"anything\"], submission def test_version_details(converted_tests):", "SubmissionBuilder(\"t\", \"b\", 
converted_tests).build() assert submission is not None def test_configuration_id(converted_tests): \"\"\"Should contain target", "\"t\", submission def test_build_number(converted_tests): \"\"\"Should contain build number used to initialize the builder\"\"\"", "used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"number\") ==", "submission def test_tests(): \"\"\"Should contain test results used to initialize the builder\"\"\" submission", "to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", [\"anything\"]).build() assert submission.get(\"results\") == [\"anything\"],", "assert submission is not None def test_configuration_id(converted_tests): \"\"\"Should contain target configuration ID used", "ID used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"target\")", "def test_tests(): \"\"\"Should contain test results used to initialize the builder\"\"\" submission =", "* 20 patched_git.Repo().head.object.author.email = \"<EMAIL>\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"version\", {}).get(\"hash\")", "submission def test_build_number(converted_tests): \"\"\"Should contain build number used to initialize the builder\"\"\" submission", "assert submission.get(\"number\") == \"b\", submission def test_tests(): \"\"\"Should contain test results used to", "None def test_configuration_id(converted_tests): \"\"\"Should contain target configuration ID used to initialize the builder\"\"\"", "with mock.patch(\"mig3_client.git\") as patched_git: patched_git.Repo().head.object.hexsha = \"a1\" * 20 patched_git.Repo().head.object.author.email = \"<EMAIL>\" submission", "def test_version_details(converted_tests): \"\"\"Should contain version details from git head commit\"\"\" with mock.patch(\"mig3_client.git\") as", "coding: utf-8 -*- import mock from 
mig3_client import SubmissionBuilder def test_minimum_viable_submission(converted_tests): \"\"\"Should produce", "\"\"\"Should produce something\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission is not None", "= SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"target\") == \"t\", submission def test_build_number(converted_tests): \"\"\"Should contain", "\"b\", converted_tests).build() assert submission is not None def test_configuration_id(converted_tests): \"\"\"Should contain target configuration", "= SubmissionBuilder(\"t\", \"b\", [\"anything\"]).build() assert submission.get(\"results\") == [\"anything\"], submission def test_version_details(converted_tests): \"\"\"Should contain", "the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"target\") == \"t\", submission def", "import mock from mig3_client import SubmissionBuilder def test_minimum_viable_submission(converted_tests): \"\"\"Should produce something\"\"\" submission =", "[\"anything\"], submission def test_version_details(converted_tests): \"\"\"Should contain version details from git head commit\"\"\" with", "configuration ID used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert", "target configuration ID used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build()", "build number used to initialize the builder\"\"\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert", "SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"target\") == \"t\", submission def test_build_number(converted_tests): \"\"\"Should contain build", "\"\"\"Should contain version details from git head commit\"\"\" with mock.patch(\"mig3_client.git\") as patched_git: patched_git.Repo().head.object.hexsha", 
"patched_git.Repo().head.object.hexsha = \"a1\" * 20 patched_git.Repo().head.object.author.email = \"<EMAIL>\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build()", "\"a1\" * 20 patched_git.Repo().head.object.author.email = \"<EMAIL>\" submission = SubmissionBuilder(\"t\", \"b\", converted_tests).build() assert submission.get(\"version\",", "# -*- coding: utf-8 -*- import mock from mig3_client import SubmissionBuilder def test_minimum_viable_submission(converted_tests):" ]
[ "COPYRIGHT file) \"\"\" def od(value): def spaced(value): even = None for v in", "3-clause BSD. (See the COPYRIGHT file) \"\"\" def od(value): def spaced(value): even =", "v in value: if even is False: yield ' ' yield '%02X' %", "2009-09-06. Copyright (c) 2009-2017 Exa Networks. All rights reserved. License: 3-clause BSD. (See", "is False: yield ' ' yield '%02X' % v even = not even", "utf-8 \"\"\" od.py Created by <NAME> on 2009-09-06. Copyright (c) 2009-2017 Exa Networks.", "rights reserved. License: 3-clause BSD. (See the COPYRIGHT file) \"\"\" def od(value): def", "def od(value): def spaced(value): even = None for v in value: if even", "even is False: yield ' ' yield '%02X' % v even = not", "spaced(value): even = None for v in value: if even is False: yield", "reserved. License: 3-clause BSD. (See the COPYRIGHT file) \"\"\" def od(value): def spaced(value):", "BSD. (See the COPYRIGHT file) \"\"\" def od(value): def spaced(value): even = None", "even = None for v in value: if even is False: yield '", "encoding: utf-8 \"\"\" od.py Created by <NAME> on 2009-09-06. Copyright (c) 2009-2017 Exa", "def spaced(value): even = None for v in value: if even is False:", "= None for v in value: if even is False: yield ' '", "the COPYRIGHT file) \"\"\" def od(value): def spaced(value): even = None for v", "None for v in value: if even is False: yield ' ' yield", "by <NAME> on 2009-09-06. Copyright (c) 2009-2017 Exa Networks. All rights reserved. License:", "Networks. All rights reserved. License: 3-clause BSD. (See the COPYRIGHT file) \"\"\" def", "2009-2017 Exa Networks. All rights reserved. License: 3-clause BSD. (See the COPYRIGHT file)", "False: yield ' ' yield '%02X' % v even = not even return", "License: 3-clause BSD. (See the COPYRIGHT file) \"\"\" def od(value): def spaced(value): even", "od.py Created by <NAME> on 2009-09-06. Copyright (c) 2009-2017 Exa Networks. All rights", "Exa Networks. All rights reserved. License: 3-clause BSD. 
(See the COPYRIGHT file) \"\"\"", "on 2009-09-06. Copyright (c) 2009-2017 Exa Networks. All rights reserved. License: 3-clause BSD.", "Created by <NAME> on 2009-09-06. Copyright (c) 2009-2017 Exa Networks. All rights reserved.", "file) \"\"\" def od(value): def spaced(value): even = None for v in value:", "<NAME> on 2009-09-06. Copyright (c) 2009-2017 Exa Networks. All rights reserved. License: 3-clause", "od(value): def spaced(value): even = None for v in value: if even is", "value: if even is False: yield ' ' yield '%02X' % v even", "<reponame>pierky/exabgp # encoding: utf-8 \"\"\" od.py Created by <NAME> on 2009-09-06. Copyright (c)", "if even is False: yield ' ' yield '%02X' % v even =", "# encoding: utf-8 \"\"\" od.py Created by <NAME> on 2009-09-06. Copyright (c) 2009-2017", "for v in value: if even is False: yield ' ' yield '%02X'", "(See the COPYRIGHT file) \"\"\" def od(value): def spaced(value): even = None for", "in value: if even is False: yield ' ' yield '%02X' % v", "Copyright (c) 2009-2017 Exa Networks. All rights reserved. License: 3-clause BSD. (See the", "(c) 2009-2017 Exa Networks. All rights reserved. License: 3-clause BSD. (See the COPYRIGHT", "\"\"\" def od(value): def spaced(value): even = None for v in value: if", "\"\"\" od.py Created by <NAME> on 2009-09-06. Copyright (c) 2009-2017 Exa Networks. All", "yield ' ' yield '%02X' % v even = not even return ''.join(spaced(value))", "All rights reserved. License: 3-clause BSD. (See the COPYRIGHT file) \"\"\" def od(value):" ]
[ "\\ .group_by(Membership.created_by) \\ .filter(Membership.account == account) \\ .filter(Membership.settled_by == None) \\ .filter(Membership.id <=", "= db.update(Membership) \\ .where(Membership.account == account) \\ .where(Membership.settled_by == None) \\ .where(Membership.id <=", "@app.route('/') def index(): if g.sess is None: return render_template('index.html') else: return redirect(url_for('memberships_new')) @app.route('/sessions/new')", "\"total\": sum(r[\"count\"] for r in terms[term]), \"year\": int(term[1:]) + 2000, \"sortkey\": term[1:] +", "= Membership.query.get(id) return render_template('memberships/edit.html', membership=mem) @app.route('/memberships/<id>/delete', methods=['POST']) @requires('memberships_new') def memberships_destroy(id): mem = Membership.query.get(id)", "@wraps(func) def route(*args, **kwargs): if g.sess and g.sess.can(action): return func(*args, **kwargs) else: abort(404)", "0: return render_template('memberships/new.html', membership=membership, errors=errors) db.session.add(membership) db.session.commit() return redirect(url_for('memberships_new', term=membership.term) + '#rf-membership-anchor') @app.route('/memberships/<id>/edit')", "app.config['TIMEZONE'] = 'Europe/Oslo' app.config['TERM'] = \"V16\" app.config['PRICE'] = 50 app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path, 'vipps-reports')", "= db.session.query(column, db.func.count()).group_by(column) result = {} for row in query: result[row[0]] = row[1]", "in query_string.split(): like_string = '%' + part.lower() + '%' query = query.filter(Membership.queryname.like(like_string)) limit", "part.lower() in name.lower() banned = filter(matches, banned) return render_template('memberships/table.html', memberships=memberships, banned=banned) @app.route('/memberships/settle') @requires('settlement')", "= d.replace(tzinfo=pytz.utc) return d.astimezone(tz) def latest_born_date(): now = datetime.now() now = now.replace(year=now.year-18) -", "= \"%s %s\" % 
(self.transaction.first_name, self.transaction.last_name) def entries(self): transactions = list(self.transactions()) trans_ids =", "return sessions_new(error_message=\"Name is missing\") sess = Session( level=level, user_name=request.form[\"name\"], description=request.form.get(\"description\", \"Unknown\"), ) db.session.add(sess)", "| (term == app.config['TERM']) @hybrid_property def name(self): return self._name @name.setter def name(self, value):", "return render_template('index.html') else: return redirect(url_for('memberships_new')) @app.route('/sessions/new') def sessions_new(error_message=None): level = request.args['level'] description =", "@app.route('/vipps/<id>', methods=['POST']) def vipps_process(id): report = VippsReport.query.get(id) names = request.form.getlist(\"name\") terms = request.form.getlist(\"term\")", "result = {} for row in query: result[row[0]] = row[1] return result def", "db.Column(db.Text, nullable=False) account = db.Column(db.Text, nullable=False) # Entrance/Wristband/BankAccount/Unknown vipps_transaction_id = db.Column(db.Text) created_at =", "for count, term, year, week in membership_count: if term == \"Lifetime\": lifetime +=", "= filter(matches, banned) return render_template('memberships/table.html', memberships=memberships, banned=banned) @app.route('/memberships/settle') @requires('settlement') def memberships_settle(): max_id =", "isinstance(thing, Membership): if thing.settled_by is None: return thing.created_by == self.id if action ==", "and g.sess.can('vipps'): tid = request.form['vipps_transaction_id'].strip() if len(tid) == 0: tid = None membership.vipps_transaction_id", "nullable=False) price = db.Column(db.Integer, nullable=False) term = db.Column(db.Text, nullable=False) account = db.Column(db.Text, nullable=False)", "Session( level=level, user_name=request.form[\"name\"], description=request.form.get(\"description\", \"Unknown\"), ) db.session.add(sess) db.session.commit() session[\"session_id\"] = sess.id return 
redirect(url_for('index'))", "sessions = db.session.query( db.func.count(Membership.created_by), db.func.sum(Membership.price), Session ) \\ .group_by(Membership.created_by) \\ .filter(Membership.account == account)", "[] if len(memberships) < limit: # Search in blacklist banned = app.config[\"BLACKLIST\"] for", "redirect(url_for('memberships_settle', account=account)) @app.route('/memberships') @requires('memberships_list') def memberships_list(): memberships = Membership.query.all() return render_template('memberships/list.html', memberships=memberships) @app.route('/reports')", ".where(Membership.settled_by == None) \\ .where(Membership.id <= max_id) \\ .values(settled_by=g.sess.id) \\ .values(queryname=Membership.queryname) db.session.execute(update) db.session.commit()", "== self.id if action == 'edit': if isinstance(thing, Membership): return True return False", "= Session.query.get(session['session_id']) # Closed sessions are not valid if sess.closed_at is not None:", "nullable=False) queryname = db.Column(db.Text, nullable=False) price = db.Column(db.Integer, nullable=False) term = db.Column(db.Text, nullable=False)", "None) \\ .where(Membership.id <= max_id) \\ .values(settled_by=g.sess.id) \\ .values(queryname=Membership.queryname) db.session.execute(update) db.session.commit() return redirect(url_for('memberships_settle',", "def logout(): session.pop('session_id') def requires(action): def decorator(func): @wraps(func) def route(*args, **kwargs): if g.sess", "pytz from functools import wraps from flask import Flask, render_template, request, redirect, url_for,", "sum(count for count,_,_ in sessions), 'price': sum(price for _,price,_ in sessions), } return", "divmod(epoch, len(self.ALPHABET)) code = self.ALPHABET[i] + code return code @classmethod def count_dict(cls, column):", "is None: return render_template('index.html') else: return redirect(url_for('memberships_new')) @app.route('/sessions/new') def sessions_new(error_message=None): level = 
request.args['level']", "dict( localize=localize, latest_born_date=latest_born_date, epoch=epoch ) def logout(): session.pop('session_id') def requires(action): def decorator(func): @wraps(func)", "return render_template('memberships/new.html', membership=membership, last_memberships=last_memberships) @app.route('/memberships/new', methods=['POST']) @requires('memberships_new') def memberships_create(): membership = Membership( name=request.form[\"name\"],", "level=g.sess.level, user_name=request.form[\"name\"], description=g.sess.description ) db.session.add(new_session) g.sess.closed_at = datetime.utcnow() db.session.commit() session[\"session_id\"] = new_session.id return", "@app.context_processor def inject_helpers(): def localize(d): if d.tzinfo is None: d = d.replace(tzinfo=pytz.utc) return", "@app.route('/memberships/settle') @requires('settlement') def memberships_settle(): max_id = db.session.query(db.func.max(Membership.id)).scalar() if g.sess.can('settlement_all'): account = request.args.get('account', 'Entrance')", "db.relationship(\"Session\", foreign_keys=[settled_by], backref=\"settled_memberships\") valid_term = (term == \"Lifetime\") | (term == app.config['TERM']) @hybrid_property", "default=datetime.utcnow, nullable=False) def file_path(self): return os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\" % self.id) def transactions(self): return vippsparser.load_transactions(self.file_path())", "are not settled if isinstance(thing, Membership): if thing.settled_by is None: return thing.created_by ==", "jsonify, session, g, abort from calendar import month_name from collections import defaultdict, namedtuple", "in terms[term]), \"year\": int(term[1:]) + 2000, \"sortkey\": term[1:] + str(int(term[0] == 'H')) })", "len(memberships) < limit: # Search in blacklist banned = app.config[\"BLACKLIST\"] for part in", "state = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) def file_path(self): return 
os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\"", "self.memberships = memberships self.accuracy = 0 self.parse_transaction() def is_complete(self): return len(self.memberships) > 0", "def requires(action): def decorator(func): @wraps(func) def route(*args, **kwargs): if g.sess and g.sess.can(action): return", "if 'session_id' in session: sess = Session.query.get(session['session_id']) # Closed sessions are not valid", "db.func.strftime('%W', Membership.created_at).label('week') ) \\ .group_by('year', 'week', Membership.term) \\ .order_by('year', 'week') terms = defaultdict(lambda:", "return redirect(url_for('memberships_new')) @app.route('/memberships/search') def memberships_search(): query_string = request.args['q'] query = Membership.query.filter(Membership.valid_term) for part", "Membership( name=name, term=term, account=\"Vipps\", vipps_transaction_id=tid, created_by=g.sess.id, price=price_for_term(term) ) db.session.add(mem) report.state = request.form[\"state\"] db.session.commit()", "@requires('settlement') def memberships_settle_submit(): max_id = request.form[\"max_id\"] if g.sess.can('settlement_all'): account = request.form['account'] else: account", "# We can only delete our own memberships which are not settled if", "'SM', 'Admin', 'Superadmin'] class Session(db.Model): id = db.Column(db.Integer, primary_key=True) description = db.Column(db.Text, nullable=False)", "datetime.utcfromtimestamp(0) return (d - start).total_seconds() return dict( localize=localize, latest_born_date=latest_born_date, epoch=epoch ) def logout():", "@requires('settlement') def memberships_settle(): max_id = db.session.query(db.func.max(Membership.id)).scalar() if g.sess.can('settlement_all'): account = request.args.get('account', 'Entrance') else:", "\"Lifetime\": lifetime += count else: terms[term].append({\"count\": count, \"year\": int(year), \"week\": week}) summary =", "app.config['ASSETS_DEBUG'] = True app.config['SECRET_KEY'] = \"development key\" 
app.config['TIMEZONE'] = 'Europe/Oslo' app.config['TERM'] = \"V16\"", "= request.args['level'] description = request.args['description'] return render_template('sessions/new.html', level=level, description=description, error_message=error_message) @app.route('/sessions/new', methods=['POST']) def", "logout(): session.pop('session_id') def requires(action): def decorator(func): @wraps(func) def route(*args, **kwargs): if g.sess and", "db.ForeignKey('session.id'), nullable=True) created_session = db.relationship(\"Session\", foreign_keys=[created_by], backref=\"created_memberships\") settled_session = db.relationship(\"Session\", foreign_keys=[settled_by], backref=\"settled_memberships\") valid_term", "def is_atleast(self, level): return levels.index(self.level) >= levels.index(level) def can(self, action, thing=None): if self.level", "request.form.getlist(\"term\") tids = request.form.getlist(\"transaction_id\") accepted_tids = request.form.getlist(\"accepted_transaction_id\") for name, term, tid in zip(names,", "transaction, memberships): self.transaction = transaction self.memberships = memberships self.accuracy = 0 self.parse_transaction() def", "file = request.files['file'] report = VippsReport(state=\"created\") db.session.add(report) db.session.commit() file.save(report.file_path()) report.state = \"uploaded\" db.session.commit()", "in sessions), 'price': sum(price for _,price,_ in sessions), } return render_template('memberships/settle.html', sessions=sessions, summary=summary,", "action == 'wristband': return app.config['ENABLE_WRISTBAND'] if action == 'memberships_new': return True if action", "methods=['POST']) def vipps_process(id): report = VippsReport.query.get(id) names = request.form.getlist(\"name\") terms = request.form.getlist(\"term\") tids", "memberships): self.transaction = transaction self.memberships = memberships self.accuracy = 0 self.parse_transaction() def is_complete(self):", "= \"\" while epoch > 0: epoch, i = divmod(epoch, 
len(self.ALPHABET)) code =", "= db.session.query( db.func.count(Membership.id), Membership.term, db.func.strftime('%Y', Membership.created_at).label('year'), db.func.strftime('%W', Membership.created_at).label('week') ) \\ .group_by('year', 'week', Membership.term)", "summary=summary, max_id=max_id, account=account) @app.route('/memberships/settle', methods=['POST']) @requires('settlement') def memberships_settle_submit(): max_id = request.form[\"max_id\"] if g.sess.can('settlement_all'):", "g.sess.can('settlement_all'): account = request.args.get('account', 'Entrance') else: account = \"Entrance\" sessions = db.session.query( db.func.count(Membership.created_by),", "== \"pending\": return \"warning\" class Entry: COMMAND_PATTERN = r'^([vh]\\d+)|(evig|evil)' def __init__(self, transaction, memberships):", "render_template('reports.html', summary=summary, lifetime=lifetime) @app.route('/reports/lifetime') @requires('reports') def reports_lifetime(): memberships = Membership.query \\ .filter(Membership.term ==", "memberships_settle_submit(): max_id = request.form[\"max_id\"] if g.sess.can('settlement_all'): account = request.form['account'] else: account = \"Entrance\"", "= db.Column(db.Text, nullable=False) user_name = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) closed_at =", "created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) def file_path(self): return os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\" % self.id) def", "import timezone import pytz from functools import wraps from flask import Flask, render_template,", "cmd: idx = cmd.end(0) name = self.transaction.message[idx:] name = re.sub(r'^[^\\wæøåÆØÅ]+', '', name, re.U)", ".values(queryname=Membership.queryname) db.session.execute(update) db.session.commit() return redirect(url_for('memberships_settle', account=account)) @app.route('/memberships') @requires('memberships_list') def memberships_list(): memberships = 
Membership.query.all()", "if isinstance(thing, Membership): return True return False class VippsReport(db.Model): id = db.Column(db.Integer, primary_key=True)", "session[\"session_id\"] = new_session.id return redirect(url_for('index')) @app.route('/sessions/delete', methods=['POST']) def sessions_destroy(): g.sess.closed_at = datetime.utcnow() db.session.commit()", "if action == 'sessions_list': return self.is_atleast('SM') if action == 'delete': # We can", "render_template, request, redirect, url_for, jsonify, session, g, abort from calendar import month_name from", "\"\".join(str(x) for x in range(10)) ALPHABET += string.ascii_uppercase ALPHABET = ALPHABET\\ .replace(\"O\", \"\")\\", "int(time.mktime(self.created_at.timetuple())) code = \"\" while epoch > 0: epoch, i = divmod(epoch, len(self.ALPHABET))", "can(self, action, thing=None): if self.level == 'Superadmin': return True if action == 'settlement':", "Old sessions are not valid elif (datetime.now() - sess.created_at) > timedelta(days = 1):", "tid not in accepted_tids: continue mem = Membership( name=name, term=term, account=\"Vipps\", vipps_transaction_id=tid, created_by=g.sess.id,", "return app.config['ENABLE_WRISTBAND'] if action == 'memberships_new': return True if action == 'reports': return", "= memberships self.accuracy = 0 self.parse_transaction() def is_complete(self): return len(self.memberships) > 0 def", "= tid errors = [] if membership.name.strip() == '': errors.append(\"Name is required\") if", "# Entrance/Wristband/BankAccount/Unknown vipps_transaction_id = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) created_by = db.Column(db.Integer,", "'settlement': return self.is_atleast('SM') if action == 'settlement_all': return self.is_atleast('Admin') if action == 'wristband':", "redirect(url_for('memberships_new', term=membership.term) + '#rf-membership-anchor') @app.route('/memberships/<id>/edit') def memberships_edit(id): mem = 
Membership.query.get(id) return render_template('memberships/edit.html', membership=mem)", "= request.form[\"level\"] real_password = app.config['PASSWORDS'][request.form[\"level\"]] if real_password != request.form[\"password\"]: return sessions_new(error_message=\"Wrong password\") if", "db.Column(db.Text, nullable=False) price = db.Column(db.Integer, nullable=False) term = db.Column(db.Text, nullable=False) account = db.Column(db.Text,", "'', name, re.U) name = re.sub(r'[^\\wæøåÆØÅ]+$', '', name, re.U) self.name = name if", "return render_template('memberships/new.html', membership=membership, errors=errors) db.session.add(membership) db.session.commit() return redirect(url_for('memberships_new', term=membership.term) + '#rf-membership-anchor') @app.route('/memberships/<id>/edit') def", "sqlalchemy.ext.hybrid import hybrid_property import vippsparser app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db' app.config['ASSETS_DEBUG'] =", "def can(self, action, thing=None): if self.level == 'Superadmin': return True if action ==", "render_template('index.html') else: return redirect(url_for('memberships_new')) @app.route('/sessions/new') def sessions_new(error_message=None): level = request.args['level'] description = request.args['description']", "= app.config['TERM'] elif amount == price_for_term('Lifetime'): self.term = \"Lifetime\" else: return self.accuracy =", "if 'WEBASSETS_DIR' in os.environ: assets.directory = os.getenv('WEBASSETS_DIR') db = SQLAlchemy(app) def compute_queryname(context): return", "\"\" while epoch > 0: epoch, i = divmod(epoch, len(self.ALPHABET)) code = self.ALPHABET[i]", "key\" app.config['TIMEZONE'] = 'Europe/Oslo' app.config['TERM'] = \"V16\" app.config['PRICE'] = 50 app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path,", "True app.config['SECRET_KEY'] = \"development key\" app.config['TIMEZONE'] = 'Europe/Oslo' app.config['TERM'] = \"V16\" app.config['PRICE'] =", "return 
render_template('vipps/index.html', reports=reports) @app.route('/vipps', methods=['POST']) def vipps_import(): file = request.files['file'] report = VippsReport(state=\"created\")", "= [] if len(memberships) < limit: # Search in blacklist banned = app.config[\"BLACKLIST\"]", "self._name @name.setter def name(self, value): self._name = value self.queryname = value.lower() def is_free(self):", "'H')) }) summary.sort(key=lambda k: k[\"sortkey\"], reverse=True) return render_template('reports.html', summary=summary, lifetime=lifetime) @app.route('/reports/lifetime') @requires('reports') def", "name, re.U) name = re.sub(r'[^\\wæøåÆØÅ]+$', '', name, re.U) self.name = name if cmd.group(1)", "datetime.utcnow() db.session.commit() session[\"session_id\"] = new_session.id return redirect(url_for('index')) @app.route('/sessions/delete', methods=['POST']) def sessions_destroy(): g.sess.closed_at =", ".values(settled_by=g.sess.id) \\ .values(queryname=Membership.queryname) db.session.execute(update) db.session.commit() return redirect(url_for('memberships_settle', account=account)) @app.route('/memberships') @requires('memberships_list') def memberships_list(): memberships", "redirect, url_for, jsonify, session, g, abort from calendar import month_name from collections import", "'sess', None) @app.context_processor def inject_helpers(): def localize(d): if d.tzinfo is None: d =", "self.accuracy = 0 self.parse_transaction() def is_complete(self): return len(self.memberships) > 0 def parse_transaction(self): amount", "membership_count = db.session.query( db.func.count(Membership.id), Membership.term, db.func.strftime('%Y', Membership.created_at).label('year'), db.func.strftime('%W', Membership.created_at).label('week') ) \\ .group_by('year', 'week',", "required\") if len(errors) > 0: return render_template('memberships/new.html', membership=membership, errors=errors) db.session.add(membership) db.session.commit() return redirect(url_for('memberships_new',", "return 
d.astimezone(tz) def latest_born_date(): now = datetime.now() now = now.replace(year=now.year-18) - timedelta(days =", "10 else: return app.config['PRICE'] levels = ['Funk', 'SM', 'Admin', 'Superadmin'] class Session(db.Model): id", "request.form.getlist(\"name\") terms = request.form.getlist(\"term\") tids = request.form.getlist(\"transaction_id\") accepted_tids = request.form.getlist(\"accepted_transaction_id\") for name, term,", "VippsReport.query.order_by(VippsReport.created_at.desc()) return render_template('vipps/index.html', reports=reports) @app.route('/vipps', methods=['POST']) def vipps_import(): file = request.files['file'] report =", "= request.form['vipps_transaction_id'].strip() if len(tid) == 0: tid = None membership.vipps_transaction_id = tid errors", "\"year\": int(year), \"week\": week}) summary = [] for term in terms: summary.append({ \"name\":", "accepted_tids: continue mem = Membership( name=name, term=term, account=\"Vipps\", vipps_transaction_id=tid, created_by=g.sess.id, price=price_for_term(term) ) db.session.add(mem)", "self.is_atleast('Admin') if action == 'sessions_list': return self.is_atleast('SM') if action == 'delete': # We", ".order_by('year', 'week') terms = defaultdict(lambda: []) lifetime = 0 for count, term, year,", "= Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db' app.config['ASSETS_DEBUG'] = True app.config['SECRET_KEY'] = \"development key\" app.config['TIMEZONE']", "def compute_queryname(context): return context.current_parameters['name'].lower() class Membership(db.Model): id = db.Column(db.Integer, primary_key=True) _name = db.Column('name',", "url_for, jsonify, session, g, abort from calendar import month_name from collections import defaultdict,", "methods=['POST']) @requires('settlement') def memberships_settle_submit(): max_id = request.form[\"max_id\"] if g.sess.can('settlement_all'): account = request.form['account'] else:", "are not valid if sess.closed_at is not None: sess = None # Old", 
"Membership.query.get(id) return render_template('memberships/edit.html', membership=mem) @app.route('/memberships/<id>/delete', methods=['POST']) @requires('memberships_new') def memberships_destroy(id): mem = Membership.query.get(id) if", "description=g.sess.description ) db.session.add(new_session) g.sess.closed_at = datetime.utcnow() db.session.commit() session[\"session_id\"] = new_session.id return redirect(url_for('index')) @app.route('/sessions/delete',", "re.sub(r'^[^\\wæøåÆØÅ]+', '', name, re.U) name = re.sub(r'[^\\wæøåÆØÅ]+$', '', name, re.U) self.name = name", "def memberships_edit(id): mem = Membership.query.get(id) return render_template('memberships/edit.html', membership=mem) @app.route('/memberships/<id>/delete', methods=['POST']) @requires('memberships_new') def memberships_destroy(id):", "price = db.Column(db.Integer, nullable=False) term = db.Column(db.Text, nullable=False) account = db.Column(db.Text, nullable=False) #", "not None: sess = None # Old sessions are not valid elif (datetime.now()", "= datetime.now() now = now.replace(year=now.year-18) - timedelta(days = 1) return now def epoch(d):", "app.config['PRICE'] * 10 else: return app.config['PRICE'] levels = ['Funk', 'SM', 'Admin', 'Superadmin'] class", "@app.before_request def before_request(): if 'session_id' in session: sess = Session.query.get(session['session_id']) # Closed sessions", "Session( level=g.sess.level, user_name=request.form[\"name\"], description=g.sess.description ) db.session.add(new_session) g.sess.closed_at = datetime.utcnow() db.session.commit() session[\"session_id\"] = new_session.id", "\\ .join(Membership.created_session) \\ .all() summary = { 'count': sum(count for count,_,_ in sessions),", "\\ .where(Membership.settled_by == None) \\ .where(Membership.id <= max_id) \\ .values(settled_by=g.sess.id) \\ .values(queryname=Membership.queryname) db.session.execute(update)", "start).total_seconds() return dict( localize=localize, latest_born_date=latest_born_date, 
epoch=epoch ) def logout(): session.pop('session_id') def requires(action): def", "amount == price_for_term('Current'): self.accuracy = 2 if cmd.group(2) and amount == price_for_term('Lifetime'): self.accuracy", "db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True) created_session = db.relationship(\"Session\", foreign_keys=[created_by], backref=\"created_memberships\") settled_session = db.relationship(\"Session\", foreign_keys=[settled_by], backref=\"settled_memberships\")", "if action == 'delete': # We can only delete our own memberships which", "summary=summary, lifetime=lifetime) @app.route('/reports/lifetime') @requires('reports') def reports_lifetime(): memberships = Membership.query \\ .filter(Membership.term == \"Lifetime\")", "sessions), } return render_template('memberships/settle.html', sessions=sessions, summary=summary, max_id=max_id, account=account) @app.route('/memberships/settle', methods=['POST']) @requires('settlement') def memberships_settle_submit():", "levels.index(self.level) >= levels.index(level) def can(self, action, thing=None): if self.level == 'Superadmin': return True", "vipps_transaction_id=tid, created_by=g.sess.id, price=price_for_term(term) ) db.session.add(mem) report.state = request.form[\"state\"] db.session.commit() return redirect(url_for('vipps_index')) @app.errorhandler(404) def", "== account) \\ .filter(Membership.settled_by == None) \\ .filter(Membership.id <= max_id) \\ .join(Membership.created_session) \\", "render_template('memberships/edit.html', membership=mem) @app.route('/memberships/<id>/delete', methods=['POST']) @requires('memberships_new') def memberships_destroy(id): mem = Membership.query.get(id) if g.sess.can('delete', mem):", "def bootstrap_class(self): if self.state == \"created\": return \"danger\" if self.state == \"uploaded\": return", "@app.route('/memberships/new', methods=['POST']) @requires('memberships_new') def memberships_create(): membership = Membership( 
name=request.form[\"name\"], term=request.form[\"term\"], account=request.form[\"account\"], created_by=g.sess.id )", "db.Column(db.Text, nullable=False) level = db.Column(db.Text, nullable=False) user_name = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow,", "value): self._name = value self.queryname = value.lower() def is_free(self): return self.price == 0", "account=\"Vipps\", vipps_transaction_id=tid, created_by=g.sess.id, price=price_for_term(term) ) db.session.add(mem) report.state = request.form[\"state\"] db.session.commit() return redirect(url_for('vipps_index')) @app.errorhandler(404)", "in sessions), } return render_template('memberships/settle.html', sessions=sessions, summary=summary, max_id=max_id, account=account) @app.route('/memberships/settle', methods=['POST']) @requires('settlement') def", "Membership.term, db.func.strftime('%Y', Membership.created_at).label('year'), db.func.strftime('%W', Membership.created_at).label('week') ) \\ .group_by('year', 'week', Membership.term) \\ .order_by('year', 'week')", "'': errors.append(\"Name is required\") if len(errors) > 0: return render_template('memberships/new.html', membership=membership, errors=errors) db.session.add(membership)", "@requires('memberships_list') def memberships_list(): memberships = Membership.query.all() return render_template('memberships/list.html', memberships=memberships) @app.route('/reports') @requires('reports') def reports():", "[] mapping[m.vipps_transaction_id].append(m) return [self.Entry(t, mapping.get(t.id, [])) for t in transactions] @app.before_request def before_request():", "app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db' app.config['ASSETS_DEBUG'] = True app.config['SECRET_KEY'] = \"development key\"", "else: return redirect(url_for('memberships_new')) @app.route('/sessions/new') def sessions_new(error_message=None): level = request.args['level'] description = request.args['description'] return", "if 
action == 'settlement_all': return self.is_atleast('Admin') if action == 'wristband': return app.config['ENABLE_WRISTBAND'] if", "db.session.add(membership) db.session.commit() return redirect(url_for('memberships_new', term=membership.term) + '#rf-membership-anchor') @app.route('/memberships/<id>/edit') def memberships_edit(id): mem = Membership.query.get(id)", "db.ForeignKey('session.id'), nullable=False) settled_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True) created_session = db.relationship(\"Session\", foreign_keys=[created_by], backref=\"created_memberships\") settled_session", "return self.is_atleast('SM') if action == 'delete': # We can only delete our own", "db.session.add(mem) report.state = request.form[\"state\"] db.session.commit() return redirect(url_for('vipps_index')) @app.errorhandler(404) def page_not_found(e): return render_template('404.html'), 404", "terms: summary.append({ \"name\": term, \"rows\": terms[term], \"total\": sum(r[\"count\"] for r in terms[term]), \"year\":", "+ 2000, \"sortkey\": term[1:] + str(int(term[0] == 'H')) }) summary.sort(key=lambda k: k[\"sortkey\"], reverse=True)", "'count': sum(count for count,_,_ in sessions), 'price': sum(price for _,price,_ in sessions), }", "self.transaction.amount if amount == price_for_term('Current'): self.term = app.config['TERM'] elif amount == price_for_term('Lifetime'): self.term", "'Superadmin': 'superadmin', } app.config['BLACKLIST'] = [] app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True) tz = timezone(app.config['TIMEZONE']) assets", "def route(*args, **kwargs): if g.sess and g.sess.can(action): return func(*args, **kwargs) else: abort(404) return", "to Unix epoch epoch = int(time.mktime(self.created_at.timetuple())) code = \"\" while epoch > 0:", "range(10)) ALPHABET += string.ascii_uppercase ALPHABET = ALPHABET\\ .replace(\"O\", \"\")\\ .replace(\"I\", \"\") # too", "> 0: epoch, i = divmod(epoch, len(self.ALPHABET)) code = 
self.ALPHABET[i] + code return", "= request.files['file'] report = VippsReport(state=\"created\") db.session.add(report) db.session.commit() file.save(report.file_path()) report.state = \"uploaded\" db.session.commit() return", "account = db.Column(db.Text, nullable=False) # Entrance/Wristband/BankAccount/Unknown vipps_transaction_id = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow,", "'Lifetime': return app.config['PRICE'] * 10 else: return app.config['PRICE'] levels = ['Funk', 'SM', 'Admin',", "name = re.sub(r'[^\\wæøåÆØÅ]+$', '', name, re.U) self.name = name if cmd.group(1) and amount", "lambda name: part.lower() in name.lower() banned = filter(matches, banned) return render_template('memberships/table.html', memberships=memberships, banned=banned)", "= {} for row in query: result[row[0]] = row[1] return result def price_for_term(term):", "def sessions_create(): level = request.form[\"level\"] real_password = app.config['PASSWORDS'][request.form[\"level\"]] if real_password != request.form[\"password\"]: return", "db.Column('name', db.Text, nullable=False) queryname = db.Column(db.Text, nullable=False) price = db.Column(db.Integer, nullable=False) term =", "self.parse_transaction() def is_complete(self): return len(self.memberships) > 0 def parse_transaction(self): amount = self.transaction.amount if", ".filter(Membership.term == \"Lifetime\") \\ .order_by(Membership.created_at.desc()) return render_template('reports/lifetime.html', memberships=memberships) @app.route('/sessions') def sessions_list(): created =", "= db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True) created_session = db.relationship(\"Session\", foreign_keys=[created_by], backref=\"created_memberships\") settled_session = db.relationship(\"Session\", foreign_keys=[settled_by],", ".join(Membership.created_session) \\ .all() summary = { 'count': sum(count for count,_,_ in sessions), 'price':", "datetime.now() now = now.replace(year=now.year-18) - 
timedelta(days = 1) return now def epoch(d): start", "= db.Column(db.Integer, primary_key=True) state = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) def file_path(self):", "= ALPHABET\\ .replace(\"O\", \"\")\\ .replace(\"I\", \"\") # too similar to 1 def code(self):", "name=name, term=term, account=\"Vipps\", vipps_transaction_id=tid, created_by=g.sess.id, price=price_for_term(term) ) db.session.add(mem) report.state = request.form[\"state\"] db.session.commit() return", "\"\")\\ .replace(\"I\", \"\") # too similar to 1 def code(self): # convert to", "list(query.order_by(db.desc('created_at')).limit(limit)) banned = [] if len(memberships) < limit: # Search in blacklist banned", "\\ .filter(Membership.id <= max_id) \\ .join(Membership.created_session) \\ .all() summary = { 'count': sum(count", "thing.created_by == self.id if action == 'edit': if isinstance(thing, Membership): return True return", "% self.id) def transactions(self): return vippsparser.load_transactions(self.file_path()) def bootstrap_class(self): if self.state == \"created\": return", "price_for_term(membership.term) if 'vipps_transaction_id' in request.form and g.sess.can('vipps'): tid = request.form['vipps_transaction_id'].strip() if len(tid) ==", "return redirect(url_for('memberships_new', term=membership.term) + '#rf-membership-anchor') @app.route('/memberships/<id>/edit') def memberships_edit(id): mem = Membership.query.get(id) return render_template('memberships/edit.html',", "def vipps_import(): file = request.files['file'] report = VippsReport(state=\"created\") db.session.add(report) db.session.commit() file.save(report.file_path()) report.state =", "= db.Column(db.Text, nullable=False) level = db.Column(db.Text, nullable=False) user_name = db.Column(db.Text) created_at = db.Column(db.DateTime,", "def is_complete(self): return len(self.memberships) > 0 def parse_transaction(self): amount = self.transaction.amount if amount", "import 
vippsparser app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db' app.config['ASSETS_DEBUG'] = True app.config['SECRET_KEY'] =", "request, redirect, url_for, jsonify, session, g, abort from calendar import month_name from collections", "nullable=False) # Entrance/Wristband/BankAccount/Unknown vipps_transaction_id = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) created_by =", "in terms: summary.append({ \"name\": term, \"rows\": terms[term], \"total\": sum(r[\"count\"] for r in terms[term]),", "if len(tid) == 0: tid = None membership.vipps_transaction_id = tid errors = []", "= db.Column(db.DateTime, default=datetime.utcnow, nullable=False) created_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=False) settled_by = db.Column(db.Integer, db.ForeignKey('session.id'),", "methods=['POST']) @requires('memberships_new') def memberships_destroy(id): mem = Membership.query.get(id) if g.sess.can('delete', mem): db.session.delete(mem) db.session.commit() return", "decorator(func): @wraps(func) def route(*args, **kwargs): if g.sess and g.sess.can(action): return func(*args, **kwargs) else:", "name, term, tid in zip(names, terms, tids): if tid not in accepted_tids: continue", ") def logout(): session.pop('session_id') def requires(action): def decorator(func): @wraps(func) def route(*args, **kwargs): if", "None: sess = None # Old sessions are not valid elif (datetime.now() -", "\"%05d.xlsx\" % self.id) def transactions(self): return vippsparser.load_transactions(self.file_path()) def bootstrap_class(self): if self.state == \"created\":", "\"warning\" class Entry: COMMAND_PATTERN = r'^([vh]\\d+)|(evig|evil)' def __init__(self, transaction, memberships): self.transaction = transaction", "= [t.id for t in transactions] mapping = {} memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for", "= 'sqlite:///app.db' app.config['ASSETS_DEBUG'] = True 
app.config['SECRET_KEY'] = \"development key\" app.config['TIMEZONE'] = 'Europe/Oslo' app.config['TERM']", "{ 'F<PASSWORD>': '<PASSWORD>', 'SM': 'sm', 'Admin': 'admin', 'Superadmin': 'superadmin', } app.config['BLACKLIST'] = []", "= list(self.transactions()) trans_ids = [t.id for t in transactions] mapping = {} memberships", "def decorator(func): @wraps(func) def route(*args, **kwargs): if g.sess and g.sess.can(action): return func(*args, **kwargs)", "if action == 'settlement': return self.is_atleast('SM') if action == 'settlement_all': return self.is_atleast('Admin') if", "created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) created_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=False) settled_by = db.Column(db.Integer,", "in mapping: mapping[m.vipps_transaction_id] = [] mapping[m.vipps_transaction_id].append(m) return [self.Entry(t, mapping.get(t.id, [])) for t in", "= defaultdict(lambda: []) lifetime = 0 for count, term, year, week in membership_count:", "name=request.form[\"name\"], term=request.form[\"term\"], account=request.form[\"account\"], created_by=g.sess.id ) membership.price = price_for_term(membership.term) if 'vipps_transaction_id' in request.form and", "isinstance(thing, Membership): return True return False class VippsReport(db.Model): id = db.Column(db.Integer, primary_key=True) state", "return \"\" if self.state == \"resolved\": return \"success\" if self.state == \"pending\": return", "transactions] mapping = {} memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for m in memberships: if m.vipps_transaction_id", "class Session(db.Model): id = db.Column(db.Integer, primary_key=True) description = db.Column(db.Text, nullable=False) level = db.Column(db.Text,", "Session.query.get(session['session_id']) # Closed sessions are not valid if sess.closed_at is not None: sess", "os.getenv('WEBASSETS_DIR') db = SQLAlchemy(app) def compute_queryname(context): return 
context.current_parameters['name'].lower() class Membership(db.Model): id = db.Column(db.Integer,", "'week') terms = defaultdict(lambda: []) lifetime = 0 for count, term, year, week", "not request.form[\"name\"]: return sessions_new(error_message=\"Name is missing\") sess = Session( level=level, user_name=request.form[\"name\"], description=request.form.get(\"description\", \"Unknown\"),", ".where(Membership.id <= max_id) \\ .values(settled_by=g.sess.id) \\ .values(queryname=Membership.queryname) db.session.execute(update) db.session.commit() return redirect(url_for('memberships_settle', account=account)) @app.route('/memberships')", "memberships_edit(id): mem = Membership.query.get(id) return render_template('memberships/edit.html', membership=mem) @app.route('/memberships/<id>/delete', methods=['POST']) @requires('memberships_new') def memberships_destroy(id): mem", "if d.tzinfo is None: d = d.replace(tzinfo=pytz.utc) return d.astimezone(tz) def latest_born_date(): now =", "= app.config[\"BLACKLIST\"] for part in query_string.split(): matches = lambda name: part.lower() in name.lower()", "SQLAlchemy(app) def compute_queryname(context): return context.current_parameters['name'].lower() class Membership(db.Model): id = db.Column(db.Integer, primary_key=True) _name =", "render_template('vipps/index.html', reports=reports) @app.route('/vipps', methods=['POST']) def vipps_import(): file = request.files['file'] report = VippsReport(state=\"created\") db.session.add(report)", "sessions=sessions, created=created, settled=settled) @app.route('/vipps') def vipps_index(): reports = VippsReport.query.order_by(VippsReport.created_at.desc()) return render_template('vipps/index.html', reports=reports) @app.route('/vipps',", "def vipps_show(id): report = VippsReport.query.get(id) return render_template('vipps/show.html', report=report) @app.route('/vipps/<id>', methods=['POST']) def vipps_process(id): report", "thing=None): if self.level == 'Superadmin': return True if action == 
'settlement': return self.is_atleast('SM')", "if g.sess is None: return render_template('index.html') else: return redirect(url_for('memberships_new')) @app.route('/sessions/new') def sessions_new(error_message=None): level", "similar to 1 def code(self): # convert to Unix epoch epoch = int(time.mktime(self.created_at.timetuple()))", ") membership.price = price_for_term(membership.term) if 'vipps_transaction_id' in request.form and g.sess.can('vipps'): tid = request.form['vipps_transaction_id'].strip()", "reports(): membership_count = db.session.query( db.func.count(Membership.id), Membership.term, db.func.strftime('%Y', Membership.created_at).label('year'), db.func.strftime('%W', Membership.created_at).label('week') ) \\ .group_by('year',", "epoch=epoch ) def logout(): session.pop('session_id') def requires(action): def decorator(func): @wraps(func) def route(*args, **kwargs):", "for r in terms[term]), \"year\": int(term[1:]) + 2000, \"sortkey\": term[1:] + str(int(term[0] ==", "m.vipps_transaction_id not in mapping: mapping[m.vipps_transaction_id] = [] mapping[m.vipps_transaction_id].append(m) return [self.Entry(t, mapping.get(t.id, [])) for", "queryname = db.Column(db.Text, nullable=False) price = db.Column(db.Integer, nullable=False) term = db.Column(db.Text, nullable=False) account", "app.config['SECRET_KEY'] = \"development key\" app.config['TIMEZONE'] = 'Europe/Oslo' app.config['TERM'] = \"V16\" app.config['PRICE'] = 50", "is not None: sess = None # Old sessions are not valid elif", "redirect(url_for('index')) @app.route('/sessions/delete', methods=['POST']) def sessions_destroy(): g.sess.closed_at = datetime.utcnow() db.session.commit() logout() return redirect(url_for('index')) @app.route('/memberships/new')", "delete our own memberships which are not settled if isinstance(thing, Membership): if thing.settled_by", "[t.id for t in transactions] mapping = {} memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for m", 
"part.lower() + '%' query = query.filter(Membership.queryname.like(like_string)) limit = 10 memberships = list(query.order_by(db.desc('created_at')).limit(limit)) banned", "def memberships_list(): memberships = Membership.query.all() return render_template('memberships/list.html', memberships=memberships) @app.route('/reports') @requires('reports') def reports(): membership_count", "term[1:] + str(int(term[0] == 'H')) }) summary.sort(key=lambda k: k[\"sortkey\"], reverse=True) return render_template('reports.html', summary=summary,", "> 0: return render_template('memberships/new.html', membership=membership, errors=errors) db.session.add(membership) db.session.commit() return redirect(url_for('memberships_new', term=membership.term) + '#rf-membership-anchor')", "'Superadmin': return True if action == 'settlement': return self.is_atleast('SM') if action == 'settlement_all':", "decorator @app.route('/') def index(): if g.sess is None: return render_template('index.html') else: return redirect(url_for('memberships_new'))", "== 'sessions_list': return self.is_atleast('SM') if action == 'delete': # We can only delete", "= request.args.get('term', app.config['TERM']) membership = Membership(term=term, account=\"Entrance\") membership.price = price_for_term(membership.term) return render_template('memberships/new.html', membership=membership,", "= os.path.join(app.root_path, 'vipps-reports') app.config['PASSWORDS'] = { 'F<PASSWORD>': '<PASSWORD>', 'SM': 'sm', 'Admin': 'admin', 'Superadmin':", "in name.lower() banned = filter(matches, banned) return render_template('memberships/table.html', memberships=memberships, banned=banned) @app.route('/memberships/settle') @requires('settlement') def", "defaultdict(lambda: []) lifetime = 0 for count, term, year, week in membership_count: if", "db.session.commit() return redirect(url_for('memberships_new', term=membership.term) + '#rf-membership-anchor') @app.route('/memberships/<id>/edit') def memberships_edit(id): mem = 
Membership.query.get(id) return", "'vipps_transaction_id' in request.form and g.sess.can('vipps'): tid = request.form['vipps_transaction_id'].strip() if len(tid) == 0: tid", "own memberships which are not settled if isinstance(thing, Membership): if thing.settled_by is None:", "@requires('memberships_new') def memberships_create(): membership = Membership( name=request.form[\"name\"], term=request.form[\"term\"], account=request.form[\"account\"], created_by=g.sess.id ) membership.price =", "vipps_process(id): report = VippsReport.query.get(id) names = request.form.getlist(\"name\") terms = request.form.getlist(\"term\") tids = request.form.getlist(\"transaction_id\")", "description = db.Column(db.Text, nullable=False) level = db.Column(db.Text, nullable=False) user_name = db.Column(db.Text) created_at =", "g.sess.closed_at = datetime.utcnow() db.session.commit() session[\"session_id\"] = new_session.id return redirect(url_for('index')) @app.route('/sessions/delete', methods=['POST']) def sessions_destroy():", "vipps_show(id): report = VippsReport.query.get(id) return render_template('vipps/show.html', report=report) @app.route('/vipps/<id>', methods=['POST']) def vipps_process(id): report =", "return vippsparser.load_transactions(self.file_path()) def bootstrap_class(self): if self.state == \"created\": return \"danger\" if self.state ==", "return result def price_for_term(term): if term == 'Lifetime': return app.config['PRICE'] * 10 else:", "return \"success\" if self.state == \"pending\": return \"warning\" class Entry: COMMAND_PATTERN = r'^([vh]\\d+)|(evig|evil)'", "if g.sess.can('settlement_all'): account = request.form['account'] else: account = \"Entrance\" update = db.update(Membership) \\", "== \"created\": return \"danger\" if self.state == \"uploaded\": return \"\" if self.state ==", "membership.price = price_for_term(membership.term) return render_template('memberships/new.html', membership=membership, last_memberships=last_memberships) 
@app.route('/memberships/new', methods=['POST']) @requires('memberships_new') def memberships_create(): membership", "action == 'memberships_new': return True if action == 'reports': return self.is_atleast('Admin') if action", "memberships=memberships) @app.route('/sessions') def sessions_list(): created = Membership.count_dict(Membership.created_by) settled = Membership.count_dict(Membership.settled_by) sessions = Session.query.order_by(db.desc('created_at'))", "sessions_new(error_message=\"Wrong password\") if not request.form[\"name\"]: return sessions_new(error_message=\"Name is missing\") sess = Session( level=level,", "memberships = list(query.order_by(db.desc('created_at')).limit(limit)) banned = [] if len(memberships) < limit: # Search in", "memberships_search(): query_string = request.args['q'] query = Membership.query.filter(Membership.valid_term) for part in query_string.split(): like_string =", "return self.accuracy = 1 cmd = re.search(self.COMMAND_PATTERN, self.transaction.message, re.I) if cmd: idx =", "methods=['POST']) def sessions_destroy(): g.sess.closed_at = datetime.utcnow() db.session.commit() logout() return redirect(url_for('index')) @app.route('/memberships/new') @requires('memberships_new') def", "list(self.transactions()) trans_ids = [t.id for t in transactions] mapping = {} memberships =", "valid if sess.closed_at is not None: sess = None # Old sessions are", "from datetime import datetime, timedelta import time from pytz import timezone import pytz", "request.args['description'] return render_template('sessions/new.html', level=level, description=description, error_message=error_message) @app.route('/sessions/new', methods=['POST']) def sessions_create(): level = request.form[\"level\"]", "in blacklist banned = app.config[\"BLACKLIST\"] for part in query_string.split(): matches = lambda name:", "def before_request(): if 'session_id' in session: sess = Session.query.get(session['session_id']) # Closed sessions are", "'<PASSWORD>', 'SM': 
'sm', 'Admin': 'admin', 'Superadmin': 'superadmin', } app.config['BLACKLIST'] = [] app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'),", "lifetime = 0 for count, term, year, week in membership_count: if term ==", "if isinstance(thing, Membership): if thing.settled_by is None: return thing.created_by == self.id if action", "timezone(app.config['TIMEZONE']) assets = Environment(app) if 'WEBASSETS_DIR' in os.environ: assets.directory = os.getenv('WEBASSETS_DIR') db =", "return render_template('memberships/settle.html', sessions=sessions, summary=summary, max_id=max_id, account=account) @app.route('/memberships/settle', methods=['POST']) @requires('settlement') def memberships_settle_submit(): max_id =", "db.session.add(sess) db.session.commit() session[\"session_id\"] = sess.id return redirect(url_for('index')) @app.route('/sessions/switch', methods=['POST']) def sessions_switch(): new_session =", "**kwargs): if g.sess and g.sess.can(action): return func(*args, **kwargs) else: abort(404) return route return", "\"name\": term, \"rows\": terms[term], \"total\": sum(r[\"count\"] for r in terms[term]), \"year\": int(term[1:]) +", "count else: terms[term].append({\"count\": count, \"year\": int(year), \"week\": week}) summary = [] for term", "too similar to 1 def code(self): # convert to Unix epoch epoch =", "def name(self): return self._name @name.setter def name(self, value): self._name = value self.queryname =", "created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) closed_at = db.Column(db.DateTime, nullable=True) def is_atleast(self, level): return", "sessions = Session.query.order_by(db.desc('created_at')) return render_template('sessions/list.html', sessions=sessions, created=created, settled=settled) @app.route('/vipps') def vipps_index(): reports =", "= \"development key\" app.config['TIMEZONE'] = 'Europe/Oslo' app.config['TERM'] = \"V16\" app.config['PRICE'] = 50 app.config['VIPPS_STORAGE_PATH']", ") \\ 
.group_by(Membership.created_by) \\ .filter(Membership.account == account) \\ .filter(Membership.settled_by == None) \\ .filter(Membership.id", "= VippsReport.query.order_by(VippsReport.created_at.desc()) return render_template('vipps/index.html', reports=reports) @app.route('/vipps', methods=['POST']) def vipps_import(): file = request.files['file'] report", "db.session.query( db.func.count(Membership.created_by), db.func.sum(Membership.price), Session ) \\ .group_by(Membership.created_by) \\ .filter(Membership.account == account) \\ .filter(Membership.settled_by", "= { 'F<PASSWORD>': '<PASSWORD>', 'SM': 'sm', 'Admin': 'admin', 'Superadmin': 'superadmin', } app.config['BLACKLIST'] =", "query = query.filter(Membership.queryname.like(like_string)) limit = 10 memberships = list(query.order_by(db.desc('created_at')).limit(limit)) banned = [] if", "report = VippsReport.query.get(id) return render_template('vipps/show.html', report=report) @app.route('/vipps/<id>', methods=['POST']) def vipps_process(id): report = VippsReport.query.get(id)", "return self.is_atleast('Admin') if action == 'wristband': return app.config['ENABLE_WRISTBAND'] if action == 'memberships_new': return", "g, abort from calendar import month_name from collections import defaultdict, namedtuple import re", "epoch = int(time.mktime(self.created_at.timetuple())) code = \"\" while epoch > 0: epoch, i =", "db.session.query( db.func.count(Membership.id), Membership.term, db.func.strftime('%Y', Membership.created_at).label('year'), db.func.strftime('%W', Membership.created_at).label('week') ) \\ .group_by('year', 'week', Membership.term) \\", "None: return thing.created_by == self.id if action == 'edit': if isinstance(thing, Membership): return", "if 'vipps_transaction_id' in request.form and g.sess.can('vipps'): tid = request.form['vipps_transaction_id'].strip() if len(tid) == 0:", "= \"\".join(str(x) for x in range(10)) ALPHABET += string.ascii_uppercase ALPHABET = ALPHABET\\ .replace(\"O\",", "class 
Entry: COMMAND_PATTERN = r'^([vh]\\d+)|(evig|evil)' def __init__(self, transaction, memberships): self.transaction = transaction self.memberships", "price_for_term('Lifetime'): self.term = \"Lifetime\" else: return self.accuracy = 1 cmd = re.search(self.COMMAND_PATTERN, self.transaction.message,", "= (term == \"Lifetime\") | (term == app.config['TERM']) @hybrid_property def name(self): return self._name", "name if cmd.group(1) and amount == price_for_term('Current'): self.accuracy = 2 if cmd.group(2) and", "memberships = Membership.query \\ .filter(Membership.term == \"Lifetime\") \\ .order_by(Membership.created_at.desc()) return render_template('reports/lifetime.html', memberships=memberships) @app.route('/sessions')", "[] app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True) tz = timezone(app.config['TIMEZONE']) assets = Environment(app) if 'WEBASSETS_DIR' in", "@app.route('/sessions/new', methods=['POST']) def sessions_create(): level = request.form[\"level\"] real_password = app.config['PASSWORDS'][request.form[\"level\"]] if real_password !=", "last_memberships=last_memberships) @app.route('/memberships/new', methods=['POST']) @requires('memberships_new') def memberships_create(): membership = Membership( name=request.form[\"name\"], term=request.form[\"term\"], account=request.form[\"account\"], created_by=g.sess.id", "= Session.query.order_by(db.desc('created_at')) return render_template('sessions/list.html', sessions=sessions, created=created, settled=settled) @app.route('/vipps') def vipps_index(): reports = VippsReport.query.order_by(VippsReport.created_at.desc())", "= 'Europe/Oslo' app.config['TERM'] = \"V16\" app.config['PRICE'] = 50 app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path, 'vipps-reports') app.config['PASSWORDS']", "= re.search(self.COMMAND_PATTERN, self.transaction.message, re.I) if cmd: idx = cmd.end(0) name = self.transaction.message[idx:] name", "SQLAlchemy from flask_assets import Environment, Bundle 
from sqlalchemy.ext.hybrid import hybrid_property import vippsparser app", "new_session.id return redirect(url_for('index')) @app.route('/sessions/delete', methods=['POST']) def sessions_destroy(): g.sess.closed_at = datetime.utcnow() db.session.commit() logout() return", "'Admin': 'admin', 'Superadmin': 'superadmin', } app.config['BLACKLIST'] = [] app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True) tz =", "backref=\"created_memberships\") settled_session = db.relationship(\"Session\", foreign_keys=[settled_by], backref=\"settled_memberships\") valid_term = (term == \"Lifetime\") | (term", ">= levels.index(level) def can(self, action, thing=None): if self.level == 'Superadmin': return True if", "= db.Column(db.Text, nullable=False) price = db.Column(db.Integer, nullable=False) term = db.Column(db.Text, nullable=False) account =", "account=request.form[\"account\"], created_by=g.sess.id ) membership.price = price_for_term(membership.term) if 'vipps_transaction_id' in request.form and g.sess.can('vipps'): tid", "render_template('sessions/list.html', sessions=sessions, created=created, settled=settled) @app.route('/vipps') def vipps_index(): reports = VippsReport.query.order_by(VippsReport.created_at.desc()) return render_template('vipps/index.html', reports=reports)", "db.session.commit() logout() return redirect(url_for('index')) @app.route('/memberships/new') @requires('memberships_new') def memberships_new(): last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term =", "cmd.group(1) and amount == price_for_term('Current'): self.accuracy = 2 if cmd.group(2) and amount ==", "sess.created_at) > timedelta(days = 1): sess = None setattr(g, 'sess', sess) else: setattr(g,", "row in query: result[row[0]] = row[1] return result def price_for_term(term): if term ==", "import defaultdict, namedtuple import re import os from flask_sqlalchemy import SQLAlchemy from flask_assets", "= 0 for 
count, term, year, week in membership_count: if term == \"Lifetime\":", "return self.price == 0 ALPHABET = \"\".join(str(x) for x in range(10)) ALPHABET +=", "account = request.args.get('account', 'Entrance') else: account = \"Entrance\" sessions = db.session.query( db.func.count(Membership.created_by), db.func.sum(Membership.price),", "sess = Session( level=level, user_name=request.form[\"name\"], description=request.form.get(\"description\", \"Unknown\"), ) db.session.add(sess) db.session.commit() session[\"session_id\"] = sess.id", "0: epoch, i = divmod(epoch, len(self.ALPHABET)) code = self.ALPHABET[i] + code return code", "lifetime += count else: terms[term].append({\"count\": count, \"year\": int(year), \"week\": week}) summary = []", "terms[term]), \"year\": int(term[1:]) + 2000, \"sortkey\": term[1:] + str(int(term[0] == 'H')) }) summary.sort(key=lambda", "db.relationship(\"Session\", foreign_keys=[created_by], backref=\"created_memberships\") settled_session = db.relationship(\"Session\", foreign_keys=[settled_by], backref=\"settled_memberships\") valid_term = (term == \"Lifetime\")", "'', name, re.U) self.name = name if cmd.group(1) and amount == price_for_term('Current'): self.accuracy", "mem = Membership.query.get(id) return render_template('memberships/edit.html', membership=mem) @app.route('/memberships/<id>/delete', methods=['POST']) @requires('memberships_new') def memberships_destroy(id): mem =", "else: return app.config['PRICE'] levels = ['Funk', 'SM', 'Admin', 'Superadmin'] class Session(db.Model): id =", "return code @classmethod def count_dict(cls, column): query = db.session.query(column, db.func.count()).group_by(column) result = {}", "return redirect(url_for('memberships_settle', account=account)) @app.route('/memberships') @requires('memberships_list') def memberships_list(): memberships = Membership.query.all() return render_template('memberships/list.html', memberships=memberships)", "timedelta(days = 1) return now def epoch(d): start = 
datetime.utcfromtimestamp(0) return (d -", "return route return decorator @app.route('/') def index(): if g.sess is None: return render_template('index.html')", "mapping[m.vipps_transaction_id] = [] mapping[m.vipps_transaction_id].append(m) return [self.Entry(t, mapping.get(t.id, [])) for t in transactions] @app.before_request", "Session ) \\ .group_by(Membership.created_by) \\ .filter(Membership.account == account) \\ .filter(Membership.settled_by == None) \\", "app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True) tz = timezone(app.config['TIMEZONE']) assets = Environment(app) if 'WEBASSETS_DIR' in os.environ:", "t in transactions] mapping = {} memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for m in memberships:", "if self.level == 'Superadmin': return True if action == 'settlement': return self.is_atleast('SM') if", "latest_born_date(): now = datetime.now() now = now.replace(year=now.year-18) - timedelta(days = 1) return now", "nullable=False) user_name = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) closed_at = db.Column(db.DateTime, nullable=True)", "if term == 'Lifetime': return app.config['PRICE'] * 10 else: return app.config['PRICE'] levels =", "memberships which are not settled if isinstance(thing, Membership): if thing.settled_by is None: return", "{ 'count': sum(count for count,_,_ in sessions), 'price': sum(price for _,price,_ in sessions),", "for part in query_string.split(): like_string = '%' + part.lower() + '%' query =", "query_string.split(): matches = lambda name: part.lower() in name.lower() banned = filter(matches, banned) return", "Membership.count_dict(Membership.settled_by) sessions = Session.query.order_by(db.desc('created_at')) return render_template('sessions/list.html', sessions=sessions, created=created, settled=settled) @app.route('/vipps') def vipps_index(): reports", "== price_for_term('Lifetime'): self.accuracy = 2 else: 
self.name = \"%s %s\" % (self.transaction.first_name, self.transaction.last_name)", "created=created, settled=settled) @app.route('/vipps') def vipps_index(): reports = VippsReport.query.order_by(VippsReport.created_at.desc()) return render_template('vipps/index.html', reports=reports) @app.route('/vipps', methods=['POST'])", "week in membership_count: if term == \"Lifetime\": lifetime += count else: terms[term].append({\"count\": count,", "[])) for t in transactions] @app.before_request def before_request(): if 'session_id' in session: sess", "@hybrid_property def name(self): return self._name @name.setter def name(self, value): self._name = value self.queryname", "name, re.U) self.name = name if cmd.group(1) and amount == price_for_term('Current'): self.accuracy =", "= request.form.getlist(\"term\") tids = request.form.getlist(\"transaction_id\") accepted_tids = request.form.getlist(\"accepted_transaction_id\") for name, term, tid in", "self.level == 'Superadmin': return True if action == 'settlement': return self.is_atleast('SM') if action", "request.form['account'] else: account = \"Entrance\" update = db.update(Membership) \\ .where(Membership.account == account) \\", "settled = Membership.count_dict(Membership.settled_by) sessions = Session.query.order_by(db.desc('created_at')) return render_template('sessions/list.html', sessions=sessions, created=created, settled=settled) @app.route('/vipps') def", "epoch > 0: epoch, i = divmod(epoch, len(self.ALPHABET)) code = self.ALPHABET[i] + code", "context.current_parameters['name'].lower() class Membership(db.Model): id = db.Column(db.Integer, primary_key=True) _name = db.Column('name', db.Text, nullable=False) queryname", "(term == app.config['TERM']) @hybrid_property def name(self): return self._name @name.setter def name(self, value): self._name", "assets.directory = os.getenv('WEBASSETS_DIR') db = SQLAlchemy(app) def compute_queryname(context): return context.current_parameters['name'].lower() class 
Membership(db.Model): id", "term in terms: summary.append({ \"name\": term, \"rows\": terms[term], \"total\": sum(r[\"count\"] for r in", "self.id if action == 'edit': if isinstance(thing, Membership): return True return False class", "if cmd: idx = cmd.end(0) name = self.transaction.message[idx:] name = re.sub(r'^[^\\wæøåÆØÅ]+', '', name,", "\"sortkey\": term[1:] + str(int(term[0] == 'H')) }) summary.sort(key=lambda k: k[\"sortkey\"], reverse=True) return render_template('reports.html',", "def latest_born_date(): now = datetime.now() now = now.replace(year=now.year-18) - timedelta(days = 1) return", "= value.lower() def is_free(self): return self.price == 0 ALPHABET = \"\".join(str(x) for x", "in transactions] mapping = {} memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for m in memberships: if", "import re import os from flask_sqlalchemy import SQLAlchemy from flask_assets import Environment, Bundle", "mapping: mapping[m.vipps_transaction_id] = [] mapping[m.vipps_transaction_id].append(m) return [self.Entry(t, mapping.get(t.id, [])) for t in transactions]", "memberships: if m.vipps_transaction_id not in mapping: mapping[m.vipps_transaction_id] = [] mapping[m.vipps_transaction_id].append(m) return [self.Entry(t, mapping.get(t.id,", "return True if action == 'settlement': return self.is_atleast('SM') if action == 'settlement_all': return", "= datetime.utcfromtimestamp(0) return (d - start).total_seconds() return dict( localize=localize, latest_born_date=latest_born_date, epoch=epoch ) def", "return thing.created_by == self.id if action == 'edit': if isinstance(thing, Membership): return True", "'vipps-reports') app.config['PASSWORDS'] = { 'F<PASSWORD>': '<PASSWORD>', 'SM': 'sm', 'Admin': 'admin', 'Superadmin': 'superadmin', }", "= [] for term in terms: summary.append({ \"name\": term, \"rows\": terms[term], \"total\": sum(r[\"count\"]", "not settled if isinstance(thing, Membership): if thing.settled_by is None: return 
thing.created_by == self.id", "part in query_string.split(): like_string = '%' + part.lower() + '%' query = query.filter(Membership.queryname.like(like_string))", "def name(self, value): self._name = value self.queryname = value.lower() def is_free(self): return self.price", "= 50 app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path, 'vipps-reports') app.config['PASSWORDS'] = { 'F<PASSWORD>': '<PASSWORD>', 'SM': 'sm',", "# convert to Unix epoch epoch = int(time.mktime(self.created_at.timetuple())) code = \"\" while epoch", "not in accepted_tids: continue mem = Membership( name=name, term=term, account=\"Vipps\", vipps_transaction_id=tid, created_by=g.sess.id, price=price_for_term(term)", "action == 'reports': return self.is_atleast('Admin') if action == 'sessions_list': return self.is_atleast('SM') if action", "membership=membership, last_memberships=last_memberships) @app.route('/memberships/new', methods=['POST']) @requires('memberships_new') def memberships_create(): membership = Membership( name=request.form[\"name\"], term=request.form[\"term\"], account=request.form[\"account\"],", "None) @app.context_processor def inject_helpers(): def localize(d): if d.tzinfo is None: d = d.replace(tzinfo=pytz.utc)", "render_template('memberships/new.html', membership=membership, last_memberships=last_memberships) @app.route('/memberships/new', methods=['POST']) @requires('memberships_new') def memberships_create(): membership = Membership( name=request.form[\"name\"], term=request.form[\"term\"],", "= VippsReport(state=\"created\") db.session.add(report) db.session.commit() file.save(report.file_path()) report.state = \"uploaded\" db.session.commit() return redirect(url_for('vipps_index')) @app.route('/vipps/<id>') def", "price_for_term(term): if term == 'Lifetime': return app.config['PRICE'] * 10 else: return app.config['PRICE'] levels", "== 'Superadmin': return True if action == 'settlement': return self.is_atleast('SM') if action ==", 
"@app.route('/sessions/new') def sessions_new(error_message=None): level = request.args['level'] description = request.args['description'] return render_template('sessions/new.html', level=level, description=description,", "request.form[\"password\"]: return sessions_new(error_message=\"Wrong password\") if not request.form[\"name\"]: return sessions_new(error_message=\"Name is missing\") sess =", "in membership_count: if term == \"Lifetime\": lifetime += count else: terms[term].append({\"count\": count, \"year\":", "== 0 ALPHABET = \"\".join(str(x) for x in range(10)) ALPHABET += string.ascii_uppercase ALPHABET", "app.config['TERM'] elif amount == price_for_term('Lifetime'): self.term = \"Lifetime\" else: return self.accuracy = 1", "return redirect(url_for('index')) @app.route('/memberships/new') @requires('memberships_new') def memberships_new(): last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term = request.args.get('term', app.config['TERM'])", "return render_template('memberships/list.html', memberships=memberships) @app.route('/reports') @requires('reports') def reports(): membership_count = db.session.query( db.func.count(Membership.id), Membership.term, db.func.strftime('%Y',", "db.Text, nullable=False) queryname = db.Column(db.Text, nullable=False) price = db.Column(db.Integer, nullable=False) term = db.Column(db.Text,", "= [] if membership.name.strip() == '': errors.append(\"Name is required\") if len(errors) > 0:", "part in query_string.split(): matches = lambda name: part.lower() in name.lower() banned = filter(matches,", "db.session.commit() return redirect(url_for('vipps_index')) @app.route('/vipps/<id>') def vipps_show(id): report = VippsReport.query.get(id) return render_template('vipps/show.html', report=report) @app.route('/vipps/<id>',", "else: account = \"Entrance\" sessions = db.session.query( db.func.count(Membership.created_by), db.func.sum(Membership.price), Session ) \\ 
.group_by(Membership.created_by)", "self.accuracy = 1 cmd = re.search(self.COMMAND_PATTERN, self.transaction.message, re.I) if cmd: idx = cmd.end(0)", "= Membership(term=term, account=\"Entrance\") membership.price = price_for_term(membership.term) return render_template('memberships/new.html', membership=membership, last_memberships=last_memberships) @app.route('/memberships/new', methods=['POST']) @requires('memberships_new')", "i = divmod(epoch, len(self.ALPHABET)) code = self.ALPHABET[i] + code return code @classmethod def", "requires(action): def decorator(func): @wraps(func) def route(*args, **kwargs): if g.sess and g.sess.can(action): return func(*args,", "default=datetime.utcnow, nullable=False) closed_at = db.Column(db.DateTime, nullable=True) def is_atleast(self, level): return levels.index(self.level) >= levels.index(level)", "0 ALPHABET = \"\".join(str(x) for x in range(10)) ALPHABET += string.ascii_uppercase ALPHABET =", "'admin', 'Superadmin': 'superadmin', } app.config['BLACKLIST'] = [] app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True) tz = timezone(app.config['TIMEZONE'])", "= app.config['PASSWORDS'][request.form[\"level\"]] if real_password != request.form[\"password\"]: return sessions_new(error_message=\"Wrong password\") if not request.form[\"name\"]: return", "= request.args['description'] return render_template('sessions/new.html', level=level, description=description, error_message=error_message) @app.route('/sessions/new', methods=['POST']) def sessions_create(): level =", "Session(db.Model): id = db.Column(db.Integer, primary_key=True) description = db.Column(db.Text, nullable=False) level = db.Column(db.Text, nullable=False)", "summary = [] for term in terms: summary.append({ \"name\": term, \"rows\": terms[term], \"total\":", "price_for_term('Current'): self.accuracy = 2 if cmd.group(2) and amount == price_for_term('Lifetime'): self.accuracy = 2", "= \"Lifetime\" else: return self.accuracy = 1 cmd = 
re.search(self.COMMAND_PATTERN, self.transaction.message, re.I) if", "reverse=True) return render_template('reports.html', summary=summary, lifetime=lifetime) @app.route('/reports/lifetime') @requires('reports') def reports_lifetime(): memberships = Membership.query \\", "our own memberships which are not settled if isinstance(thing, Membership): if thing.settled_by is", "month_name from collections import defaultdict, namedtuple import re import os from flask_sqlalchemy import", "@app.route('/vipps/<id>') def vipps_show(id): report = VippsReport.query.get(id) return render_template('vipps/show.html', report=report) @app.route('/vipps/<id>', methods=['POST']) def vipps_process(id):", "= True app.config['SECRET_KEY'] = \"development key\" app.config['TIMEZONE'] = 'Europe/Oslo' app.config['TERM'] = \"V16\" app.config['PRICE']", "= VippsReport.query.get(id) return render_template('vipps/show.html', report=report) @app.route('/vipps/<id>', methods=['POST']) def vipps_process(id): report = VippsReport.query.get(id) names", "Membership(db.Model): id = db.Column(db.Integer, primary_key=True) _name = db.Column('name', db.Text, nullable=False) queryname = db.Column(db.Text,", "import hybrid_property import vippsparser app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db' app.config['ASSETS_DEBUG'] = True", "tz = timezone(app.config['TIMEZONE']) assets = Environment(app) if 'WEBASSETS_DIR' in os.environ: assets.directory = os.getenv('WEBASSETS_DIR')", "# Search in blacklist banned = app.config[\"BLACKLIST\"] for part in query_string.split(): matches =", "== \"Lifetime\": lifetime += count else: terms[term].append({\"count\": count, \"year\": int(year), \"week\": week}) summary", "start = datetime.utcfromtimestamp(0) return (d - start).total_seconds() return dict( localize=localize, latest_born_date=latest_born_date, epoch=epoch )", "sum(price for _,price,_ in sessions), } return render_template('memberships/settle.html', sessions=sessions, 
summary=summary, max_id=max_id, account=account) @app.route('/memberships/settle',", "\\ .where(Membership.account == account) \\ .where(Membership.settled_by == None) \\ .where(Membership.id <= max_id) \\", "cmd.end(0) name = self.transaction.message[idx:] name = re.sub(r'^[^\\wæøåÆØÅ]+', '', name, re.U) name = re.sub(r'[^\\wæøåÆØÅ]+$',", "= re.sub(r'[^\\wæøåÆØÅ]+$', '', name, re.U) self.name = name if cmd.group(1) and amount ==", "return False class VippsReport(db.Model): id = db.Column(db.Integer, primary_key=True) state = db.Column(db.Text) created_at =", "before_request(): if 'session_id' in session: sess = Session.query.get(session['session_id']) # Closed sessions are not", "= 1) return now def epoch(d): start = datetime.utcfromtimestamp(0) return (d - start).total_seconds()", "membership = Membership( name=request.form[\"name\"], term=request.form[\"term\"], account=request.form[\"account\"], created_by=g.sess.id ) membership.price = price_for_term(membership.term) if 'vipps_transaction_id'", "self.state == \"created\": return \"danger\" if self.state == \"uploaded\": return \"\" if self.state", "return render_template('memberships/edit.html', membership=mem) @app.route('/memberships/<id>/delete', methods=['POST']) @requires('memberships_new') def memberships_destroy(id): mem = Membership.query.get(id) if g.sess.can('delete',", "price=price_for_term(term) ) db.session.add(mem) report.state = request.form[\"state\"] db.session.commit() return redirect(url_for('vipps_index')) @app.errorhandler(404) def page_not_found(e): return", "year, week in membership_count: if term == \"Lifetime\": lifetime += count else: terms[term].append({\"count\":", "name(self, value): self._name = value self.queryname = value.lower() def is_free(self): return self.price ==", "import pytz from functools import wraps from flask import Flask, render_template, request, redirect,", "def code(self): # convert to Unix epoch epoch = int(time.mktime(self.created_at.timetuple())) code = 
\"\"", "= db.Column(db.Integer, nullable=False) term = db.Column(db.Text, nullable=False) account = db.Column(db.Text, nullable=False) # Entrance/Wristband/BankAccount/Unknown", "code = self.ALPHABET[i] + code return code @classmethod def count_dict(cls, column): query =", "now def epoch(d): start = datetime.utcfromtimestamp(0) return (d - start).total_seconds() return dict( localize=localize,", "from flask_sqlalchemy import SQLAlchemy from flask_assets import Environment, Bundle from sqlalchemy.ext.hybrid import hybrid_property", "= Membership( name=request.form[\"name\"], term=request.form[\"term\"], account=request.form[\"account\"], created_by=g.sess.id ) membership.price = price_for_term(membership.term) if 'vipps_transaction_id' in", "in zip(names, terms, tids): if tid not in accepted_tids: continue mem = Membership(", "def entries(self): transactions = list(self.transactions()) trans_ids = [t.id for t in transactions] mapping", "level = request.args['level'] description = request.args['description'] return render_template('sessions/new.html', level=level, description=description, error_message=error_message) @app.route('/sessions/new', methods=['POST'])", "settled_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True) created_session = db.relationship(\"Session\", foreign_keys=[created_by], backref=\"created_memberships\") settled_session = db.relationship(\"Session\",", "0 for count, term, year, week in membership_count: if term == \"Lifetime\": lifetime", "def inject_helpers(): def localize(d): if d.tzinfo is None: d = d.replace(tzinfo=pytz.utc) return d.astimezone(tz)", "nullable=False) level = db.Column(db.Text, nullable=False) user_name = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)", "= db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) created_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=False) settled_by", "# Closed 
sessions are not valid if sess.closed_at is not None: sess =", "+= string.ascii_uppercase ALPHABET = ALPHABET\\ .replace(\"O\", \"\")\\ .replace(\"I\", \"\") # too similar to", "return (d - start).total_seconds() return dict( localize=localize, latest_born_date=latest_born_date, epoch=epoch ) def logout(): session.pop('session_id')", "timedelta(days = 1): sess = None setattr(g, 'sess', sess) else: setattr(g, 'sess', None)", "for m in memberships: if m.vipps_transaction_id not in mapping: mapping[m.vipps_transaction_id] = [] mapping[m.vipps_transaction_id].append(m)", "action == 'settlement_all': return self.is_atleast('Admin') if action == 'wristband': return app.config['ENABLE_WRISTBAND'] if action", "(d - start).total_seconds() return dict( localize=localize, latest_born_date=latest_born_date, epoch=epoch ) def logout(): session.pop('session_id') def", "if cmd.group(2) and amount == price_for_term('Lifetime'): self.accuracy = 2 else: self.name = \"%s", "d.astimezone(tz) def latest_born_date(): now = datetime.now() now = now.replace(year=now.year-18) - timedelta(days = 1)", "= \"Entrance\" sessions = db.session.query( db.func.count(Membership.created_by), db.func.sum(Membership.price), Session ) \\ .group_by(Membership.created_by) \\ .filter(Membership.account", "self.transaction.last_name) def entries(self): transactions = list(self.transactions()) trans_ids = [t.id for t in transactions]", "if amount == price_for_term('Current'): self.term = app.config['TERM'] elif amount == price_for_term('Lifetime'): self.term =", "= Membership.query \\ .filter(Membership.term == \"Lifetime\") \\ .order_by(Membership.created_at.desc()) return render_template('reports/lifetime.html', memberships=memberships) @app.route('/sessions') def", "COMMAND_PATTERN = r'^([vh]\\d+)|(evig|evil)' def __init__(self, transaction, memberships): self.transaction = transaction self.memberships = memberships", "def is_free(self): return self.price == 0 ALPHABET = \"\".join(str(x) for x in 
range(10))", "sessions_create(): level = request.form[\"level\"] real_password = app.config['PASSWORDS'][request.form[\"level\"]] if real_password != request.form[\"password\"]: return sessions_new(error_message=\"Wrong", "Membership): return True return False class VippsReport(db.Model): id = db.Column(db.Integer, primary_key=True) state =", "= VippsReport.query.get(id) names = request.form.getlist(\"name\") terms = request.form.getlist(\"term\") tids = request.form.getlist(\"transaction_id\") accepted_tids =", "in os.environ: assets.directory = os.getenv('WEBASSETS_DIR') db = SQLAlchemy(app) def compute_queryname(context): return context.current_parameters['name'].lower() class", "else: account = \"Entrance\" update = db.update(Membership) \\ .where(Membership.account == account) \\ .where(Membership.settled_by", "in range(10)) ALPHABET += string.ascii_uppercase ALPHABET = ALPHABET\\ .replace(\"O\", \"\")\\ .replace(\"I\", \"\") #", "Membership): if thing.settled_by is None: return thing.created_by == self.id if action == 'edit':", "localize(d): if d.tzinfo is None: d = d.replace(tzinfo=pytz.utc) return d.astimezone(tz) def latest_born_date(): now", "entries(self): transactions = list(self.transactions()) trans_ids = [t.id for t in transactions] mapping =", "return app.config['PRICE'] * 10 else: return app.config['PRICE'] levels = ['Funk', 'SM', 'Admin', 'Superadmin']", "nullable=False) def file_path(self): return os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\" % self.id) def transactions(self): return vippsparser.load_transactions(self.file_path()) def", "= request.form.getlist(\"transaction_id\") accepted_tids = request.form.getlist(\"accepted_transaction_id\") for name, term, tid in zip(names, terms, tids):", "app.config['PASSWORDS'] = { 'F<PASSWORD>': '<PASSWORD>', 'SM': 'sm', 'Admin': 'admin', 'Superadmin': 'superadmin', } app.config['BLACKLIST']", "if self.state == \"created\": return \"danger\" if self.state == \"uploaded\": return \"\" if", 
"= datetime.utcnow() db.session.commit() logout() return redirect(url_for('index')) @app.route('/memberships/new') @requires('memberships_new') def memberships_new(): last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10)", "'superadmin', } app.config['BLACKLIST'] = [] app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True) tz = timezone(app.config['TIMEZONE']) assets =", "silent=True) tz = timezone(app.config['TIMEZONE']) assets = Environment(app) if 'WEBASSETS_DIR' in os.environ: assets.directory =", "return context.current_parameters['name'].lower() class Membership(db.Model): id = db.Column(db.Integer, primary_key=True) _name = db.Column('name', db.Text, nullable=False)", "self.price == 0 ALPHABET = \"\".join(str(x) for x in range(10)) ALPHABET += string.ascii_uppercase", "in accepted_tids: continue mem = Membership( name=name, term=term, account=\"Vipps\", vipps_transaction_id=tid, created_by=g.sess.id, price=price_for_term(term) )", "sessions_new(error_message=\"Name is missing\") sess = Session( level=level, user_name=request.form[\"name\"], description=request.form.get(\"description\", \"Unknown\"), ) db.session.add(sess) db.session.commit()", "\"pending\": return \"warning\" class Entry: COMMAND_PATTERN = r'^([vh]\\d+)|(evig|evil)' def __init__(self, transaction, memberships): self.transaction", "_name = db.Column('name', db.Text, nullable=False) queryname = db.Column(db.Text, nullable=False) price = db.Column(db.Integer, nullable=False)", "bootstrap_class(self): if self.state == \"created\": return \"danger\" if self.state == \"uploaded\": return \"\"", "price_for_term('Lifetime'): self.accuracy = 2 else: self.name = \"%s %s\" % (self.transaction.first_name, self.transaction.last_name) def", "1): sess = None setattr(g, 'sess', sess) else: setattr(g, 'sess', None) @app.context_processor def", "= ['Funk', 'SM', 'Admin', 'Superadmin'] class Session(db.Model): id = 
db.Column(db.Integer, primary_key=True) description =", "\"Lifetime\") | (term == app.config['TERM']) @hybrid_property def name(self): return self._name @name.setter def name(self,", "\"resolved\": return \"success\" if self.state == \"pending\": return \"warning\" class Entry: COMMAND_PATTERN =", "in request.form and g.sess.can('vipps'): tid = request.form['vipps_transaction_id'].strip() if len(tid) == 0: tid =", "= db.Column(db.DateTime, default=datetime.utcnow, nullable=False) def file_path(self): return os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\" % self.id) def transactions(self):", "<= max_id) \\ .values(settled_by=g.sess.id) \\ .values(queryname=Membership.queryname) db.session.execute(update) db.session.commit() return redirect(url_for('memberships_settle', account=account)) @app.route('/memberships') @requires('memberships_list')", "g.sess and g.sess.can(action): return func(*args, **kwargs) else: abort(404) return route return decorator @app.route('/')", "\"danger\" if self.state == \"uploaded\": return \"\" if self.state == \"resolved\": return \"success\"", "settled=settled) @app.route('/vipps') def vipps_index(): reports = VippsReport.query.order_by(VippsReport.created_at.desc()) return render_template('vipps/index.html', reports=reports) @app.route('/vipps', methods=['POST']) def", "int(year), \"week\": week}) summary = [] for term in terms: summary.append({ \"name\": term,", "sessions_destroy(): g.sess.closed_at = datetime.utcnow() db.session.commit() logout() return redirect(url_for('index')) @app.route('/memberships/new') @requires('memberships_new') def memberships_new(): last_memberships", "convert to Unix epoch epoch = int(time.mktime(self.created_at.timetuple())) code = \"\" while epoch >", "if real_password != request.form[\"password\"]: return sessions_new(error_message=\"Wrong password\") if not request.form[\"name\"]: return sessions_new(error_message=\"Name is", "code return code @classmethod def count_dict(cls, column): 
query = db.session.query(column, db.func.count()).group_by(column) result =", "sess = None # Old sessions are not valid elif (datetime.now() - sess.created_at)", "\\ .order_by(Membership.created_at.desc()) return render_template('reports/lifetime.html', memberships=memberships) @app.route('/sessions') def sessions_list(): created = Membership.count_dict(Membership.created_by) settled =", "def sessions_destroy(): g.sess.closed_at = datetime.utcnow() db.session.commit() logout() return redirect(url_for('index')) @app.route('/memberships/new') @requires('memberships_new') def memberships_new():", "str(int(term[0] == 'H')) }) summary.sort(key=lambda k: k[\"sortkey\"], reverse=True) return render_template('reports.html', summary=summary, lifetime=lifetime) @app.route('/reports/lifetime')", "r in terms[term]), \"year\": int(term[1:]) + 2000, \"sortkey\": term[1:] + str(int(term[0] == 'H'))", "= db.Column(db.Text, nullable=False) # Entrance/Wristband/BankAccount/Unknown vipps_transaction_id = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)", "banned) return render_template('memberships/table.html', memberships=memberships, banned=banned) @app.route('/memberships/settle') @requires('settlement') def memberships_settle(): max_id = db.session.query(db.func.max(Membership.id)).scalar() if", "\"created\": return \"danger\" if self.state == \"uploaded\": return \"\" if self.state == \"resolved\":", "account) \\ .filter(Membership.settled_by == None) \\ .filter(Membership.id <= max_id) \\ .join(Membership.created_session) \\ .all()", "methods=['POST']) def vipps_import(): file = request.files['file'] report = VippsReport(state=\"created\") db.session.add(report) db.session.commit() file.save(report.file_path()) report.state", "request.form[\"level\"] real_password = app.config['PASSWORDS'][request.form[\"level\"]] if real_password != request.form[\"password\"]: return sessions_new(error_message=\"Wrong password\") if not", 
"db.Column(db.DateTime, default=datetime.utcnow, nullable=False) closed_at = db.Column(db.DateTime, nullable=True) def is_atleast(self, level): return levels.index(self.level) >=", "= 10 memberships = list(query.order_by(db.desc('created_at')).limit(limit)) banned = [] if len(memberships) < limit: #", "account=account) @app.route('/memberships/settle', methods=['POST']) @requires('settlement') def memberships_settle_submit(): max_id = request.form[\"max_id\"] if g.sess.can('settlement_all'): account =", "+= count else: terms[term].append({\"count\": count, \"year\": int(year), \"week\": week}) summary = [] for", "from flask_assets import Environment, Bundle from sqlalchemy.ext.hybrid import hybrid_property import vippsparser app =", "name.lower() banned = filter(matches, banned) return render_template('memberships/table.html', memberships=memberships, banned=banned) @app.route('/memberships/settle') @requires('settlement') def memberships_settle():", "db.func.count(Membership.created_by), db.func.sum(Membership.price), Session ) \\ .group_by(Membership.created_by) \\ .filter(Membership.account == account) \\ .filter(Membership.settled_by ==", "memberships_create(): membership = Membership( name=request.form[\"name\"], term=request.form[\"term\"], account=request.form[\"account\"], created_by=g.sess.id ) membership.price = price_for_term(membership.term) if", "level = db.Column(db.Text, nullable=False) user_name = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) closed_at", "parse_transaction(self): amount = self.transaction.amount if amount == price_for_term('Current'): self.term = app.config['TERM'] elif amount", "for count,_,_ in sessions), 'price': sum(price for _,price,_ in sessions), } return render_template('memberships/settle.html',", "@app.route('/vipps', methods=['POST']) def vipps_import(): file = request.files['file'] report = VippsReport(state=\"created\") db.session.add(report) db.session.commit() 
file.save(report.file_path())", "datetime import datetime, timedelta import time from pytz import timezone import pytz from", "vippsparser app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db' app.config['ASSETS_DEBUG'] = True app.config['SECRET_KEY'] = \"development", "= db.Column(db.Integer, primary_key=True) _name = db.Column('name', db.Text, nullable=False) queryname = db.Column(db.Text, nullable=False) price", "vipps_index(): reports = VippsReport.query.order_by(VippsReport.created_at.desc()) return render_template('vipps/index.html', reports=reports) @app.route('/vipps', methods=['POST']) def vipps_import(): file =", "terms = request.form.getlist(\"term\") tids = request.form.getlist(\"transaction_id\") accepted_tids = request.form.getlist(\"accepted_transaction_id\") for name, term, tid", "- timedelta(days = 1) return now def epoch(d): start = datetime.utcfromtimestamp(0) return (d", "0 def parse_transaction(self): amount = self.transaction.amount if amount == price_for_term('Current'): self.term = app.config['TERM']", "VippsReport(db.Model): id = db.Column(db.Integer, primary_key=True) state = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False)", "return True return False class VippsReport(db.Model): id = db.Column(db.Integer, primary_key=True) state = db.Column(db.Text)", "membership = Membership(term=term, account=\"Entrance\") membership.price = price_for_term(membership.term) return render_template('memberships/new.html', membership=membership, last_memberships=last_memberships) @app.route('/memberships/new', methods=['POST'])", "len(tid) == 0: tid = None membership.vipps_transaction_id = tid errors = [] if", "== app.config['TERM']) @hybrid_property def name(self): return self._name @name.setter def name(self, value): self._name =", "__init__(self, transaction, memberships): self.transaction = transaction self.memberships = memberships self.accuracy = 0 self.parse_transaction()", 
"Membership.count_dict(Membership.created_by) settled = Membership.count_dict(Membership.settled_by) sessions = Session.query.order_by(db.desc('created_at')) return render_template('sessions/list.html', sessions=sessions, created=created, settled=settled) @app.route('/vipps')", "count, term, year, week in membership_count: if term == \"Lifetime\": lifetime += count", "max_id) \\ .join(Membership.created_session) \\ .all() summary = { 'count': sum(count for count,_,_ in", "else: terms[term].append({\"count\": count, \"year\": int(year), \"week\": week}) summary = [] for term in", "ALPHABET = \"\".join(str(x) for x in range(10)) ALPHABET += string.ascii_uppercase ALPHABET = ALPHABET\\", "return decorator @app.route('/') def index(): if g.sess is None: return render_template('index.html') else: return", "len(errors) > 0: return render_template('memberships/new.html', membership=membership, errors=errors) db.session.add(membership) db.session.commit() return redirect(url_for('memberships_new', term=membership.term) +", "Membership.term) \\ .order_by('year', 'week') terms = defaultdict(lambda: []) lifetime = 0 for count,", "= None # Old sessions are not valid elif (datetime.now() - sess.created_at) >", "action == 'settlement': return self.is_atleast('SM') if action == 'settlement_all': return self.is_atleast('Admin') if action", "request.form[\"name\"]: return sessions_new(error_message=\"Name is missing\") sess = Session( level=level, user_name=request.form[\"name\"], description=request.form.get(\"description\", \"Unknown\"), )", "+ '#rf-membership-anchor') @app.route('/memberships/<id>/edit') def memberships_edit(id): mem = Membership.query.get(id) return render_template('memberships/edit.html', membership=mem) @app.route('/memberships/<id>/delete', methods=['POST'])", "reports_lifetime(): memberships = Membership.query \\ .filter(Membership.term == \"Lifetime\") \\ .order_by(Membership.created_at.desc()) return render_template('reports/lifetime.html', 
memberships=memberships)", "code @classmethod def count_dict(cls, column): query = db.session.query(column, db.func.count()).group_by(column) result = {} for", "backref=\"settled_memberships\") valid_term = (term == \"Lifetime\") | (term == app.config['TERM']) @hybrid_property def name(self):", "# too similar to 1 def code(self): # convert to Unix epoch epoch", "1 def code(self): # convert to Unix epoch epoch = int(time.mktime(self.created_at.timetuple())) code =", "action == 'edit': if isinstance(thing, Membership): return True return False class VippsReport(db.Model): id", "filter(matches, banned) return render_template('memberships/table.html', memberships=memberships, banned=banned) @app.route('/memberships/settle') @requires('settlement') def memberships_settle(): max_id = db.session.query(db.func.max(Membership.id)).scalar()", "nullable=False) account = db.Column(db.Text, nullable=False) # Entrance/Wristband/BankAccount/Unknown vipps_transaction_id = db.Column(db.Text) created_at = db.Column(db.DateTime,", "'reports': return self.is_atleast('Admin') if action == 'sessions_list': return self.is_atleast('SM') if action == 'delete':", "self.state == \"uploaded\": return \"\" if self.state == \"resolved\": return \"success\" if self.state", "memberships=memberships) @app.route('/reports') @requires('reports') def reports(): membership_count = db.session.query( db.func.count(Membership.id), Membership.term, db.func.strftime('%Y', Membership.created_at).label('year'), db.func.strftime('%W',", "'production.cfg'), silent=True) tz = timezone(app.config['TIMEZONE']) assets = Environment(app) if 'WEBASSETS_DIR' in os.environ: assets.directory", "description=request.form.get(\"description\", \"Unknown\"), ) db.session.add(sess) db.session.commit() session[\"session_id\"] = sess.id return redirect(url_for('index')) @app.route('/sessions/switch', methods=['POST']) def", "self.transaction.message[idx:] name = re.sub(r'^[^\\wæøåÆØÅ]+', '', name, re.U) name = 
re.sub(r'[^\\wæøåÆØÅ]+$', '', name, re.U)", "VippsReport(state=\"created\") db.session.add(report) db.session.commit() file.save(report.file_path()) report.state = \"uploaded\" db.session.commit() return redirect(url_for('vipps_index')) @app.route('/vipps/<id>') def vipps_show(id):", "amount == price_for_term('Current'): self.term = app.config['TERM'] elif amount == price_for_term('Lifetime'): self.term = \"Lifetime\"", "request.form and g.sess.can('vipps'): tid = request.form['vipps_transaction_id'].strip() if len(tid) == 0: tid = None", "value.lower() def is_free(self): return self.price == 0 ALPHABET = \"\".join(str(x) for x in", "string.ascii_uppercase ALPHABET = ALPHABET\\ .replace(\"O\", \"\")\\ .replace(\"I\", \"\") # too similar to 1", "return redirect(url_for('vipps_index')) @app.route('/vipps/<id>') def vipps_show(id): report = VippsReport.query.get(id) return render_template('vipps/show.html', report=report) @app.route('/vipps/<id>', methods=['POST'])", "in query_string.split(): matches = lambda name: part.lower() in name.lower() banned = filter(matches, banned)", "membership.vipps_transaction_id = tid errors = [] if membership.name.strip() == '': errors.append(\"Name is required\")", "return now def epoch(d): start = datetime.utcfromtimestamp(0) return (d - start).total_seconds() return dict(", "if self.state == \"uploaded\": return \"\" if self.state == \"resolved\": return \"success\" if", "% (self.transaction.first_name, self.transaction.last_name) def entries(self): transactions = list(self.transactions()) trans_ids = [t.id for t", "banned = [] if len(memberships) < limit: # Search in blacklist banned =", "def transactions(self): return vippsparser.load_transactions(self.file_path()) def bootstrap_class(self): if self.state == \"created\": return \"danger\" if", "db.func.sum(Membership.price), Session ) \\ .group_by(Membership.created_by) \\ .filter(Membership.account == account) \\ .filter(Membership.settled_by == None)", "term = db.Column(db.Text, 
nullable=False) account = db.Column(db.Text, nullable=False) # Entrance/Wristband/BankAccount/Unknown vipps_transaction_id = db.Column(db.Text)", "valid_term = (term == \"Lifetime\") | (term == app.config['TERM']) @hybrid_property def name(self): return", "return render_template('sessions/new.html', level=level, description=description, error_message=error_message) @app.route('/sessions/new', methods=['POST']) def sessions_create(): level = request.form[\"level\"] real_password", "memberships = Membership.query.all() return render_template('memberships/list.html', memberships=memberships) @app.route('/reports') @requires('reports') def reports(): membership_count = db.session.query(", "redirect(url_for('vipps_index')) @app.route('/vipps/<id>') def vipps_show(id): report = VippsReport.query.get(id) return render_template('vipps/show.html', report=report) @app.route('/vipps/<id>', methods=['POST']) def", "memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for m in memberships: if m.vipps_transaction_id not in mapping: mapping[m.vipps_transaction_id]", ".filter(Membership.account == account) \\ .filter(Membership.settled_by == None) \\ .filter(Membership.id <= max_id) \\ .join(Membership.created_session)", ".filter(Membership.settled_by == None) \\ .filter(Membership.id <= max_id) \\ .join(Membership.created_session) \\ .all() summary =", "'wristband': return app.config['ENABLE_WRISTBAND'] if action == 'memberships_new': return True if action == 'reports':", "Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db' app.config['ASSETS_DEBUG'] = True app.config['SECRET_KEY'] = \"development key\" app.config['TIMEZONE'] =", "flask_sqlalchemy import SQLAlchemy from flask_assets import Environment, Bundle from sqlalchemy.ext.hybrid import hybrid_property import", "render_template('memberships/settle.html', sessions=sessions, summary=summary, max_id=max_id, account=account) @app.route('/memberships/settle', 
methods=['POST']) @requires('settlement') def memberships_settle_submit(): max_id = request.form[\"max_id\"]", "terms[term].append({\"count\": count, \"year\": int(year), \"week\": week}) summary = [] for term in terms:", "max_id = request.form[\"max_id\"] if g.sess.can('settlement_all'): account = request.form['account'] else: account = \"Entrance\" update", "len(self.memberships) > 0 def parse_transaction(self): amount = self.transaction.amount if amount == price_for_term('Current'): self.term", "terms, tids): if tid not in accepted_tids: continue mem = Membership( name=name, term=term,", "closed_at = db.Column(db.DateTime, nullable=True) def is_atleast(self, level): return levels.index(self.level) >= levels.index(level) def can(self,", "'Admin', 'Superadmin'] class Session(db.Model): id = db.Column(db.Integer, primary_key=True) description = db.Column(db.Text, nullable=False) level", "'#rf-membership-anchor') @app.route('/memberships/<id>/edit') def memberships_edit(id): mem = Membership.query.get(id) return render_template('memberships/edit.html', membership=mem) @app.route('/memberships/<id>/delete', methods=['POST']) @requires('memberships_new')", "g.sess.can(action): return func(*args, **kwargs) else: abort(404) return route return decorator @app.route('/') def index():", "flask import Flask, render_template, request, redirect, url_for, jsonify, session, g, abort from calendar", "\"uploaded\": return \"\" if self.state == \"resolved\": return \"success\" if self.state == \"pending\":", "@requires('reports') def reports(): membership_count = db.session.query( db.func.count(Membership.id), Membership.term, db.func.strftime('%Y', Membership.created_at).label('year'), db.func.strftime('%W', Membership.created_at).label('week') )", "mapping.get(t.id, [])) for t in transactions] @app.before_request def before_request(): if 'session_id' in session:", "Environment(app) if 'WEBASSETS_DIR' in os.environ: assets.directory = os.getenv('WEBASSETS_DIR') db = 
SQLAlchemy(app) def compute_queryname(context):", "term, year, week in membership_count: if term == \"Lifetime\": lifetime += count else:", "term=membership.term) + '#rf-membership-anchor') @app.route('/memberships/<id>/edit') def memberships_edit(id): mem = Membership.query.get(id) return render_template('memberships/edit.html', membership=mem) @app.route('/memberships/<id>/delete',", "== price_for_term('Lifetime'): self.term = \"Lifetime\" else: return self.accuracy = 1 cmd = re.search(self.COMMAND_PATTERN,", "Membership.query.get(id) if g.sess.can('delete', mem): db.session.delete(mem) db.session.commit() return redirect(url_for('memberships_new')) @app.route('/memberships/search') def memberships_search(): query_string =", "def memberships_search(): query_string = request.args['q'] query = Membership.query.filter(Membership.valid_term) for part in query_string.split(): like_string", "import datetime, timedelta import time from pytz import timezone import pytz from functools", "d.replace(tzinfo=pytz.utc) return d.astimezone(tz) def latest_born_date(): now = datetime.now() now = now.replace(year=now.year-18) - timedelta(days", "= self.transaction.message[idx:] name = re.sub(r'^[^\\wæøåÆØÅ]+', '', name, re.U) name = re.sub(r'[^\\wæøåÆØÅ]+$', '', name,", "if self.state == \"resolved\": return \"success\" if self.state == \"pending\": return \"warning\" class", "trans_ids = [t.id for t in transactions] mapping = {} memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids))", "level = request.form[\"level\"] real_password = app.config['PASSWORDS'][request.form[\"level\"]] if real_password != request.form[\"password\"]: return sessions_new(error_message=\"Wrong password\")", "We can only delete our own memberships which are not settled if isinstance(thing,", "@app.route('/sessions/delete', methods=['POST']) def sessions_destroy(): g.sess.closed_at = datetime.utcnow() db.session.commit() logout() return redirect(url_for('index')) 
@app.route('/memberships/new') @requires('memberships_new')", "levels.index(level) def can(self, action, thing=None): if self.level == 'Superadmin': return True if action", "self._name = value self.queryname = value.lower() def is_free(self): return self.price == 0 ALPHABET", "re.U) self.name = name if cmd.group(1) and amount == price_for_term('Current'): self.accuracy = 2", "render_template('memberships/table.html', memberships=memberships, banned=banned) @app.route('/memberships/settle') @requires('settlement') def memberships_settle(): max_id = db.session.query(db.func.max(Membership.id)).scalar() if g.sess.can('settlement_all'): account", "import wraps from flask import Flask, render_template, request, redirect, url_for, jsonify, session, g,", "errors = [] if membership.name.strip() == '': errors.append(\"Name is required\") if len(errors) >", "= db.relationship(\"Session\", foreign_keys=[settled_by], backref=\"settled_memberships\") valid_term = (term == \"Lifetime\") | (term == app.config['TERM'])", "app.config['PRICE'] = 50 app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path, 'vipps-reports') app.config['PASSWORDS'] = { 'F<PASSWORD>': '<PASSWORD>', 'SM':", "db.session.commit() return redirect(url_for('memberships_new')) @app.route('/memberships/search') def memberships_search(): query_string = request.args['q'] query = Membership.query.filter(Membership.valid_term) for", "db.session.query(db.func.max(Membership.id)).scalar() if g.sess.can('settlement_all'): account = request.args.get('account', 'Entrance') else: account = \"Entrance\" sessions =", "request.form[\"max_id\"] if g.sess.can('settlement_all'): account = request.form['account'] else: account = \"Entrance\" update = db.update(Membership)", "return render_template('reports/lifetime.html', memberships=memberships) @app.route('/sessions') def sessions_list(): created = Membership.count_dict(Membership.created_by) settled = Membership.count_dict(Membership.settled_by) sessions", "= 
request.args.get('account', 'Entrance') else: account = \"Entrance\" sessions = db.session.query( db.func.count(Membership.created_by), db.func.sum(Membership.price), Session", "for t in transactions] mapping = {} memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for m in", "transaction self.memberships = memberships self.accuracy = 0 self.parse_transaction() def is_complete(self): return len(self.memberships) >", "vippsparser.load_transactions(self.file_path()) def bootstrap_class(self): if self.state == \"created\": return \"danger\" if self.state == \"uploaded\":", "else: setattr(g, 'sess', None) @app.context_processor def inject_helpers(): def localize(d): if d.tzinfo is None:", "mem = Membership.query.get(id) if g.sess.can('delete', mem): db.session.delete(mem) db.session.commit() return redirect(url_for('memberships_new')) @app.route('/memberships/search') def memberships_search():", "== 'H')) }) summary.sort(key=lambda k: k[\"sortkey\"], reverse=True) return render_template('reports.html', summary=summary, lifetime=lifetime) @app.route('/reports/lifetime') @requires('reports')", "Entrance/Wristband/BankAccount/Unknown vipps_transaction_id = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) created_by = db.Column(db.Integer, db.ForeignKey('session.id'),", "localize=localize, latest_born_date=latest_born_date, epoch=epoch ) def logout(): session.pop('session_id') def requires(action): def decorator(func): @wraps(func) def", "= SQLAlchemy(app) def compute_queryname(context): return context.current_parameters['name'].lower() class Membership(db.Model): id = db.Column(db.Integer, primary_key=True) _name", "db.session.commit() session[\"session_id\"] = sess.id return redirect(url_for('index')) @app.route('/sessions/switch', methods=['POST']) def sessions_switch(): new_session = Session(", "None setattr(g, 'sess', sess) else: setattr(g, 'sess', None) @app.context_processor def 
inject_helpers(): def localize(d):", "self.transaction = transaction self.memberships = memberships self.accuracy = 0 self.parse_transaction() def is_complete(self): return", "== 'delete': # We can only delete our own memberships which are not", "is None: return thing.created_by == self.id if action == 'edit': if isinstance(thing, Membership):", ".all() summary = { 'count': sum(count for count,_,_ in sessions), 'price': sum(price for", "for name, term, tid in zip(names, terms, tids): if tid not in accepted_tids:", "= cmd.end(0) name = self.transaction.message[idx:] name = re.sub(r'^[^\\wæøåÆØÅ]+', '', name, re.U) name =", "= request.form[\"max_id\"] if g.sess.can('settlement_all'): account = request.form['account'] else: account = \"Entrance\" update =", "account = \"Entrance\" sessions = db.session.query( db.func.count(Membership.created_by), db.func.sum(Membership.price), Session ) \\ .group_by(Membership.created_by) \\", "\"uploaded\" db.session.commit() return redirect(url_for('vipps_index')) @app.route('/vipps/<id>') def vipps_show(id): report = VippsReport.query.get(id) return render_template('vipps/show.html', report=report)", "timezone import pytz from functools import wraps from flask import Flask, render_template, request,", "terms = defaultdict(lambda: []) lifetime = 0 for count, term, year, week in", "> 0 def parse_transaction(self): amount = self.transaction.amount if amount == price_for_term('Current'): self.term =", "def memberships_destroy(id): mem = Membership.query.get(id) if g.sess.can('delete', mem): db.session.delete(mem) db.session.commit() return redirect(url_for('memberships_new')) @app.route('/memberships/search')", "Membership.query.filter(Membership.valid_term) for part in query_string.split(): like_string = '%' + part.lower() + '%' query", "app.config['TERM'] = \"V16\" app.config['PRICE'] = 50 app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path, 'vipps-reports') app.config['PASSWORDS'] = {", "{} for row in query: result[row[0]] 
= row[1] return result def price_for_term(term): if", "foreign_keys=[created_by], backref=\"created_memberships\") settled_session = db.relationship(\"Session\", foreign_keys=[settled_by], backref=\"settled_memberships\") valid_term = (term == \"Lifetime\") |", "thing.settled_by is None: return thing.created_by == self.id if action == 'edit': if isinstance(thing,", "query = db.session.query(column, db.func.count()).group_by(column) result = {} for row in query: result[row[0]] =", "self.ALPHABET[i] + code return code @classmethod def count_dict(cls, column): query = db.session.query(column, db.func.count()).group_by(column)", "term = request.args.get('term', app.config['TERM']) membership = Membership(term=term, account=\"Entrance\") membership.price = price_for_term(membership.term) return render_template('memberships/new.html',", "self.accuracy = 2 if cmd.group(2) and amount == price_for_term('Lifetime'): self.accuracy = 2 else:", "g.sess.closed_at = datetime.utcnow() db.session.commit() logout() return redirect(url_for('index')) @app.route('/memberships/new') @requires('memberships_new') def memberships_new(): last_memberships =", "epoch(d): start = datetime.utcfromtimestamp(0) return (d - start).total_seconds() return dict( localize=localize, latest_born_date=latest_born_date, epoch=epoch", "\"year\": int(term[1:]) + 2000, \"sortkey\": term[1:] + str(int(term[0] == 'H')) }) summary.sort(key=lambda k:", "membership=mem) @app.route('/memberships/<id>/delete', methods=['POST']) @requires('memberships_new') def memberships_destroy(id): mem = Membership.query.get(id) if g.sess.can('delete', mem): db.session.delete(mem)", "1 cmd = re.search(self.COMMAND_PATTERN, self.transaction.message, re.I) if cmd: idx = cmd.end(0) name =", "= datetime.utcnow() db.session.commit() session[\"session_id\"] = new_session.id return redirect(url_for('index')) @app.route('/sessions/delete', methods=['POST']) def sessions_destroy(): g.sess.closed_at", "* 10 else: return 
app.config['PRICE'] levels = ['Funk', 'SM', 'Admin', 'Superadmin'] class Session(db.Model):", "value self.queryname = value.lower() def is_free(self): return self.price == 0 ALPHABET = \"\".join(str(x)", "self.is_atleast('Admin') if action == 'wristband': return app.config['ENABLE_WRISTBAND'] if action == 'memberships_new': return True", "@app.route('/memberships/new') @requires('memberships_new') def memberships_new(): last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term = request.args.get('term', app.config['TERM']) membership =", "term == 'Lifetime': return app.config['PRICE'] * 10 else: return app.config['PRICE'] levels = ['Funk',", "nullable=True) created_session = db.relationship(\"Session\", foreign_keys=[created_by], backref=\"created_memberships\") settled_session = db.relationship(\"Session\", foreign_keys=[settled_by], backref=\"settled_memberships\") valid_term =", "mem = Membership( name=name, term=term, account=\"Vipps\", vipps_transaction_id=tid, created_by=g.sess.id, price=price_for_term(term) ) db.session.add(mem) report.state =", "db.Column(db.Integer, primary_key=True) _name = db.Column('name', db.Text, nullable=False) queryname = db.Column(db.Text, nullable=False) price =", "is missing\") sess = Session( level=level, user_name=request.form[\"name\"], description=request.form.get(\"description\", \"Unknown\"), ) db.session.add(sess) db.session.commit() session[\"session_id\"]", "id = db.Column(db.Integer, primary_key=True) state = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) def", "t in transactions] @app.before_request def before_request(): if 'session_id' in session: sess = Session.query.get(session['session_id'])", "import Environment, Bundle from sqlalchemy.ext.hybrid import hybrid_property import vippsparser app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI']", "'week', Membership.term) \\ .order_by('year', 'week') terms = 
defaultdict(lambda: []) lifetime = 0 for", "elif (datetime.now() - sess.created_at) > timedelta(days = 1): sess = None setattr(g, 'sess',", "\\ .order_by('year', 'week') terms = defaultdict(lambda: []) lifetime = 0 for count, term,", "vipps_import(): file = request.files['file'] report = VippsReport(state=\"created\") db.session.add(report) db.session.commit() file.save(report.file_path()) report.state = \"uploaded\"", "[]) lifetime = 0 for count, term, year, week in membership_count: if term", "session[\"session_id\"] = sess.id return redirect(url_for('index')) @app.route('/sessions/switch', methods=['POST']) def sessions_switch(): new_session = Session( level=g.sess.level,", "is_free(self): return self.price == 0 ALPHABET = \"\".join(str(x) for x in range(10)) ALPHABET", "return render_template('sessions/list.html', sessions=sessions, created=created, settled=settled) @app.route('/vipps') def vipps_index(): reports = VippsReport.query.order_by(VippsReport.created_at.desc()) return render_template('vipps/index.html',", "cmd.group(2) and amount == price_for_term('Lifetime'): self.accuracy = 2 else: self.name = \"%s %s\"", "term=request.form[\"term\"], account=request.form[\"account\"], created_by=g.sess.id ) membership.price = price_for_term(membership.term) if 'vipps_transaction_id' in request.form and g.sess.can('vipps'):", "render_template('reports/lifetime.html', memberships=memberships) @app.route('/sessions') def sessions_list(): created = Membership.count_dict(Membership.created_by) settled = Membership.count_dict(Membership.settled_by) sessions =", ".filter(Membership.id <= max_id) \\ .join(Membership.created_session) \\ .all() summary = { 'count': sum(count for", "== '': errors.append(\"Name is required\") if len(errors) > 0: return render_template('memberships/new.html', membership=membership, errors=errors)", "len(self.ALPHABET)) code = self.ALPHABET[i] + code return code @classmethod def count_dict(cls, column): query", "> timedelta(days = 1): sess = 
None setattr(g, 'sess', sess) else: setattr(g, 'sess',", "sessions are not valid elif (datetime.now() - sess.created_at) > timedelta(days = 1): sess", "while epoch > 0: epoch, i = divmod(epoch, len(self.ALPHABET)) code = self.ALPHABET[i] +", "transactions(self): return vippsparser.load_transactions(self.file_path()) def bootstrap_class(self): if self.state == \"created\": return \"danger\" if self.state", "summary = { 'count': sum(count for count,_,_ in sessions), 'price': sum(price for _,price,_", "db.session.commit() session[\"session_id\"] = new_session.id return redirect(url_for('index')) @app.route('/sessions/delete', methods=['POST']) def sessions_destroy(): g.sess.closed_at = datetime.utcnow()", "+ part.lower() + '%' query = query.filter(Membership.queryname.like(like_string)) limit = 10 memberships = list(query.order_by(db.desc('created_at')).limit(limit))", "db.Column(db.DateTime, default=datetime.utcnow, nullable=False) created_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=False) settled_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True)", "name = self.transaction.message[idx:] name = re.sub(r'^[^\\wæøåÆØÅ]+', '', name, re.U) name = re.sub(r'[^\\wæøåÆØÅ]+$', '',", "in memberships: if m.vipps_transaction_id not in mapping: mapping[m.vipps_transaction_id] = [] mapping[m.vipps_transaction_id].append(m) return [self.Entry(t,", "nullable=False) term = db.Column(db.Text, nullable=False) account = db.Column(db.Text, nullable=False) # Entrance/Wristband/BankAccount/Unknown vipps_transaction_id =", "sess) else: setattr(g, 'sess', None) @app.context_processor def inject_helpers(): def localize(d): if d.tzinfo is", "else: self.name = \"%s %s\" % (self.transaction.first_name, self.transaction.last_name) def entries(self): transactions = list(self.transactions())", "self.accuracy = 2 else: self.name = \"%s %s\" % (self.transaction.first_name, self.transaction.last_name) def entries(self):", "= [] 
app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True) tz = timezone(app.config['TIMEZONE']) assets = Environment(app) if 'WEBASSETS_DIR'", "not in mapping: mapping[m.vipps_transaction_id] = [] mapping[m.vipps_transaction_id].append(m) return [self.Entry(t, mapping.get(t.id, [])) for t", "= \"V16\" app.config['PRICE'] = 50 app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path, 'vipps-reports') app.config['PASSWORDS'] = { 'F<PASSWORD>':", "db.func.count()).group_by(column) result = {} for row in query: result[row[0]] = row[1] return result", "app.config['TERM']) membership = Membership(term=term, account=\"Entrance\") membership.price = price_for_term(membership.term) return render_template('memberships/new.html', membership=membership, last_memberships=last_memberships) @app.route('/memberships/new',", "cmd = re.search(self.COMMAND_PATTERN, self.transaction.message, re.I) if cmd: idx = cmd.end(0) name = self.transaction.message[idx:]", "inject_helpers(): def localize(d): if d.tzinfo is None: d = d.replace(tzinfo=pytz.utc) return d.astimezone(tz) def", "@app.route('/memberships/search') def memberships_search(): query_string = request.args['q'] query = Membership.query.filter(Membership.valid_term) for part in query_string.split():", "'delete': # We can only delete our own memberships which are not settled", "and amount == price_for_term('Current'): self.accuracy = 2 if cmd.group(2) and amount == price_for_term('Lifetime'):", "reports=reports) @app.route('/vipps', methods=['POST']) def vipps_import(): file = request.files['file'] report = VippsReport(state=\"created\") db.session.add(report) db.session.commit()", "from sqlalchemy.ext.hybrid import hybrid_property import vippsparser app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db' app.config['ASSETS_DEBUG']", "reports = VippsReport.query.order_by(VippsReport.created_at.desc()) return render_template('vipps/index.html', reports=reports) 
@app.route('/vipps', methods=['POST']) def vipps_import(): file = request.files['file']", "== \"Lifetime\") \\ .order_by(Membership.created_at.desc()) return render_template('reports/lifetime.html', memberships=memberships) @app.route('/sessions') def sessions_list(): created = Membership.count_dict(Membership.created_by)", "methods=['POST']) def sessions_create(): level = request.form[\"level\"] real_password = app.config['PASSWORDS'][request.form[\"level\"]] if real_password != request.form[\"password\"]:", "mapping[m.vipps_transaction_id].append(m) return [self.Entry(t, mapping.get(t.id, [])) for t in transactions] @app.before_request def before_request(): if", "self.name = \"%s %s\" % (self.transaction.first_name, self.transaction.last_name) def entries(self): transactions = list(self.transactions()) trans_ids", "= 2 else: self.name = \"%s %s\" % (self.transaction.first_name, self.transaction.last_name) def entries(self): transactions", "compute_queryname(context): return context.current_parameters['name'].lower() class Membership(db.Model): id = db.Column(db.Integer, primary_key=True) _name = db.Column('name', db.Text,", "errors=errors) db.session.add(membership) db.session.commit() return redirect(url_for('memberships_new', term=membership.term) + '#rf-membership-anchor') @app.route('/memberships/<id>/edit') def memberships_edit(id): mem =", "\"success\" if self.state == \"pending\": return \"warning\" class Entry: COMMAND_PATTERN = r'^([vh]\\d+)|(evig|evil)' def", "self.term = \"Lifetime\" else: return self.accuracy = 1 cmd = re.search(self.COMMAND_PATTERN, self.transaction.message, re.I)", "'Superadmin'] class Session(db.Model): id = db.Column(db.Integer, primary_key=True) description = db.Column(db.Text, nullable=False) level =", "membership=membership, errors=errors) db.session.add(membership) db.session.commit() return redirect(url_for('memberships_new', term=membership.term) + '#rf-membership-anchor') @app.route('/memberships/<id>/edit') def 
memberships_edit(id): mem", "\\ .values(settled_by=g.sess.id) \\ .values(queryname=Membership.queryname) db.session.execute(update) db.session.commit() return redirect(url_for('memberships_settle', account=account)) @app.route('/memberships') @requires('memberships_list') def memberships_list():", ".replace(\"I\", \"\") # too similar to 1 def code(self): # convert to Unix", "if term == \"Lifetime\": lifetime += count else: terms[term].append({\"count\": count, \"year\": int(year), \"week\":", "d.tzinfo is None: d = d.replace(tzinfo=pytz.utc) return d.astimezone(tz) def latest_born_date(): now = datetime.now()", "nullable=False) created_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=False) settled_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True) created_session =", "sum(r[\"count\"] for r in terms[term]), \"year\": int(term[1:]) + 2000, \"sortkey\": term[1:] + str(int(term[0]", "and amount == price_for_term('Lifetime'): self.accuracy = 2 else: self.name = \"%s %s\" %", "import SQLAlchemy from flask_assets import Environment, Bundle from sqlalchemy.ext.hybrid import hybrid_property import vippsparser", "Bundle from sqlalchemy.ext.hybrid import hybrid_property import vippsparser app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'", "user_name=request.form[\"name\"], description=g.sess.description ) db.session.add(new_session) g.sess.closed_at = datetime.utcnow() db.session.commit() session[\"session_id\"] = new_session.id return redirect(url_for('index'))", "= Membership.count_dict(Membership.created_by) settled = Membership.count_dict(Membership.settled_by) sessions = Session.query.order_by(db.desc('created_at')) return render_template('sessions/list.html', sessions=sessions, created=created, settled=settled)", "if m.vipps_transaction_id not in mapping: mapping[m.vipps_transaction_id] = [] mapping[m.vipps_transaction_id].append(m) return [self.Entry(t, mapping.get(t.id, []))", "= Session( 
level=level, user_name=request.form[\"name\"], description=request.form.get(\"description\", \"Unknown\"), ) db.session.add(sess) db.session.commit() session[\"session_id\"] = sess.id return", "in query: result[row[0]] = row[1] return result def price_for_term(term): if term == 'Lifetime':", "\"Entrance\" sessions = db.session.query( db.func.count(Membership.created_by), db.func.sum(Membership.price), Session ) \\ .group_by(Membership.created_by) \\ .filter(Membership.account ==", "- start).total_seconds() return dict( localize=localize, latest_born_date=latest_born_date, epoch=epoch ) def logout(): session.pop('session_id') def requires(action):", "@requires('memberships_new') def memberships_destroy(id): mem = Membership.query.get(id) if g.sess.can('delete', mem): db.session.delete(mem) db.session.commit() return redirect(url_for('memberships_new'))", "level): return levels.index(self.level) >= levels.index(level) def can(self, action, thing=None): if self.level == 'Superadmin':", "= os.getenv('WEBASSETS_DIR') db = SQLAlchemy(app) def compute_queryname(context): return context.current_parameters['name'].lower() class Membership(db.Model): id =", "if action == 'wristband': return app.config['ENABLE_WRISTBAND'] if action == 'memberships_new': return True if", "max_id = db.session.query(db.func.max(Membership.id)).scalar() if g.sess.can('settlement_all'): account = request.args.get('account', 'Entrance') else: account = \"Entrance\"", "0: tid = None membership.vipps_transaction_id = tid errors = [] if membership.name.strip() ==", "namedtuple import re import os from flask_sqlalchemy import SQLAlchemy from flask_assets import Environment,", "not valid elif (datetime.now() - sess.created_at) > timedelta(days = 1): sess = None", "for t in transactions] @app.before_request def before_request(): if 'session_id' in session: sess =", "< limit: # Search in blacklist banned = app.config[\"BLACKLIST\"] for part in query_string.split():", "if sess.closed_at is not None: sess = 
None # Old sessions are not", "def count_dict(cls, column): query = db.session.query(column, db.func.count()).group_by(column) result = {} for row in", "redirect(url_for('index')) @app.route('/memberships/new') @requires('memberships_new') def memberships_new(): last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term = request.args.get('term', app.config['TERM']) membership", "file_path(self): return os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\" % self.id) def transactions(self): return vippsparser.load_transactions(self.file_path()) def bootstrap_class(self): if", "password\") if not request.form[\"name\"]: return sessions_new(error_message=\"Name is missing\") sess = Session( level=level, user_name=request.form[\"name\"],", "max_id=max_id, account=account) @app.route('/memberships/settle', methods=['POST']) @requires('settlement') def memberships_settle_submit(): max_id = request.form[\"max_id\"] if g.sess.can('settlement_all'): account", "query: result[row[0]] = row[1] return result def price_for_term(term): if term == 'Lifetime': return", "request.args['level'] description = request.args['description'] return render_template('sessions/new.html', level=level, description=description, error_message=error_message) @app.route('/sessions/new', methods=['POST']) def sessions_create():", "settled if isinstance(thing, Membership): if thing.settled_by is None: return thing.created_by == self.id if", "max_id) \\ .values(settled_by=g.sess.id) \\ .values(queryname=Membership.queryname) db.session.execute(update) db.session.commit() return redirect(url_for('memberships_settle', account=account)) @app.route('/memberships') @requires('memberships_list') def", "id = db.Column(db.Integer, primary_key=True) _name = db.Column('name', db.Text, nullable=False) queryname = db.Column(db.Text, nullable=False)", "re.I) if cmd: idx = cmd.end(0) name = self.transaction.message[idx:] name = re.sub(r'^[^\\wæøåÆØÅ]+', 
'',", "== 0: tid = None membership.vipps_transaction_id = tid errors = [] if membership.name.strip()", "latest_born_date=latest_born_date, epoch=epoch ) def logout(): session.pop('session_id') def requires(action): def decorator(func): @wraps(func) def route(*args,", "accepted_tids = request.form.getlist(\"accepted_transaction_id\") for name, term, tid in zip(names, terms, tids): if tid", "= db.Column(db.Text, nullable=False) account = db.Column(db.Text, nullable=False) # Entrance/Wristband/BankAccount/Unknown vipps_transaction_id = db.Column(db.Text) created_at", "query.filter(Membership.queryname.like(like_string)) limit = 10 memberships = list(query.order_by(db.desc('created_at')).limit(limit)) banned = [] if len(memberships) <", "action == 'delete': # We can only delete our own memberships which are", "settled_session = db.relationship(\"Session\", foreign_keys=[settled_by], backref=\"settled_memberships\") valid_term = (term == \"Lifetime\") | (term ==", "if action == 'memberships_new': return True if action == 'reports': return self.is_atleast('Admin') if", "r'^([vh]\\d+)|(evig|evil)' def __init__(self, transaction, memberships): self.transaction = transaction self.memberships = memberships self.accuracy =", "row[1] return result def price_for_term(term): if term == 'Lifetime': return app.config['PRICE'] * 10", "sess = Session.query.get(session['session_id']) # Closed sessions are not valid if sess.closed_at is not", "term, tid in zip(names, terms, tids): if tid not in accepted_tids: continue mem", "'price': sum(price for _,price,_ in sessions), } return render_template('memberships/settle.html', sessions=sessions, summary=summary, max_id=max_id, account=account)", "self.transaction.message, re.I) if cmd: idx = cmd.end(0) name = self.transaction.message[idx:] name = re.sub(r'^[^\\wæøåÆØÅ]+',", "10 memberships = list(query.order_by(db.desc('created_at')).limit(limit)) banned = [] if len(memberships) < limit: # Search", "'WEBASSETS_DIR' in os.environ: 
assets.directory = os.getenv('WEBASSETS_DIR') db = SQLAlchemy(app) def compute_queryname(context): return context.current_parameters['name'].lower()", "def vipps_process(id): report = VippsReport.query.get(id) names = request.form.getlist(\"name\") terms = request.form.getlist(\"term\") tids =", "amount == price_for_term('Lifetime'): self.accuracy = 2 else: self.name = \"%s %s\" % (self.transaction.first_name,", "tids = request.form.getlist(\"transaction_id\") accepted_tids = request.form.getlist(\"accepted_transaction_id\") for name, term, tid in zip(names, terms,", "db.session.add(new_session) g.sess.closed_at = datetime.utcnow() db.session.commit() session[\"session_id\"] = new_session.id return redirect(url_for('index')) @app.route('/sessions/delete', methods=['POST']) def", "'sessions_list': return self.is_atleast('SM') if action == 'delete': # We can only delete our", "= row[1] return result def price_for_term(term): if term == 'Lifetime': return app.config['PRICE'] *", "if g.sess.can('delete', mem): db.session.delete(mem) db.session.commit() return redirect(url_for('memberships_new')) @app.route('/memberships/search') def memberships_search(): query_string = request.args['q']", "= db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) closed_at = db.Column(db.DateTime, nullable=True) def is_atleast(self,", "code(self): # convert to Unix epoch epoch = int(time.mktime(self.created_at.timetuple())) code = \"\" while", "g.sess is None: return render_template('index.html') else: return redirect(url_for('memberships_new')) @app.route('/sessions/new') def sessions_new(error_message=None): level =", "!= request.form[\"password\"]: return sessions_new(error_message=\"Wrong password\") if not request.form[\"name\"]: return sessions_new(error_message=\"Name is missing\") sess", "action == 'sessions_list': return self.is_atleast('SM') if action == 'delete': # We can only", ") db.session.add(sess) db.session.commit() 
session[\"session_id\"] = sess.id return redirect(url_for('index')) @app.route('/sessions/switch', methods=['POST']) def sessions_switch(): new_session", "re.search(self.COMMAND_PATTERN, self.transaction.message, re.I) if cmd: idx = cmd.end(0) name = self.transaction.message[idx:] name =", "utf-8 import string from datetime import datetime, timedelta import time from pytz import", "query_string = request.args['q'] query = Membership.query.filter(Membership.valid_term) for part in query_string.split(): like_string = '%'", "wraps from flask import Flask, render_template, request, redirect, url_for, jsonify, session, g, abort", "not valid if sess.closed_at is not None: sess = None # Old sessions", "session.pop('session_id') def requires(action): def decorator(func): @wraps(func) def route(*args, **kwargs): if g.sess and g.sess.can(action):", "**kwargs) else: abort(404) return route return decorator @app.route('/') def index(): if g.sess is", "redirect(url_for('index')) @app.route('/sessions/switch', methods=['POST']) def sessions_switch(): new_session = Session( level=g.sess.level, user_name=request.form[\"name\"], description=g.sess.description ) db.session.add(new_session)", "return render_template('vipps/show.html', report=report) @app.route('/vipps/<id>', methods=['POST']) def vipps_process(id): report = VippsReport.query.get(id) names = request.form.getlist(\"name\")", "app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path, 'vipps-reports') app.config['PASSWORDS'] = { 'F<PASSWORD>': '<PASSWORD>', 'SM': 'sm', 'Admin': 'admin',", "app.config['BLACKLIST'] = [] app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True) tz = timezone(app.config['TIMEZONE']) assets = Environment(app) if", "count_dict(cls, column): query = db.session.query(column, db.func.count()).group_by(column) result = {} for row in query:", "func(*args, **kwargs) else: abort(404) return route return decorator @app.route('/') def index(): if g.sess", 
"Membership(term=term, account=\"Entrance\") membership.price = price_for_term(membership.term) return render_template('memberships/new.html', membership=membership, last_memberships=last_memberships) @app.route('/memberships/new', methods=['POST']) @requires('memberships_new') def", "50 app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path, 'vipps-reports') app.config['PASSWORDS'] = { 'F<PASSWORD>': '<PASSWORD>', 'SM': 'sm', 'Admin':", "vipps_transaction_id = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) created_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=False)", "True return False class VippsReport(db.Model): id = db.Column(db.Integer, primary_key=True) state = db.Column(db.Text) created_at", "request.form.getlist(\"transaction_id\") accepted_tids = request.form.getlist(\"accepted_transaction_id\") for name, term, tid in zip(names, terms, tids): if", "only delete our own memberships which are not settled if isinstance(thing, Membership): if", ".order_by(Membership.created_at.desc()) return render_template('reports/lifetime.html', memberships=memberships) @app.route('/sessions') def sessions_list(): created = Membership.count_dict(Membership.created_by) settled = Membership.count_dict(Membership.settled_by)", "matches = lambda name: part.lower() in name.lower() banned = filter(matches, banned) return render_template('memberships/table.html',", "account) \\ .where(Membership.settled_by == None) \\ .where(Membership.id <= max_id) \\ .values(settled_by=g.sess.id) \\ .values(queryname=Membership.queryname)", "tid errors = [] if membership.name.strip() == '': errors.append(\"Name is required\") if len(errors)", "def localize(d): if d.tzinfo is None: d = d.replace(tzinfo=pytz.utc) return d.astimezone(tz) def latest_born_date():", "g.sess.can('delete', mem): db.session.delete(mem) db.session.commit() return redirect(url_for('memberships_new')) @app.route('/memberships/search') def 
memberships_search(): query_string = request.args['q'] query", "memberships_new(): last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term = request.args.get('term', app.config['TERM']) membership = Membership(term=term, account=\"Entrance\") membership.price", "created_session = db.relationship(\"Session\", foreign_keys=[created_by], backref=\"created_memberships\") settled_session = db.relationship(\"Session\", foreign_keys=[settled_by], backref=\"settled_memberships\") valid_term = (term", "@app.route('/memberships/settle', methods=['POST']) @requires('settlement') def memberships_settle_submit(): max_id = request.form[\"max_id\"] if g.sess.can('settlement_all'): account = request.form['account']", "db.Column(db.Integer, db.ForeignKey('session.id'), nullable=False) settled_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True) created_session = db.relationship(\"Session\", foreign_keys=[created_by], backref=\"created_memberships\")", "== 'Lifetime': return app.config['PRICE'] * 10 else: return app.config['PRICE'] levels = ['Funk', 'SM',", "db.Column(db.Integer, primary_key=True) description = db.Column(db.Text, nullable=False) level = db.Column(db.Text, nullable=False) user_name = db.Column(db.Text)", "'Entrance') else: account = \"Entrance\" sessions = db.session.query( db.func.count(Membership.created_by), db.func.sum(Membership.price), Session ) \\", "from functools import wraps from flask import Flask, render_template, request, redirect, url_for, jsonify,", "created_by=g.sess.id, price=price_for_term(term) ) db.session.add(mem) report.state = request.form[\"state\"] db.session.commit() return redirect(url_for('vipps_index')) @app.errorhandler(404) def page_not_found(e):", "@name.setter def name(self, value): self._name = value self.queryname = value.lower() def is_free(self): return", "Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term = 
request.args.get('term', app.config['TERM']) membership = Membership(term=term, account=\"Entrance\") membership.price = price_for_term(membership.term) return", "id = db.Column(db.Integer, primary_key=True) description = db.Column(db.Text, nullable=False) level = db.Column(db.Text, nullable=False) user_name", "if membership.name.strip() == '': errors.append(\"Name is required\") if len(errors) > 0: return render_template('memberships/new.html',", "account=account)) @app.route('/memberships') @requires('memberships_list') def memberships_list(): memberships = Membership.query.all() return render_template('memberships/list.html', memberships=memberships) @app.route('/reports') @requires('reports')", "None membership.vipps_transaction_id = tid errors = [] if membership.name.strip() == '': errors.append(\"Name is", "= divmod(epoch, len(self.ALPHABET)) code = self.ALPHABET[i] + code return code @classmethod def count_dict(cls,", "self.is_atleast('SM') if action == 'settlement_all': return self.is_atleast('Admin') if action == 'wristband': return app.config['ENABLE_WRISTBAND']", "summary.append({ \"name\": term, \"rows\": terms[term], \"total\": sum(r[\"count\"] for r in terms[term]), \"year\": int(term[1:])", "= \"uploaded\" db.session.commit() return redirect(url_for('vipps_index')) @app.route('/vipps/<id>') def vipps_show(id): report = VippsReport.query.get(id) return render_template('vipps/show.html',", "def price_for_term(term): if term == 'Lifetime': return app.config['PRICE'] * 10 else: return app.config['PRICE']", "membership.price = price_for_term(membership.term) if 'vipps_transaction_id' in request.form and g.sess.can('vipps'): tid = request.form['vipps_transaction_id'].strip() if", "= new_session.id return redirect(url_for('index')) @app.route('/sessions/delete', methods=['POST']) def sessions_destroy(): g.sess.closed_at = datetime.utcnow() db.session.commit() logout()", "- sess.created_at) > timedelta(days = 1): sess = None setattr(g, 'sess', sess) else:", 
"= db.session.query( db.func.count(Membership.created_by), db.func.sum(Membership.price), Session ) \\ .group_by(Membership.created_by) \\ .filter(Membership.account == account) \\", "app.config['ENABLE_WRISTBAND'] if action == 'memberships_new': return True if action == 'reports': return self.is_atleast('Admin')", "Search in blacklist banned = app.config[\"BLACKLIST\"] for part in query_string.split(): matches = lambda", "methods=['POST']) def sessions_switch(): new_session = Session( level=g.sess.level, user_name=request.form[\"name\"], description=g.sess.description ) db.session.add(new_session) g.sess.closed_at =", "like_string = '%' + part.lower() + '%' query = query.filter(Membership.queryname.like(like_string)) limit = 10", "app.config['PASSWORDS'][request.form[\"level\"]] if real_password != request.form[\"password\"]: return sessions_new(error_message=\"Wrong password\") if not request.form[\"name\"]: return sessions_new(error_message=\"Name", "sess.closed_at is not None: sess = None # Old sessions are not valid", "= '%' + part.lower() + '%' query = query.filter(Membership.queryname.like(like_string)) limit = 10 memberships", "= [] mapping[m.vipps_transaction_id].append(m) return [self.Entry(t, mapping.get(t.id, [])) for t in transactions] @app.before_request def", "def memberships_settle(): max_id = db.session.query(db.func.max(Membership.id)).scalar() if g.sess.can('settlement_all'): account = request.args.get('account', 'Entrance') else: account", "query_string.split(): like_string = '%' + part.lower() + '%' query = query.filter(Membership.queryname.like(like_string)) limit =", "= 2 if cmd.group(2) and amount == price_for_term('Lifetime'): self.accuracy = 2 else: self.name", "if thing.settled_by is None: return thing.created_by == self.id if action == 'edit': if", ".group_by(Membership.created_by) \\ .filter(Membership.account == account) \\ .filter(Membership.settled_by == None) \\ .filter(Membership.id <= max_id)", "= db.Column(db.Integer, 
db.ForeignKey('session.id'), nullable=False) settled_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True) created_session = db.relationship(\"Session\", foreign_keys=[created_by],", "db.Column(db.Text, nullable=False) # Entrance/Wristband/BankAccount/Unknown vipps_transaction_id = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) created_by", "account = request.form['account'] else: account = \"Entrance\" update = db.update(Membership) \\ .where(Membership.account ==", "levels = ['Funk', 'SM', 'Admin', 'Superadmin'] class Session(db.Model): id = db.Column(db.Integer, primary_key=True) description", "Unix epoch epoch = int(time.mktime(self.created_at.timetuple())) code = \"\" while epoch > 0: epoch,", "transactions] @app.before_request def before_request(): if 'session_id' in session: sess = Session.query.get(session['session_id']) # Closed", "from pytz import timezone import pytz from functools import wraps from flask import", "mem): db.session.delete(mem) db.session.commit() return redirect(url_for('memberships_new')) @app.route('/memberships/search') def memberships_search(): query_string = request.args['q'] query =", "session: sess = Session.query.get(session['session_id']) # Closed sessions are not valid if sess.closed_at is", "from flask import Flask, render_template, request, redirect, url_for, jsonify, session, g, abort from", "db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) created_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=False) settled_by =", "'settlement_all': return self.is_atleast('Admin') if action == 'wristband': return app.config['ENABLE_WRISTBAND'] if action == 'memberships_new':", "= db.session.query(db.func.max(Membership.id)).scalar() if g.sess.can('settlement_all'): account = request.args.get('account', 'Entrance') else: account = \"Entrance\" sessions", "can only delete our own memberships which are not settled 
if isinstance(thing, Membership):", "= Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term = request.args.get('term', app.config['TERM']) membership = Membership(term=term, account=\"Entrance\") membership.price = price_for_term(membership.term)", "# coding: utf-8 import string from datetime import datetime, timedelta import time from", "= db.Column('name', db.Text, nullable=False) queryname = db.Column(db.Text, nullable=False) price = db.Column(db.Integer, nullable=False) term", "self.name = name if cmd.group(1) and amount == price_for_term('Current'): self.accuracy = 2 if", "\\ .values(queryname=Membership.queryname) db.session.execute(update) db.session.commit() return redirect(url_for('memberships_settle', account=account)) @app.route('/memberships') @requires('memberships_list') def memberships_list(): memberships =", "'F<PASSWORD>': '<PASSWORD>', 'SM': 'sm', 'Admin': 'admin', 'Superadmin': 'superadmin', } app.config['BLACKLIST'] = [] app.config.from_pyfile(os.getenv('CONFIG_FILE',", "new_session = Session( level=g.sess.level, user_name=request.form[\"name\"], description=g.sess.description ) db.session.add(new_session) g.sess.closed_at = datetime.utcnow() db.session.commit() session[\"session_id\"]", "primary_key=True) state = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) def file_path(self): return os.path.join(app.config['VIPPS_STORAGE_PATH'],", "(self.transaction.first_name, self.transaction.last_name) def entries(self): transactions = list(self.transactions()) trans_ids = [t.id for t in", "= name if cmd.group(1) and amount == price_for_term('Current'): self.accuracy = 2 if cmd.group(2)", "return app.config['PRICE'] levels = ['Funk', 'SM', 'Admin', 'Superadmin'] class Session(db.Model): id = db.Column(db.Integer,", "def vipps_index(): reports = VippsReport.query.order_by(VippsReport.created_at.desc()) return render_template('vipps/index.html', reports=reports) 
@app.route('/vipps', methods=['POST']) def vipps_import(): file", "'%' query = query.filter(Membership.queryname.like(like_string)) limit = 10 memberships = list(query.order_by(db.desc('created_at')).limit(limit)) banned = []", "\\ .filter(Membership.settled_by == None) \\ .filter(Membership.id <= max_id) \\ .join(Membership.created_session) \\ .all() summary", "request.args.get('term', app.config['TERM']) membership = Membership(term=term, account=\"Entrance\") membership.price = price_for_term(membership.term) return render_template('memberships/new.html', membership=membership, last_memberships=last_memberships)", "return \"danger\" if self.state == \"uploaded\": return \"\" if self.state == \"resolved\": return", "} return render_template('memberships/settle.html', sessions=sessions, summary=summary, max_id=max_id, account=account) @app.route('/memberships/settle', methods=['POST']) @requires('settlement') def memberships_settle_submit(): max_id", "hybrid_property import vippsparser app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db' app.config['ASSETS_DEBUG'] = True app.config['SECRET_KEY']", "redirect(url_for('memberships_new')) @app.route('/memberships/search') def memberships_search(): query_string = request.args['q'] query = Membership.query.filter(Membership.valid_term) for part in", "@classmethod def count_dict(cls, column): query = db.session.query(column, db.func.count()).group_by(column) result = {} for row", "datetime.utcnow() db.session.commit() logout() return redirect(url_for('index')) @app.route('/memberships/new') @requires('memberships_new') def memberships_new(): last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term", "report = VippsReport(state=\"created\") db.session.add(report) db.session.commit() file.save(report.file_path()) report.state = \"uploaded\" db.session.commit() return redirect(url_for('vipps_index')) @app.route('/vipps/<id>')", "def 
memberships_settle_submit(): max_id = request.form[\"max_id\"] if g.sess.can('settlement_all'): account = request.form['account'] else: account =", "['Funk', 'SM', 'Admin', 'Superadmin'] class Session(db.Model): id = db.Column(db.Integer, primary_key=True) description = db.Column(db.Text,", "amount = self.transaction.amount if amount == price_for_term('Current'): self.term = app.config['TERM'] elif amount ==", "if action == 'reports': return self.is_atleast('Admin') if action == 'sessions_list': return self.is_atleast('SM') if", "epoch, i = divmod(epoch, len(self.ALPHABET)) code = self.ALPHABET[i] + code return code @classmethod", "= r'^([vh]\\d+)|(evig|evil)' def __init__(self, transaction, memberships): self.transaction = transaction self.memberships = memberships self.accuracy", "default=datetime.utcnow, nullable=False) created_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=False) settled_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True) created_session", "banned = app.config[\"BLACKLIST\"] for part in query_string.split(): matches = lambda name: part.lower() in", "return redirect(url_for('index')) @app.route('/sessions/delete', methods=['POST']) def sessions_destroy(): g.sess.closed_at = datetime.utcnow() db.session.commit() logout() return redirect(url_for('index'))", "if len(errors) > 0: return render_template('memberships/new.html', membership=membership, errors=errors) db.session.add(membership) db.session.commit() return redirect(url_for('memberships_new', term=membership.term)", "user_name=request.form[\"name\"], description=request.form.get(\"description\", \"Unknown\"), ) db.session.add(sess) db.session.commit() session[\"session_id\"] = sess.id return redirect(url_for('index')) @app.route('/sessions/switch', methods=['POST'])", "return render_template('memberships/table.html', memberships=memberships, banned=banned) @app.route('/memberships/settle') @requires('settlement') def memberships_settle(): max_id = 
db.session.query(db.func.max(Membership.id)).scalar() if g.sess.can('settlement_all'):", ") db.session.add(new_session) g.sess.closed_at = datetime.utcnow() db.session.commit() session[\"session_id\"] = new_session.id return redirect(url_for('index')) @app.route('/sessions/delete', methods=['POST'])", "\\ .group_by('year', 'week', Membership.term) \\ .order_by('year', 'week') terms = defaultdict(lambda: []) lifetime =", "last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term = request.args.get('term', app.config['TERM']) membership = Membership(term=term, account=\"Entrance\") membership.price =", "tid = None membership.vipps_transaction_id = tid errors = [] if membership.name.strip() == '':", "return self._name @name.setter def name(self, value): self._name = value self.queryname = value.lower() def", "redirect(url_for('memberships_new')) @app.route('/sessions/new') def sessions_new(error_message=None): level = request.args['level'] description = request.args['description'] return render_template('sessions/new.html', level=level,", "in session: sess = Session.query.get(session['session_id']) # Closed sessions are not valid if sess.closed_at", "self.state == \"pending\": return \"warning\" class Entry: COMMAND_PATTERN = r'^([vh]\\d+)|(evig|evil)' def __init__(self, transaction,", "[self.Entry(t, mapping.get(t.id, [])) for t in transactions] @app.before_request def before_request(): if 'session_id' in", "} app.config['BLACKLIST'] = [] app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True) tz = timezone(app.config['TIMEZONE']) assets = Environment(app)", "\"\") # too similar to 1 def code(self): # convert to Unix epoch", "}) summary.sort(key=lambda k: k[\"sortkey\"], reverse=True) return render_template('reports.html', summary=summary, lifetime=lifetime) @app.route('/reports/lifetime') @requires('reports') def reports_lifetime():", "== account) \\ .where(Membership.settled_by == None) 
\\ .where(Membership.id <= max_id) \\ .values(settled_by=g.sess.id) \\", "def sessions_switch(): new_session = Session( level=g.sess.level, user_name=request.form[\"name\"], description=g.sess.description ) db.session.add(new_session) g.sess.closed_at = datetime.utcnow()", "error_message=error_message) @app.route('/sessions/new', methods=['POST']) def sessions_create(): level = request.form[\"level\"] real_password = app.config['PASSWORDS'][request.form[\"level\"]] if real_password", "= request.form['account'] else: account = \"Entrance\" update = db.update(Membership) \\ .where(Membership.account == account)", ") db.session.add(mem) report.state = request.form[\"state\"] db.session.commit() return redirect(url_for('vipps_index')) @app.errorhandler(404) def page_not_found(e): return render_template('404.html'),", "'%' + part.lower() + '%' query = query.filter(Membership.queryname.like(like_string)) limit = 10 memberships =", "return len(self.memberships) > 0 def parse_transaction(self): amount = self.transaction.amount if amount == price_for_term('Current'):", "result def price_for_term(term): if term == 'Lifetime': return app.config['PRICE'] * 10 else: return", "{} memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for m in memberships: if m.vipps_transaction_id not in mapping:", "re.sub(r'[^\\wæøåÆØÅ]+$', '', name, re.U) self.name = name if cmd.group(1) and amount == price_for_term('Current'):", "return [self.Entry(t, mapping.get(t.id, [])) for t in transactions] @app.before_request def before_request(): if 'session_id'", "return redirect(url_for('index')) @app.route('/sessions/switch', methods=['POST']) def sessions_switch(): new_session = Session( level=g.sess.level, user_name=request.form[\"name\"], description=g.sess.description )", "in transactions] @app.before_request def before_request(): if 'session_id' in session: sess = Session.query.get(session['session_id']) #", "def __init__(self, transaction, memberships): 
self.transaction = transaction self.memberships = memberships self.accuracy = 0", "now = datetime.now() now = now.replace(year=now.year-18) - timedelta(days = 1) return now def", "\"V16\" app.config['PRICE'] = 50 app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path, 'vipps-reports') app.config['PASSWORDS'] = { 'F<PASSWORD>': '<PASSWORD>',", "user_name = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) closed_at = db.Column(db.DateTime, nullable=True) def", "VippsReport.query.get(id) return render_template('vipps/show.html', report=report) @app.route('/vipps/<id>', methods=['POST']) def vipps_process(id): report = VippsReport.query.get(id) names =", "= db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) def file_path(self): return os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\" %", "= \"Entrance\" update = db.update(Membership) \\ .where(Membership.account == account) \\ .where(Membership.settled_by == None)", "zip(names, terms, tids): if tid not in accepted_tids: continue mem = Membership( name=name,", "calendar import month_name from collections import defaultdict, namedtuple import re import os from", "string from datetime import datetime, timedelta import time from pytz import timezone import", "= request.form.getlist(\"name\") terms = request.form.getlist(\"term\") tids = request.form.getlist(\"transaction_id\") accepted_tids = request.form.getlist(\"accepted_transaction_id\") for name,", "description=description, error_message=error_message) @app.route('/sessions/new', methods=['POST']) def sessions_create(): level = request.form[\"level\"] real_password = app.config['PASSWORDS'][request.form[\"level\"]] if", "== 'wristband': return app.config['ENABLE_WRISTBAND'] if action == 'memberships_new': return True if action ==", "d = d.replace(tzinfo=pytz.utc) return d.astimezone(tz) def latest_born_date(): now = datetime.now() now = 
now.replace(year=now.year-18)", "abort(404) return route return decorator @app.route('/') def index(): if g.sess is None: return", "and g.sess.can(action): return func(*args, **kwargs) else: abort(404) return route return decorator @app.route('/') def", "db.func.strftime('%Y', Membership.created_at).label('year'), db.func.strftime('%W', Membership.created_at).label('week') ) \\ .group_by('year', 'week', Membership.term) \\ .order_by('year', 'week') terms", "mapping = {} memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for m in memberships: if m.vipps_transaction_id not", "render_template('memberships/new.html', membership=membership, errors=errors) db.session.add(membership) db.session.commit() return redirect(url_for('memberships_new', term=membership.term) + '#rf-membership-anchor') @app.route('/memberships/<id>/edit') def memberships_edit(id):", "== 'settlement_all': return self.is_atleast('Admin') if action == 'wristband': return app.config['ENABLE_WRISTBAND'] if action ==", "foreign_keys=[settled_by], backref=\"settled_memberships\") valid_term = (term == \"Lifetime\") | (term == app.config['TERM']) @hybrid_property def", "= price_for_term(membership.term) return render_template('memberships/new.html', membership=membership, last_memberships=last_memberships) @app.route('/memberships/new', methods=['POST']) @requires('memberships_new') def memberships_create(): membership =", "timedelta import time from pytz import timezone import pytz from functools import wraps", "sessions=sessions, summary=summary, max_id=max_id, account=account) @app.route('/memberships/settle', methods=['POST']) @requires('settlement') def memberships_settle_submit(): max_id = request.form[\"max_id\"] if", "os.environ: assets.directory = os.getenv('WEBASSETS_DIR') db = SQLAlchemy(app) def compute_queryname(context): return context.current_parameters['name'].lower() class Membership(db.Model):", "db.Column(db.DateTime, default=datetime.utcnow, 
nullable=False) def file_path(self): return os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\" % self.id) def transactions(self): return", "coding: utf-8 import string from datetime import datetime, timedelta import time from pytz", "return redirect(url_for('memberships_new')) @app.route('/sessions/new') def sessions_new(error_message=None): level = request.args['level'] description = request.args['description'] return render_template('sessions/new.html',", "self.term = app.config['TERM'] elif amount == price_for_term('Lifetime'): self.term = \"Lifetime\" else: return self.accuracy", "request.form['vipps_transaction_id'].strip() if len(tid) == 0: tid = None membership.vipps_transaction_id = tid errors =", "sessions_switch(): new_session = Session( level=g.sess.level, user_name=request.form[\"name\"], description=g.sess.description ) db.session.add(new_session) g.sess.closed_at = datetime.utcnow() db.session.commit()", "import Flask, render_template, request, redirect, url_for, jsonify, session, g, abort from calendar import", "level=level, description=description, error_message=error_message) @app.route('/sessions/new', methods=['POST']) def sessions_create(): level = request.form[\"level\"] real_password = app.config['PASSWORDS'][request.form[\"level\"]]", "pytz import timezone import pytz from functools import wraps from flask import Flask,", ".replace(\"O\", \"\")\\ .replace(\"I\", \"\") # too similar to 1 def code(self): # convert", "\"Entrance\" update = db.update(Membership) \\ .where(Membership.account == account) \\ .where(Membership.settled_by == None) \\", "request.form.getlist(\"accepted_transaction_id\") for name, term, tid in zip(names, terms, tids): if tid not in", "is_atleast(self, level): return levels.index(self.level) >= levels.index(level) def can(self, action, thing=None): if self.level ==", "sess = None setattr(g, 'sess', sess) else: setattr(g, 'sess', None) @app.context_processor def inject_helpers():", "def reports(): 
membership_count = db.session.query( db.func.count(Membership.id), Membership.term, db.func.strftime('%Y', Membership.created_at).label('year'), db.func.strftime('%W', Membership.created_at).label('week') ) \\", "db.Column(db.Integer, nullable=False) term = db.Column(db.Text, nullable=False) account = db.Column(db.Text, nullable=False) # Entrance/Wristband/BankAccount/Unknown vipps_transaction_id", "code = \"\" while epoch > 0: epoch, i = divmod(epoch, len(self.ALPHABET)) code", "now.replace(year=now.year-18) - timedelta(days = 1) return now def epoch(d): start = datetime.utcfromtimestamp(0) return", "term == \"Lifetime\": lifetime += count else: terms[term].append({\"count\": count, \"year\": int(year), \"week\": week})", "elif amount == price_for_term('Lifetime'): self.term = \"Lifetime\" else: return self.accuracy = 1 cmd", "db.session.execute(update) db.session.commit() return redirect(url_for('memberships_settle', account=account)) @app.route('/memberships') @requires('memberships_list') def memberships_list(): memberships = Membership.query.all() return", "summary.sort(key=lambda k: k[\"sortkey\"], reverse=True) return render_template('reports.html', summary=summary, lifetime=lifetime) @app.route('/reports/lifetime') @requires('reports') def reports_lifetime(): memberships", "= 0 self.parse_transaction() def is_complete(self): return len(self.memberships) > 0 def parse_transaction(self): amount =", "sess.id return redirect(url_for('index')) @app.route('/sessions/switch', methods=['POST']) def sessions_switch(): new_session = Session( level=g.sess.level, user_name=request.form[\"name\"], description=g.sess.description", "primary_key=True) description = db.Column(db.Text, nullable=False) level = db.Column(db.Text, nullable=False) user_name = db.Column(db.Text) created_at", "Entry: COMMAND_PATTERN = r'^([vh]\\d+)|(evig|evil)' def __init__(self, transaction, memberships): self.transaction = transaction self.memberships =", "db.session.query(column, 
db.func.count()).group_by(column) result = {} for row in query: result[row[0]] = row[1] return", "app.config['TERM']) @hybrid_property def name(self): return self._name @name.setter def name(self, value): self._name = value", "\\ .all() summary = { 'count': sum(count for count,_,_ in sessions), 'price': sum(price", "term, \"rows\": terms[term], \"total\": sum(r[\"count\"] for r in terms[term]), \"year\": int(term[1:]) + 2000,", "\"week\": week}) summary = [] for term in terms: summary.append({ \"name\": term, \"rows\":", "def memberships_create(): membership = Membership( name=request.form[\"name\"], term=request.form[\"term\"], account=request.form[\"account\"], created_by=g.sess.id ) membership.price = price_for_term(membership.term)", "%s\" % (self.transaction.first_name, self.transaction.last_name) def entries(self): transactions = list(self.transactions()) trans_ids = [t.id for", "= Membership.query.get(id) if g.sess.can('delete', mem): db.session.delete(mem) db.session.commit() return redirect(url_for('memberships_new')) @app.route('/memberships/search') def memberships_search(): query_string", "x in range(10)) ALPHABET += string.ascii_uppercase ALPHABET = ALPHABET\\ .replace(\"O\", \"\")\\ .replace(\"I\", \"\")", "@requires('memberships_new') def memberships_new(): last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term = request.args.get('term', app.config['TERM']) membership = Membership(term=term,", "(datetime.now() - sess.created_at) > timedelta(days = 1): sess = None setattr(g, 'sess', sess)", "real_password != request.form[\"password\"]: return sessions_new(error_message=\"Wrong password\") if not request.form[\"name\"]: return sessions_new(error_message=\"Name is missing\")", "app.config[\"BLACKLIST\"] for part in query_string.split(): matches = lambda name: part.lower() in name.lower() banned", "nullable=False) settled_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True) 
created_session = db.relationship(\"Session\", foreign_keys=[created_by], backref=\"created_memberships\") settled_session =", "self.state == \"resolved\": return \"success\" if self.state == \"pending\": return \"warning\" class Entry:", "amount == price_for_term('Lifetime'): self.term = \"Lifetime\" else: return self.accuracy = 1 cmd =", "from collections import defaultdict, namedtuple import re import os from flask_sqlalchemy import SQLAlchemy", "datetime, timedelta import time from pytz import timezone import pytz from functools import", "db.Column(db.Text, nullable=False) user_name = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) closed_at = db.Column(db.DateTime,", "\"Lifetime\" else: return self.accuracy = 1 cmd = re.search(self.COMMAND_PATTERN, self.transaction.message, re.I) if cmd:", "render_template('memberships/list.html', memberships=memberships) @app.route('/reports') @requires('reports') def reports(): membership_count = db.session.query( db.func.count(Membership.id), Membership.term, db.func.strftime('%Y', Membership.created_at).label('year'),", "\\ .filter(Membership.account == account) \\ .filter(Membership.settled_by == None) \\ .filter(Membership.id <= max_id) \\", "assets = Environment(app) if 'WEBASSETS_DIR' in os.environ: assets.directory = os.getenv('WEBASSETS_DIR') db = SQLAlchemy(app)", "return os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\" % self.id) def transactions(self): return vippsparser.load_transactions(self.file_path()) def bootstrap_class(self): if self.state", "else: abort(404) return route return decorator @app.route('/') def index(): if g.sess is None:", "db = SQLAlchemy(app) def compute_queryname(context): return context.current_parameters['name'].lower() class Membership(db.Model): id = db.Column(db.Integer, primary_key=True)", "banned = filter(matches, banned) return render_template('memberships/table.html', memberships=memberships, banned=banned) 
@app.route('/memberships/settle') @requires('settlement') def memberships_settle(): max_id", "action, thing=None): if self.level == 'Superadmin': return True if action == 'settlement': return", "Flask, render_template, request, redirect, url_for, jsonify, session, g, abort from calendar import month_name", "= sess.id return redirect(url_for('index')) @app.route('/sessions/switch', methods=['POST']) def sessions_switch(): new_session = Session( level=g.sess.level, user_name=request.form[\"name\"],", "== \"Lifetime\") | (term == app.config['TERM']) @hybrid_property def name(self): return self._name @name.setter def", "= request.form.getlist(\"accepted_transaction_id\") for name, term, tid in zip(names, terms, tids): if tid not", "valid elif (datetime.now() - sess.created_at) > timedelta(days = 1): sess = None setattr(g,", "True if action == 'reports': return self.is_atleast('Admin') if action == 'sessions_list': return self.is_atleast('SM')", "now = now.replace(year=now.year-18) - timedelta(days = 1) return now def epoch(d): start =", "def index(): if g.sess is None: return render_template('index.html') else: return redirect(url_for('memberships_new')) @app.route('/sessions/new') def", "request.args['q'] query = Membership.query.filter(Membership.valid_term) for part in query_string.split(): like_string = '%' + part.lower()", "== \"resolved\": return \"success\" if self.state == \"pending\": return \"warning\" class Entry: COMMAND_PATTERN", "db.session.commit() file.save(report.file_path()) report.state = \"uploaded\" db.session.commit() return redirect(url_for('vipps_index')) @app.route('/vipps/<id>') def vipps_show(id): report =", "collections import defaultdict, namedtuple import re import os from flask_sqlalchemy import SQLAlchemy from", "db.update(Membership) \\ .where(Membership.account == account) \\ .where(Membership.settled_by == None) \\ .where(Membership.id <= max_id)", "name(self): return self._name @name.setter def name(self, value): self._name = value 
self.queryname = value.lower()", "def file_path(self): return os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\" % self.id) def transactions(self): return vippsparser.load_transactions(self.file_path()) def bootstrap_class(self):", "if not request.form[\"name\"]: return sessions_new(error_message=\"Name is missing\") sess = Session( level=level, user_name=request.form[\"name\"], description=request.form.get(\"description\",", "lifetime=lifetime) @app.route('/reports/lifetime') @requires('reports') def reports_lifetime(): memberships = Membership.query \\ .filter(Membership.term == \"Lifetime\") \\", "index(): if g.sess is None: return render_template('index.html') else: return redirect(url_for('memberships_new')) @app.route('/sessions/new') def sessions_new(error_message=None):", "return sessions_new(error_message=\"Wrong password\") if not request.form[\"name\"]: return sessions_new(error_message=\"Name is missing\") sess = Session(", "= value self.queryname = value.lower() def is_free(self): return self.price == 0 ALPHABET =", "= re.sub(r'^[^\\wæøåÆØÅ]+', '', name, re.U) name = re.sub(r'[^\\wæøåÆØÅ]+$', '', name, re.U) self.name =", "Membership( name=request.form[\"name\"], term=request.form[\"term\"], account=request.form[\"account\"], created_by=g.sess.id ) membership.price = price_for_term(membership.term) if 'vipps_transaction_id' in request.form", "for part in query_string.split(): matches = lambda name: part.lower() in name.lower() banned =", "query = Membership.query.filter(Membership.valid_term) for part in query_string.split(): like_string = '%' + part.lower() +", "@app.route('/reports') @requires('reports') def reports(): membership_count = db.session.query( db.func.count(Membership.id), Membership.term, db.func.strftime('%Y', Membership.created_at).label('year'), db.func.strftime('%W', Membership.created_at).label('week')", "= transaction self.memberships = memberships self.accuracy = 0 self.parse_transaction() def is_complete(self): return 
len(self.memberships)", "= lambda name: part.lower() in name.lower() banned = filter(matches, banned) return render_template('memberships/table.html', memberships=memberships,", "def parse_transaction(self): amount = self.transaction.amount if amount == price_for_term('Current'): self.term = app.config['TERM'] elif", "# Old sessions are not valid elif (datetime.now() - sess.created_at) > timedelta(days =", "memberships self.accuracy = 0 self.parse_transaction() def is_complete(self): return len(self.memberships) > 0 def parse_transaction(self):", "os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\" % self.id) def transactions(self): return vippsparser.load_transactions(self.file_path()) def bootstrap_class(self): if self.state ==", "idx = cmd.end(0) name = self.transaction.message[idx:] name = re.sub(r'^[^\\wæøåÆØÅ]+', '', name, re.U) name", "functools import wraps from flask import Flask, render_template, request, redirect, url_for, jsonify, session,", "import os from flask_sqlalchemy import SQLAlchemy from flask_assets import Environment, Bundle from sqlalchemy.ext.hybrid", "report.state = \"uploaded\" db.session.commit() return redirect(url_for('vipps_index')) @app.route('/vipps/<id>') def vipps_show(id): report = VippsReport.query.get(id) return", "is required\") if len(errors) > 0: return render_template('memberships/new.html', membership=membership, errors=errors) db.session.add(membership) db.session.commit() return", "== \"uploaded\": return \"\" if self.state == \"resolved\": return \"success\" if self.state ==", "= None membership.vipps_transaction_id = tid errors = [] if membership.name.strip() == '': errors.append(\"Name", "'Europe/Oslo' app.config['TERM'] = \"V16\" app.config['PRICE'] = 50 app.config['VIPPS_STORAGE_PATH'] = os.path.join(app.root_path, 'vipps-reports') app.config['PASSWORDS'] =", "price_for_term('Current'): self.term = app.config['TERM'] elif amount == price_for_term('Lifetime'): self.term = \"Lifetime\" else: return", "names = 
request.form.getlist(\"name\") terms = request.form.getlist(\"term\") tids = request.form.getlist(\"transaction_id\") accepted_tids = request.form.getlist(\"accepted_transaction_id\") for", "from calendar import month_name from collections import defaultdict, namedtuple import re import os", "None # Old sessions are not valid elif (datetime.now() - sess.created_at) > timedelta(days", "level=level, user_name=request.form[\"name\"], description=request.form.get(\"description\", \"Unknown\"), ) db.session.add(sess) db.session.commit() session[\"session_id\"] = sess.id return redirect(url_for('index')) @app.route('/sessions/switch',", "@app.route('/memberships/<id>/edit') def memberships_edit(id): mem = Membership.query.get(id) return render_template('memberships/edit.html', membership=mem) @app.route('/memberships/<id>/delete', methods=['POST']) @requires('memberships_new') def", "False class VippsReport(db.Model): id = db.Column(db.Integer, primary_key=True) state = db.Column(db.Text) created_at = db.Column(db.DateTime,", "request.files['file'] report = VippsReport(state=\"created\") db.session.add(report) db.session.commit() file.save(report.file_path()) report.state = \"uploaded\" db.session.commit() return redirect(url_for('vipps_index'))", "abort from calendar import month_name from collections import defaultdict, namedtuple import re import", "db.session.add(report) db.session.commit() file.save(report.file_path()) report.state = \"uploaded\" db.session.commit() return redirect(url_for('vipps_index')) @app.route('/vipps/<id>') def vipps_show(id): report", "\\ .filter(Membership.term == \"Lifetime\") \\ .order_by(Membership.created_at.desc()) return render_template('reports/lifetime.html', memberships=memberships) @app.route('/sessions') def sessions_list(): created", "class Membership(db.Model): id = db.Column(db.Integer, primary_key=True) _name = db.Column('name', db.Text, nullable=False) queryname =", "2 else: self.name = \"%s %s\" % 
(self.transaction.first_name, self.transaction.last_name) def entries(self): transactions =", "return render_template('reports.html', summary=summary, lifetime=lifetime) @app.route('/reports/lifetime') @requires('reports') def reports_lifetime(): memberships = Membership.query \\ .filter(Membership.term", "= list(query.order_by(db.desc('created_at')).limit(limit)) banned = [] if len(memberships) < limit: # Search in blacklist", "limit: # Search in blacklist banned = app.config[\"BLACKLIST\"] for part in query_string.split(): matches", "return True if action == 'reports': return self.is_atleast('Admin') if action == 'sessions_list': return", "g.sess.can('vipps'): tid = request.form['vipps_transaction_id'].strip() if len(tid) == 0: tid = None membership.vipps_transaction_id =", "request.args.get('account', 'Entrance') else: account = \"Entrance\" sessions = db.session.query( db.func.count(Membership.created_by), db.func.sum(Membership.price), Session )", "are not valid elif (datetime.now() - sess.created_at) > timedelta(days = 1): sess =", "report = VippsReport.query.get(id) names = request.form.getlist(\"name\") terms = request.form.getlist(\"term\") tids = request.form.getlist(\"transaction_id\") accepted_tids", "return func(*args, **kwargs) else: abort(404) return route return decorator @app.route('/') def index(): if", "created_by=g.sess.id ) membership.price = price_for_term(membership.term) if 'vipps_transaction_id' in request.form and g.sess.can('vipps'): tid =", "db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) def file_path(self): return os.path.join(app.config['VIPPS_STORAGE_PATH'], \"%05d.xlsx\" % self.id)", "logout() return redirect(url_for('index')) @app.route('/memberships/new') @requires('memberships_new') def memberships_new(): last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term = request.args.get('term',", "time from pytz import timezone import 
pytz from functools import wraps from flask", "'edit': if isinstance(thing, Membership): return True return False class VippsReport(db.Model): id = db.Column(db.Integer,", "term=term, account=\"Vipps\", vipps_transaction_id=tid, created_by=g.sess.id, price=price_for_term(term) ) db.session.add(mem) report.state = request.form[\"state\"] db.session.commit() return redirect(url_for('vipps_index'))", "= price_for_term(membership.term) if 'vipps_transaction_id' in request.form and g.sess.can('vipps'): tid = request.form['vipps_transaction_id'].strip() if len(tid)", "update = db.update(Membership) \\ .where(Membership.account == account) \\ .where(Membership.settled_by == None) \\ .where(Membership.id", "week}) summary = [] for term in terms: summary.append({ \"name\": term, \"rows\": terms[term],", "== 'settlement': return self.is_atleast('SM') if action == 'settlement_all': return self.is_atleast('Admin') if action ==", "to 1 def code(self): # convert to Unix epoch epoch = int(time.mktime(self.created_at.timetuple())) code", "self.id) def transactions(self): return vippsparser.load_transactions(self.file_path()) def bootstrap_class(self): if self.state == \"created\": return \"danger\"", ".group_by('year', 'week', Membership.term) \\ .order_by('year', 'week') terms = defaultdict(lambda: []) lifetime = 0", "render_template('vipps/show.html', report=report) @app.route('/vipps/<id>', methods=['POST']) def vipps_process(id): report = VippsReport.query.get(id) names = request.form.getlist(\"name\") terms", "@app.route('/memberships') @requires('memberships_list') def memberships_list(): memberships = Membership.query.all() return render_template('memberships/list.html', memberships=memberships) @app.route('/reports') @requires('reports') def", "[] for term in terms: summary.append({ \"name\": term, \"rows\": terms[term], \"total\": sum(r[\"count\"] for", "limit = 10 memberships = list(query.order_by(db.desc('created_at')).limit(limit)) banned = [] if len(memberships) < 
limit:", "def memberships_new(): last_memberships = Membership.query.filter(Membership.valid_term).order_by(db.desc('created_at')).limit(10) term = request.args.get('term', app.config['TERM']) membership = Membership(term=term, account=\"Entrance\")", "import time from pytz import timezone import pytz from functools import wraps from", "VippsReport.query.get(id) names = request.form.getlist(\"name\") terms = request.form.getlist(\"term\") tids = request.form.getlist(\"transaction_id\") accepted_tids = request.form.getlist(\"accepted_transaction_id\")", "ALPHABET += string.ascii_uppercase ALPHABET = ALPHABET\\ .replace(\"O\", \"\")\\ .replace(\"I\", \"\") # too similar", "= {} memberships = Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for m in memberships: if m.vipps_transaction_id not in", "which are not settled if isinstance(thing, Membership): if thing.settled_by is None: return thing.created_by", "if g.sess and g.sess.can(action): return func(*args, **kwargs) else: abort(404) return route return decorator", "= db.Column(db.DateTime, default=datetime.utcnow, nullable=False) closed_at = db.Column(db.DateTime, nullable=True) def is_atleast(self, level): return levels.index(self.level)", "@app.route('/sessions') def sessions_list(): created = Membership.count_dict(Membership.created_by) settled = Membership.count_dict(Membership.settled_by) sessions = Session.query.order_by(db.desc('created_at')) return", "self.is_atleast('SM') if action == 'delete': # We can only delete our own memberships", "db.session.delete(mem) db.session.commit() return redirect(url_for('memberships_new')) @app.route('/memberships/search') def memberships_search(): query_string = request.args['q'] query = Membership.query.filter(Membership.valid_term)", "= self.ALPHABET[i] + code return code @classmethod def count_dict(cls, column): query = db.session.query(column,", "errors.append(\"Name is required\") if len(errors) > 0: return 
render_template('memberships/new.html', membership=membership, errors=errors) db.session.add(membership) db.session.commit()", "== 'reports': return self.is_atleast('Admin') if action == 'sessions_list': return self.is_atleast('SM') if action ==", "Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for m in memberships: if m.vipps_transaction_id not in mapping: mapping[m.vipps_transaction_id] = []", "= now.replace(year=now.year-18) - timedelta(days = 1) return now def epoch(d): start = datetime.utcfromtimestamp(0)", "'SM': 'sm', 'Admin': 'admin', 'Superadmin': 'superadmin', } app.config['BLACKLIST'] = [] app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True)", "= None setattr(g, 'sess', sess) else: setattr(g, 'sess', None) @app.context_processor def inject_helpers(): def", "banned=banned) @app.route('/memberships/settle') @requires('settlement') def memberships_settle(): max_id = db.session.query(db.func.max(Membership.id)).scalar() if g.sess.can('settlement_all'): account = request.args.get('account',", "= { 'count': sum(count for count,_,_ in sessions), 'price': sum(price for _,price,_ in", "== 'edit': if isinstance(thing, Membership): return True return False class VippsReport(db.Model): id =", "sessions_new(error_message=None): level = request.args['level'] description = request.args['description'] return render_template('sessions/new.html', level=level, description=description, error_message=error_message) @app.route('/sessions/new',", "tid in zip(names, terms, tids): if tid not in accepted_tids: continue mem =", "os.path.join(app.root_path, 'vipps-reports') app.config['PASSWORDS'] = { 'F<PASSWORD>': '<PASSWORD>', 'SM': 'sm', 'Admin': 'admin', 'Superadmin': 'superadmin',", "if g.sess.can('settlement_all'): account = request.args.get('account', 'Entrance') else: account = \"Entrance\" sessions = db.session.query(", "@requires('reports') def reports_lifetime(): memberships = Membership.query \\ 
.filter(Membership.term == \"Lifetime\") \\ .order_by(Membership.created_at.desc()) return", "for x in range(10)) ALPHABET += string.ascii_uppercase ALPHABET = ALPHABET\\ .replace(\"O\", \"\")\\ .replace(\"I\",", "ALPHABET = ALPHABET\\ .replace(\"O\", \"\")\\ .replace(\"I\", \"\") # too similar to 1 def", "terms[term], \"total\": sum(r[\"count\"] for r in terms[term]), \"year\": int(term[1:]) + 2000, \"sortkey\": term[1:]", "Membership.created_at).label('week') ) \\ .group_by('year', 'week', Membership.term) \\ .order_by('year', 'week') terms = defaultdict(lambda: [])", "account = \"Entrance\" update = db.update(Membership) \\ .where(Membership.account == account) \\ .where(Membership.settled_by ==", "nullable=False) closed_at = db.Column(db.DateTime, nullable=True) def is_atleast(self, level): return levels.index(self.level) >= levels.index(level) def", "count, \"year\": int(year), \"week\": week}) summary = [] for term in terms: summary.append({", "None: return render_template('index.html') else: return redirect(url_for('memberships_new')) @app.route('/sessions/new') def sessions_new(error_message=None): level = request.args['level'] description", "None) \\ .filter(Membership.id <= max_id) \\ .join(Membership.created_session) \\ .all() summary = { 'count':", "is_complete(self): return len(self.memberships) > 0 def parse_transaction(self): amount = self.transaction.amount if amount ==", "+ '%' query = query.filter(Membership.queryname.like(like_string)) limit = 10 memberships = list(query.order_by(db.desc('created_at')).limit(limit)) banned =", "= timezone(app.config['TIMEZONE']) assets = Environment(app) if 'WEBASSETS_DIR' in os.environ: assets.directory = os.getenv('WEBASSETS_DIR') db", "app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db' app.config['ASSETS_DEBUG'] = True app.config['SECRET_KEY'] = \"development key\" app.config['TIMEZONE'] = 'Europe/Oslo'", "epoch epoch = int(time.mktime(self.created_at.timetuple())) code = \"\" while epoch > 0: epoch, 
i", "Closed sessions are not valid if sess.closed_at is not None: sess = None", "def epoch(d): start = datetime.utcfromtimestamp(0) return (d - start).total_seconds() return dict( localize=localize, latest_born_date=latest_born_date,", "@app.route('/memberships/<id>/delete', methods=['POST']) @requires('memberships_new') def memberships_destroy(id): mem = Membership.query.get(id) if g.sess.can('delete', mem): db.session.delete(mem) db.session.commit()", "Membership.created_at).label('year'), db.func.strftime('%W', Membership.created_at).label('week') ) \\ .group_by('year', 'week', Membership.term) \\ .order_by('year', 'week') terms =", "column): query = db.session.query(column, db.func.count()).group_by(column) result = {} for row in query: result[row[0]]", "(term == \"Lifetime\") | (term == app.config['TERM']) @hybrid_property def name(self): return self._name @name.setter", "db.Column(db.Integer, primary_key=True) state = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) def file_path(self): return", "_,price,_ in sessions), } return render_template('memberships/settle.html', sessions=sessions, summary=summary, max_id=max_id, account=account) @app.route('/memberships/settle', methods=['POST']) @requires('settlement')", "<= max_id) \\ .join(Membership.created_session) \\ .all() summary = { 'count': sum(count for count,_,_", "return \"warning\" class Entry: COMMAND_PATTERN = r'^([vh]\\d+)|(evig|evil)' def __init__(self, transaction, memberships): self.transaction =", "description = request.args['description'] return render_template('sessions/new.html', level=level, description=description, error_message=error_message) @app.route('/sessions/new', methods=['POST']) def sessions_create(): level", "2000, \"sortkey\": term[1:] + str(int(term[0] == 'H')) }) summary.sort(key=lambda k: k[\"sortkey\"], reverse=True) return", "= Membership( name=name, term=term, account=\"Vipps\", vipps_transaction_id=tid, created_by=g.sess.id, 
price=price_for_term(term) ) db.session.add(mem) report.state = request.form[\"state\"]", "for row in query: result[row[0]] = row[1] return result def price_for_term(term): if term", "for term in terms: summary.append({ \"name\": term, \"rows\": terms[term], \"total\": sum(r[\"count\"] for r", "True if action == 'settlement': return self.is_atleast('SM') if action == 'settlement_all': return self.is_atleast('Admin')", "missing\") sess = Session( level=level, user_name=request.form[\"name\"], description=request.form.get(\"description\", \"Unknown\"), ) db.session.add(sess) db.session.commit() session[\"session_id\"] =", "'sess', sess) else: setattr(g, 'sess', None) @app.context_processor def inject_helpers(): def localize(d): if d.tzinfo", "class VippsReport(db.Model): id = db.Column(db.Integer, primary_key=True) state = db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow,", "'sqlite:///app.db' app.config['ASSETS_DEBUG'] = True app.config['SECRET_KEY'] = \"development key\" app.config['TIMEZONE'] = 'Europe/Oslo' app.config['TERM'] =", "= Session( level=g.sess.level, user_name=request.form[\"name\"], description=g.sess.description ) db.session.add(new_session) g.sess.closed_at = datetime.utcnow() db.session.commit() session[\"session_id\"] =", "import string from datetime import datetime, timedelta import time from pytz import timezone", "sessions_list(): created = Membership.count_dict(Membership.created_by) settled = Membership.count_dict(Membership.settled_by) sessions = Session.query.order_by(db.desc('created_at')) return render_template('sessions/list.html', sessions=sessions,", "defaultdict, namedtuple import re import os from flask_sqlalchemy import SQLAlchemy from flask_assets import", "<gh_stars>0 # coding: utf-8 import string from datetime import datetime, timedelta import time", "m in memberships: if m.vipps_transaction_id not in mapping: mapping[m.vipps_transaction_id] = [] mapping[m.vipps_transaction_id].append(m) return", 
"else: return self.accuracy = 1 cmd = re.search(self.COMMAND_PATTERN, self.transaction.message, re.I) if cmd: idx", "memberships_list(): memberships = Membership.query.all() return render_template('memberships/list.html', memberships=memberships) @app.route('/reports') @requires('reports') def reports(): membership_count =", ") \\ .group_by('year', 'week', Membership.term) \\ .order_by('year', 'week') terms = defaultdict(lambda: []) lifetime", "= db.relationship(\"Session\", foreign_keys=[created_by], backref=\"created_memberships\") settled_session = db.relationship(\"Session\", foreign_keys=[settled_by], backref=\"settled_memberships\") valid_term = (term ==", "Environment, Bundle from sqlalchemy.ext.hybrid import hybrid_property import vippsparser app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] =", "@app.route('/sessions/switch', methods=['POST']) def sessions_switch(): new_session = Session( level=g.sess.level, user_name=request.form[\"name\"], description=g.sess.description ) db.session.add(new_session) g.sess.closed_at", "tid = request.form['vipps_transaction_id'].strip() if len(tid) == 0: tid = None membership.vipps_transaction_id = tid", "def reports_lifetime(): memberships = Membership.query \\ .filter(Membership.term == \"Lifetime\") \\ .order_by(Membership.created_at.desc()) return render_template('reports/lifetime.html',", "def sessions_new(error_message=None): level = request.args['level'] description = request.args['description'] return render_template('sessions/new.html', level=level, description=description, error_message=error_message)", "== None) \\ .filter(Membership.id <= max_id) \\ .join(Membership.created_session) \\ .all() summary = {", "= db.Column(db.Integer, primary_key=True) description = db.Column(db.Text, nullable=False) level = db.Column(db.Text, nullable=False) user_name =", "return levels.index(self.level) >= levels.index(level) def can(self, action, thing=None): if self.level == 'Superadmin': return", "if action == 
'edit': if isinstance(thing, Membership): return True return False class VippsReport(db.Model):", "re.U) name = re.sub(r'[^\\wæøåÆØÅ]+$', '', name, re.U) self.name = name if cmd.group(1) and", "app.config['PRICE'] levels = ['Funk', 'SM', 'Admin', 'Superadmin'] class Session(db.Model): id = db.Column(db.Integer, primary_key=True)", "Membership.query.all() return render_template('memberships/list.html', memberships=memberships) @app.route('/reports') @requires('reports') def reports(): membership_count = db.session.query( db.func.count(Membership.id), Membership.term,", "def sessions_list(): created = Membership.count_dict(Membership.created_by) settled = Membership.count_dict(Membership.settled_by) sessions = Session.query.order_by(db.desc('created_at')) return render_template('sessions/list.html',", "= request.args['q'] query = Membership.query.filter(Membership.valid_term) for part in query_string.split(): like_string = '%' +", "\"rows\": terms[term], \"total\": sum(r[\"count\"] for r in terms[term]), \"year\": int(term[1:]) + 2000, \"sortkey\":", "route return decorator @app.route('/') def index(): if g.sess is None: return render_template('index.html') else:", "blacklist banned = app.config[\"BLACKLIST\"] for part in query_string.split(): matches = lambda name: part.lower()", "real_password = app.config['PASSWORDS'][request.form[\"level\"]] if real_password != request.form[\"password\"]: return sessions_new(error_message=\"Wrong password\") if not request.form[\"name\"]:", "== price_for_term('Current'): self.term = app.config['TERM'] elif amount == price_for_term('Lifetime'): self.term = \"Lifetime\" else:", "created = Membership.count_dict(Membership.created_by) settled = Membership.count_dict(Membership.settled_by) sessions = Session.query.order_by(db.desc('created_at')) return render_template('sessions/list.html', sessions=sessions, created=created,", "continue mem = Membership( name=name, term=term, account=\"Vipps\", vipps_transaction_id=tid, 
created_by=g.sess.id, price=price_for_term(term) ) db.session.add(mem) report.state", "flask_assets import Environment, Bundle from sqlalchemy.ext.hybrid import hybrid_property import vippsparser app = Flask(__name__)", "re import os from flask_sqlalchemy import SQLAlchemy from flask_assets import Environment, Bundle from", "'sm', 'Admin': 'admin', 'Superadmin': 'superadmin', } app.config['BLACKLIST'] = [] app.config.from_pyfile(os.getenv('CONFIG_FILE', 'production.cfg'), silent=True) tz", ".where(Membership.account == account) \\ .where(Membership.settled_by == None) \\ .where(Membership.id <= max_id) \\ .values(settled_by=g.sess.id)", "1) return now def epoch(d): start = datetime.utcfromtimestamp(0) return (d - start).total_seconds() return", "Session.query.order_by(db.desc('created_at')) return render_template('sessions/list.html', sessions=sessions, created=created, settled=settled) @app.route('/vipps') def vipps_index(): reports = VippsReport.query.order_by(VippsReport.created_at.desc()) return", "= Membership.query.filter(Membership.valid_term) for part in query_string.split(): like_string = '%' + part.lower() + '%'", "[] if membership.name.strip() == '': errors.append(\"Name is required\") if len(errors) > 0: return", "sessions are not valid if sess.closed_at is not None: sess = None #", "return self.is_atleast('SM') if action == 'settlement_all': return self.is_atleast('Admin') if action == 'wristband': return", "== None) \\ .where(Membership.id <= max_id) \\ .values(settled_by=g.sess.id) \\ .values(queryname=Membership.queryname) db.session.execute(update) db.session.commit() return", "os from flask_sqlalchemy import SQLAlchemy from flask_assets import Environment, Bundle from sqlalchemy.ext.hybrid import", "account=\"Entrance\") membership.price = price_for_term(membership.term) return render_template('memberships/new.html', membership=membership, last_memberships=last_memberships) @app.route('/memberships/new', methods=['POST']) 
@requires('memberships_new') def memberships_create():", "tids): if tid not in accepted_tids: continue mem = Membership( name=name, term=term, account=\"Vipps\",", "setattr(g, 'sess', None) @app.context_processor def inject_helpers(): def localize(d): if d.tzinfo is None: d", "return dict( localize=localize, latest_born_date=latest_born_date, epoch=epoch ) def logout(): session.pop('session_id') def requires(action): def decorator(func):", "created_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=False) settled_by = db.Column(db.Integer, db.ForeignKey('session.id'), nullable=True) created_session = db.relationship(\"Session\",", "membership_count: if term == \"Lifetime\": lifetime += count else: terms[term].append({\"count\": count, \"year\": int(year),", "name: part.lower() in name.lower() banned = filter(matches, banned) return render_template('memberships/table.html', memberships=memberships, banned=banned) @app.route('/memberships/settle')", "+ code return code @classmethod def count_dict(cls, column): query = db.session.query(column, db.func.count()).group_by(column) result", "None: d = d.replace(tzinfo=pytz.utc) return d.astimezone(tz) def latest_born_date(): now = datetime.now() now =", "db.session.commit() return redirect(url_for('memberships_settle', account=account)) @app.route('/memberships') @requires('memberships_list') def memberships_list(): memberships = Membership.query.all() return render_template('memberships/list.html',", "= int(time.mktime(self.created_at.timetuple())) code = \"\" while epoch > 0: epoch, i = divmod(epoch,", "= Membership.count_dict(Membership.settled_by) sessions = Session.query.order_by(db.desc('created_at')) return render_template('sessions/list.html', sessions=sessions, created=created, settled=settled) @app.route('/vipps') def vipps_index():", "memberships_destroy(id): mem = Membership.query.get(id) if g.sess.can('delete', mem): db.session.delete(mem) db.session.commit() return 
redirect(url_for('memberships_new')) @app.route('/memberships/search') def", "sessions), 'price': sum(price for _,price,_ in sessions), } return render_template('memberships/settle.html', sessions=sessions, summary=summary, max_id=max_id,", "methods=['POST']) @requires('memberships_new') def memberships_create(): membership = Membership( name=request.form[\"name\"], term=request.form[\"term\"], account=request.form[\"account\"], created_by=g.sess.id ) membership.price", "render_template('sessions/new.html', level=level, description=description, error_message=error_message) @app.route('/sessions/new', methods=['POST']) def sessions_create(): level = request.form[\"level\"] real_password =", "self.queryname = value.lower() def is_free(self): return self.price == 0 ALPHABET = \"\".join(str(x) for", "= 1 cmd = re.search(self.COMMAND_PATTERN, self.transaction.message, re.I) if cmd: idx = cmd.end(0) name", "transactions = list(self.transactions()) trans_ids = [t.id for t in transactions] mapping = {}", "ALPHABET\\ .replace(\"O\", \"\")\\ .replace(\"I\", \"\") # too similar to 1 def code(self): #", "db.Column(db.Text) created_at = db.Column(db.DateTime, default=datetime.utcnow, nullable=False) closed_at = db.Column(db.DateTime, nullable=True) def is_atleast(self, level):", "memberships=memberships, banned=banned) @app.route('/memberships/settle') @requires('settlement') def memberships_settle(): max_id = db.session.query(db.func.max(Membership.id)).scalar() if g.sess.can('settlement_all'): account =", "if self.state == \"pending\": return \"warning\" class Entry: COMMAND_PATTERN = r'^([vh]\\d+)|(evig|evil)' def __init__(self,", "\"development key\" app.config['TIMEZONE'] = 'Europe/Oslo' app.config['TERM'] = \"V16\" app.config['PRICE'] = 50 app.config['VIPPS_STORAGE_PATH'] =", "= Membership.query.filter(Membership.vipps_transaction_id.in_(trans_ids)) for m in memberships: if m.vipps_transaction_id not in mapping: mapping[m.vipps_transaction_id] =", "= 
Membership.query.all() return render_template('memberships/list.html', memberships=memberships) @app.route('/reports') @requires('reports') def reports(): membership_count = db.session.query( db.func.count(Membership.id),", "memberships_settle(): max_id = db.session.query(db.func.max(Membership.id)).scalar() if g.sess.can('settlement_all'): account = request.args.get('account', 'Entrance') else: account =", "2 if cmd.group(2) and amount == price_for_term('Lifetime'): self.accuracy = 2 else: self.name =", "route(*args, **kwargs): if g.sess and g.sess.can(action): return func(*args, **kwargs) else: abort(404) return route", "return self.is_atleast('Admin') if action == 'sessions_list': return self.is_atleast('SM') if action == 'delete': #", "\"\" if self.state == \"resolved\": return \"success\" if self.state == \"pending\": return \"warning\"", "name = re.sub(r'^[^\\wæøåÆØÅ]+', '', name, re.U) name = re.sub(r'[^\\wæøåÆØÅ]+$', '', name, re.U) self.name", "db.Column(db.DateTime, nullable=True) def is_atleast(self, level): return levels.index(self.level) >= levels.index(level) def can(self, action, thing=None):", "price_for_term(membership.term) return render_template('memberships/new.html', membership=membership, last_memberships=last_memberships) @app.route('/memberships/new', methods=['POST']) @requires('memberships_new') def memberships_create(): membership = Membership(", "if cmd.group(1) and amount == price_for_term('Current'): self.accuracy = 2 if cmd.group(2) and amount", "'session_id' in session: sess = Session.query.get(session['session_id']) # Closed sessions are not valid if", "membership.name.strip() == '': errors.append(\"Name is required\") if len(errors) > 0: return render_template('memberships/new.html', membership=membership,", "= query.filter(Membership.queryname.like(like_string)) limit = 10 memberships = list(query.order_by(db.desc('created_at')).limit(limit)) banned = [] if len(memberships)", "import month_name from collections import defaultdict, 
namedtuple import re import os from flask_sqlalchemy", "\"Unknown\"), ) db.session.add(sess) db.session.commit() session[\"session_id\"] = sess.id return redirect(url_for('index')) @app.route('/sessions/switch', methods=['POST']) def sessions_switch():", "+ str(int(term[0] == 'H')) }) summary.sort(key=lambda k: k[\"sortkey\"], reverse=True) return render_template('reports.html', summary=summary, lifetime=lifetime)", "k[\"sortkey\"], reverse=True) return render_template('reports.html', summary=summary, lifetime=lifetime) @app.route('/reports/lifetime') @requires('reports') def reports_lifetime(): memberships = Membership.query", "\"%s %s\" % (self.transaction.first_name, self.transaction.last_name) def entries(self): transactions = list(self.transactions()) trans_ids = [t.id", "session, g, abort from calendar import month_name from collections import defaultdict, namedtuple import", "if tid not in accepted_tids: continue mem = Membership( name=name, term=term, account=\"Vipps\", vipps_transaction_id=tid,", "if len(memberships) < limit: # Search in blacklist banned = app.config[\"BLACKLIST\"] for part", "@app.route('/reports/lifetime') @requires('reports') def reports_lifetime(): memberships = Membership.query \\ .filter(Membership.term == \"Lifetime\") \\ .order_by(Membership.created_at.desc())", "for _,price,_ in sessions), } return render_template('memberships/settle.html', sessions=sessions, summary=summary, max_id=max_id, account=account) @app.route('/memberships/settle', methods=['POST'])", "g.sess.can('settlement_all'): account = request.form['account'] else: account = \"Entrance\" update = db.update(Membership) \\ .where(Membership.account", "Membership.query \\ .filter(Membership.term == \"Lifetime\") \\ .order_by(Membership.created_at.desc()) return render_template('reports/lifetime.html', memberships=memberships) @app.route('/sessions') def sessions_list():", "== price_for_term('Current'): self.accuracy = 2 if cmd.group(2) and amount == 
price_for_term('Lifetime'): self.accuracy =", "0 self.parse_transaction() def is_complete(self): return len(self.memberships) > 0 def parse_transaction(self): amount = self.transaction.amount", "nullable=True) def is_atleast(self, level): return levels.index(self.level) >= levels.index(level) def can(self, action, thing=None): if", "== 'memberships_new': return True if action == 'reports': return self.is_atleast('Admin') if action ==", "is None: d = d.replace(tzinfo=pytz.utc) return d.astimezone(tz) def latest_born_date(): now = datetime.now() now", "'memberships_new': return True if action == 'reports': return self.is_atleast('Admin') if action == 'sessions_list':", "file.save(report.file_path()) report.state = \"uploaded\" db.session.commit() return redirect(url_for('vipps_index')) @app.route('/vipps/<id>') def vipps_show(id): report = VippsReport.query.get(id)", "int(term[1:]) + 2000, \"sortkey\": term[1:] + str(int(term[0] == 'H')) }) summary.sort(key=lambda k: k[\"sortkey\"],", "setattr(g, 'sess', sess) else: setattr(g, 'sess', None) @app.context_processor def inject_helpers(): def localize(d): if", "= self.transaction.amount if amount == price_for_term('Current'): self.term = app.config['TERM'] elif amount == price_for_term('Lifetime'):", "db.func.count(Membership.id), Membership.term, db.func.strftime('%Y', Membership.created_at).label('year'), db.func.strftime('%W', Membership.created_at).label('week') ) \\ .group_by('year', 'week', Membership.term) \\ .order_by('year',", "= db.Column(db.DateTime, nullable=True) def is_atleast(self, level): return levels.index(self.level) >= levels.index(level) def can(self, action,", "= 1): sess = None setattr(g, 'sess', sess) else: setattr(g, 'sess', None) @app.context_processor", "count,_,_ in sessions), 'price': sum(price for _,price,_ in sessions), } return render_template('memberships/settle.html', sessions=sessions,", "\"Lifetime\") \\ .order_by(Membership.created_at.desc()) return 
render_template('reports/lifetime.html', memberships=memberships) @app.route('/sessions') def sessions_list(): created = Membership.count_dict(Membership.created_by) settled", "= Environment(app) if 'WEBASSETS_DIR' in os.environ: assets.directory = os.getenv('WEBASSETS_DIR') db = SQLAlchemy(app) def", "@app.route('/vipps') def vipps_index(): reports = VippsReport.query.order_by(VippsReport.created_at.desc()) return render_template('vipps/index.html', reports=reports) @app.route('/vipps', methods=['POST']) def vipps_import():", "\\ .where(Membership.id <= max_id) \\ .values(settled_by=g.sess.id) \\ .values(queryname=Membership.queryname) db.session.execute(update) db.session.commit() return redirect(url_for('memberships_settle', account=account))", "primary_key=True) _name = db.Column('name', db.Text, nullable=False) queryname = db.Column(db.Text, nullable=False) price = db.Column(db.Integer,", "report=report) @app.route('/vipps/<id>', methods=['POST']) def vipps_process(id): report = VippsReport.query.get(id) names = request.form.getlist(\"name\") terms =", "result[row[0]] = row[1] return result def price_for_term(term): if term == 'Lifetime': return app.config['PRICE']", "k: k[\"sortkey\"], reverse=True) return render_template('reports.html', summary=summary, lifetime=lifetime) @app.route('/reports/lifetime') @requires('reports') def reports_lifetime(): memberships =" ]
[ "engine, char_state): char_state.ap += 260 def timeout(self, rotation, engine, char_state): char_state.ap -= 260", "Buff, LastingBuff class DragonKiller(LastingBuff): def __init__(self): super().__init__('dragon_killer', 2 * 60, 20) def equip(self,engine,", "2 * 60, 20) def equip(self,engine, char_state): char_state.ap += 64 def dequip(self,engine, char_state):", "64 def dequip(self,engine, char_state): char_state.ap -= 64 def perform_impl(self,rotation, engine, char_state): char_state.ap +=", "char_state.ap += 64 def dequip(self,engine, char_state): char_state.ap -= 64 def perform_impl(self,rotation, engine, char_state):", "super().__init__('dragon_killer', 2 * 60, 20) def equip(self,engine, char_state): char_state.ap += 64 def dequip(self,engine,", "__init__(self): super().__init__('dragon_killer', 2 * 60, 20) def equip(self,engine, char_state): char_state.ap += 64 def", "from buff import Buff, LastingBuff class DragonKiller(LastingBuff): def __init__(self): super().__init__('dragon_killer', 2 * 60,", "buff import Buff, LastingBuff class DragonKiller(LastingBuff): def __init__(self): super().__init__('dragon_killer', 2 * 60, 20)", "* 60, 20) def equip(self,engine, char_state): char_state.ap += 64 def dequip(self,engine, char_state): char_state.ap", "def __init__(self): super().__init__('dragon_killer', 2 * 60, 20) def equip(self,engine, char_state): char_state.ap += 64", "-= 64 def perform_impl(self,rotation, engine, char_state): char_state.ap += 260 def timeout(self, rotation, engine,", "def dequip(self,engine, char_state): char_state.ap -= 64 def perform_impl(self,rotation, engine, char_state): char_state.ap += 260", "20) def equip(self,engine, char_state): char_state.ap += 64 def dequip(self,engine, char_state): char_state.ap -= 64", "import Buff, LastingBuff class DragonKiller(LastingBuff): def __init__(self): super().__init__('dragon_killer', 2 * 60, 20) def", "def equip(self,engine, char_state): char_state.ap += 64 def dequip(self,engine, char_state): 
char_state.ap -= 64 def", "char_state): char_state.ap -= 64 def perform_impl(self,rotation, engine, char_state): char_state.ap += 260 def timeout(self,", "64 def perform_impl(self,rotation, engine, char_state): char_state.ap += 260 def timeout(self, rotation, engine, char_state):", "class DragonKiller(LastingBuff): def __init__(self): super().__init__('dragon_killer', 2 * 60, 20) def equip(self,engine, char_state): char_state.ap", "char_state): char_state.ap += 64 def dequip(self,engine, char_state): char_state.ap -= 64 def perform_impl(self,rotation, engine,", "60, 20) def equip(self,engine, char_state): char_state.ap += 64 def dequip(self,engine, char_state): char_state.ap -=", "+= 64 def dequip(self,engine, char_state): char_state.ap -= 64 def perform_impl(self,rotation, engine, char_state): char_state.ap", "DragonKiller(LastingBuff): def __init__(self): super().__init__('dragon_killer', 2 * 60, 20) def equip(self,engine, char_state): char_state.ap +=", "dequip(self,engine, char_state): char_state.ap -= 64 def perform_impl(self,rotation, engine, char_state): char_state.ap += 260 def", "LastingBuff class DragonKiller(LastingBuff): def __init__(self): super().__init__('dragon_killer', 2 * 60, 20) def equip(self,engine, char_state):", "equip(self,engine, char_state): char_state.ap += 64 def dequip(self,engine, char_state): char_state.ap -= 64 def perform_impl(self,rotation,", "char_state.ap -= 64 def perform_impl(self,rotation, engine, char_state): char_state.ap += 260 def timeout(self, rotation,", "def perform_impl(self,rotation, engine, char_state): char_state.ap += 260 def timeout(self, rotation, engine, char_state): char_state.ap", "perform_impl(self,rotation, engine, char_state): char_state.ap += 260 def timeout(self, rotation, engine, char_state): char_state.ap -=" ]
[ "put delete or patch' if not isinstance(parameters,dict): return False,'Parameters must be dict' headers", "appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL): self.token = token self.appname = appname self.username = username self.workflowurl", "getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data)) result = r.json() return True,result except: return False,traceback.format_exc() # ins", "traceback import json class WorkFlowAPiRequest(object): def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL): self.token = token", "result = r.json() return True,result except: return False,traceback.format_exc() # ins = WorkFlowAPiRequest() #", "timestamp = str(time.time())[:10] ori_str = timestamp + self.token signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers =", "params=parameters,timeout=timeout,data=json.dumps(data)) result = r.json() return True,result except: return False,traceback.format_exc() # ins = WorkFlowAPiRequest()", "json class WorkFlowAPiRequest(object): def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL): self.token = token self.appname =", "if not isinstance(parameters,dict): return False,'Parameters must be dict' headers = self.getrequestheader() try: r", "getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if method not in ['get','post','put','delete','patch']: return False,'method must be one of get", "settings import time import requests import hashlib import traceback import json class WorkFlowAPiRequest(object):", "def getrequestheader(self): timestamp = str(time.time())[:10] ori_str = timestamp + self.token signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest()", "= str(time.time())[:10] ori_str 
= timestamp + self.token signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers = dict(signature=signature,", "timestamp=timestamp, appname=self.appname, username=self.username) return headers def getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if method not in ['get','post','put','delete','patch']: return", "return False,'method must be one of get post put delete or patch' if", "workflowurl=settings.WORKFLOW_URL): self.token = token self.appname = appname self.username = username self.workflowurl = workflowurl", "= self.getrequestheader() try: r = getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data)) result = r.json() return True,result", "__init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL): self.token = token self.appname = appname self.username = username", "dict' headers = self.getrequestheader() try: r = getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data)) result = r.json()", "username='admin', workflowurl=settings.WORKFLOW_URL): self.token = token self.appname = appname self.username = username self.workflowurl =", "False,'method must be one of get post put delete or patch' if not", "= dict(signature=signature, timestamp=timestamp, appname=self.appname, username=self.username) return headers def getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if method not in", "= r.json() return True,result except: return False,traceback.format_exc() # ins = WorkFlowAPiRequest() # print", "WorkFlowAPiRequest(object): def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL): self.token = token self.appname = appname self.username", 
"headers=headers, params=parameters,timeout=timeout,data=json.dumps(data)) result = r.json() return True,result except: return False,traceback.format_exc() # ins =", "= getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data)) result = r.json() return True,result except: return False,traceback.format_exc() #", "timestamp + self.token signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers = dict(signature=signature, timestamp=timestamp, appname=self.appname, username=self.username) return", "self.token = token self.appname = appname self.username = username self.workflowurl = workflowurl def", "r = getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data)) result = r.json() return True,result except: return False,traceback.format_exc()", "ori_str = timestamp + self.token signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers = dict(signature=signature, timestamp=timestamp, appname=self.appname,", "import requests import hashlib import traceback import json class WorkFlowAPiRequest(object): def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP,", "not in ['get','post','put','delete','patch']: return False,'method must be one of get post put delete", "['get','post','put','delete','patch']: return False,'method must be one of get post put delete or patch'", "dict(signature=signature, timestamp=timestamp, appname=self.appname, username=self.username) return headers def getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if method not in ['get','post','put','delete','patch']:", "import settings import time import requests import hashlib import traceback import json class", "return False,'Parameters must be dict' headers = self.getrequestheader() try: r = 
getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers,", "in ['get','post','put','delete','patch']: return False,'method must be one of get post put delete or", "= timestamp + self.token signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers = dict(signature=signature, timestamp=timestamp, appname=self.appname, username=self.username)", "workflowurl def getrequestheader(self): timestamp = str(time.time())[:10] ori_str = timestamp + self.token signature =", "token self.appname = appname self.username = username self.workflowurl = workflowurl def getrequestheader(self): timestamp", "post put delete or patch' if not isinstance(parameters,dict): return False,'Parameters must be dict'", "requests import hashlib import traceback import json class WorkFlowAPiRequest(object): def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin',", "or patch' if not isinstance(parameters,dict): return False,'Parameters must be dict' headers = self.getrequestheader()", "time import requests import hashlib import traceback import json class WorkFlowAPiRequest(object): def __init__(self,token=settings.WORKFLOW_TOKEN,", "delete or patch' if not isinstance(parameters,dict): return False,'Parameters must be dict' headers =", "try: r = getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data)) result = r.json() return True,result except: return", "self.username = username self.workflowurl = workflowurl def getrequestheader(self): timestamp = str(time.time())[:10] ori_str =", "def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL): self.token = token self.appname = appname self.username =", "appname self.username = username self.workflowurl = workflowurl def getrequestheader(self): timestamp = str(time.time())[:10] ori_str", "def 
getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if method not in ['get','post','put','delete','patch']: return False,'method must be one of", "import json class WorkFlowAPiRequest(object): def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL): self.token = token self.appname", "headers def getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if method not in ['get','post','put','delete','patch']: return False,'method must be one", "self.getrequestheader() try: r = getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data)) result = r.json() return True,result except:", "= token self.appname = appname self.username = username self.workflowurl = workflowurl def getrequestheader(self):", "= workflowurl def getrequestheader(self): timestamp = str(time.time())[:10] ori_str = timestamp + self.token signature", "import hashlib import traceback import json class WorkFlowAPiRequest(object): def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL):", "method not in ['get','post','put','delete','patch']: return False,'method must be one of get post put", "one of get post put delete or patch' if not isinstance(parameters,dict): return False,'Parameters", "not isinstance(parameters,dict): return False,'Parameters must be dict' headers = self.getrequestheader() try: r =", "username=self.username) return headers def getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if method not in ['get','post','put','delete','patch']: return False,'method must", "signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers = dict(signature=signature, timestamp=timestamp, appname=self.appname, 
username=self.username) return headers def getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()):", "r.json() return True,result except: return False,traceback.format_exc() # ins = WorkFlowAPiRequest() # print (ins.getdata(parameters=dict(username='admin',", "self.appname = appname self.username = username self.workflowurl = workflowurl def getrequestheader(self): timestamp =", "headers = self.getrequestheader() try: r = getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data)) result = r.json() return", "class WorkFlowAPiRequest(object): def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL): self.token = token self.appname = appname", "import traceback import json class WorkFlowAPiRequest(object): def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL): self.token =", "= appname self.username = username self.workflowurl = workflowurl def getrequestheader(self): timestamp = str(time.time())[:10]", "str(time.time())[:10] ori_str = timestamp + self.token signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers = dict(signature=signature, timestamp=timestamp,", "self.workflowurl = workflowurl def getrequestheader(self): timestamp = str(time.time())[:10] ori_str = timestamp + self.token", "of get post put delete or patch' if not isinstance(parameters,dict): return False,'Parameters must", "hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers = dict(signature=signature, timestamp=timestamp, appname=self.appname, username=self.username) return headers def getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if method", "be dict' headers = self.getrequestheader() try: r = 
getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data)) result =", "getrequestheader(self): timestamp = str(time.time())[:10] ori_str = timestamp + self.token signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers", "+ self.token signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers = dict(signature=signature, timestamp=timestamp, appname=self.appname, username=self.username) return headers", "must be one of get post put delete or patch' if not isinstance(parameters,dict):", "True,result except: return False,traceback.format_exc() # ins = WorkFlowAPiRequest() # print (ins.getdata(parameters=dict(username='admin', per_page=20, name=''),method='get',url='/api/v1.0/workflows'))", "get post put delete or patch' if not isinstance(parameters,dict): return False,'Parameters must be", "from django.conf import settings import time import requests import hashlib import traceback import", "False,'Parameters must be dict' headers = self.getrequestheader() try: r = getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data))", "= username self.workflowurl = workflowurl def getrequestheader(self): timestamp = str(time.time())[:10] ori_str = timestamp", "<gh_stars>100-1000 from django.conf import settings import time import requests import hashlib import traceback", "be one of get post put delete or patch' if not isinstance(parameters,dict): return", "django.conf import settings import time import requests import hashlib import traceback import json", "appname=self.appname, username=self.username) return headers def getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if method not in ['get','post','put','delete','patch']: return False,'method", "if method not in ['get','post','put','delete','patch']: return False,'method must be one 
of get post", "hashlib import traceback import json class WorkFlowAPiRequest(object): def __init__(self,token=settings.WORKFLOW_TOKEN, appname=settings.WORKFLOW_APP, username='admin', workflowurl=settings.WORKFLOW_URL): self.token", "self.token signature = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers = dict(signature=signature, timestamp=timestamp, appname=self.appname, username=self.username) return headers def", "must be dict' headers = self.getrequestheader() try: r = getattr(requests,method)('{0}{1}'.format(self.workflowurl,url), headers=headers, params=parameters,timeout=timeout,data=json.dumps(data)) result", "return True,result except: return False,traceback.format_exc() # ins = WorkFlowAPiRequest() # print (ins.getdata(parameters=dict(username='admin', per_page=20,", "= hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest() headers = dict(signature=signature, timestamp=timestamp, appname=self.appname, username=self.username) return headers def getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if", "return headers def getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if method not in ['get','post','put','delete','patch']: return False,'method must be", "headers = dict(signature=signature, timestamp=timestamp, appname=self.appname, username=self.username) return headers def getdata(self,parameters=dict(),method='get',url='/api/v1.0/workflows/',timeout=300,data=dict()): if method not", "isinstance(parameters,dict): return False,'Parameters must be dict' headers = self.getrequestheader() try: r = getattr(requests,method)('{0}{1}'.format(self.workflowurl,url),", "patch' if not isinstance(parameters,dict): return False,'Parameters must be dict' headers = self.getrequestheader() try:", "import time import requests import hashlib import traceback import json class WorkFlowAPiRequest(object): def", "username self.workflowurl = workflowurl def 
getrequestheader(self): timestamp = str(time.time())[:10] ori_str = timestamp +" ]
[ "from .io import write_graph_to_filepath from .api import merge_filter_graphs_from_filepaths @click.group('strainotype') def strainotype_cli(): pass @strainotype_cli.command('merge')", "@click.group('strainotype') def strainotype_cli(): pass @strainotype_cli.command('merge') @click.option('-m', '--min-weight', default=2) @click.option('-o', '--outfile', type=click.File('w'), default='-') @click.argument('filepaths',", "'--outfile', type=click.File('w'), default='-') @click.argument('filepaths', nargs=-1) def merge_graphs_cli(min_weight, outfile, filepaths): G = merge_filter_graphs_from_filepaths(filepaths, min_weight=min_weight)", "default='-') @click.argument('filepaths', nargs=-1) def merge_graphs_cli(min_weight, outfile, filepaths): G = merge_filter_graphs_from_filepaths(filepaths, min_weight=min_weight) write_graph_to_filepath(G, outfile)", "def strainotype_cli(): pass @strainotype_cli.command('merge') @click.option('-m', '--min-weight', default=2) @click.option('-o', '--outfile', type=click.File('w'), default='-') @click.argument('filepaths', nargs=-1)", ".api import merge_filter_graphs_from_filepaths @click.group('strainotype') def strainotype_cli(): pass @strainotype_cli.command('merge') @click.option('-m', '--min-weight', default=2) @click.option('-o', '--outfile',", "<reponame>nanusefue/CAP2-1 import click from .io import write_graph_to_filepath from .api import merge_filter_graphs_from_filepaths @click.group('strainotype') def", "import merge_filter_graphs_from_filepaths @click.group('strainotype') def strainotype_cli(): pass @strainotype_cli.command('merge') @click.option('-m', '--min-weight', default=2) @click.option('-o', '--outfile', type=click.File('w'),", "write_graph_to_filepath from .api import merge_filter_graphs_from_filepaths @click.group('strainotype') def strainotype_cli(): pass @strainotype_cli.command('merge') @click.option('-m', '--min-weight', default=2)", "@click.option('-o', '--outfile', type=click.File('w'), default='-') 
@click.argument('filepaths', nargs=-1) def merge_graphs_cli(min_weight, outfile, filepaths): G = merge_filter_graphs_from_filepaths(filepaths,", "click from .io import write_graph_to_filepath from .api import merge_filter_graphs_from_filepaths @click.group('strainotype') def strainotype_cli(): pass", ".io import write_graph_to_filepath from .api import merge_filter_graphs_from_filepaths @click.group('strainotype') def strainotype_cli(): pass @strainotype_cli.command('merge') @click.option('-m',", "import click from .io import write_graph_to_filepath from .api import merge_filter_graphs_from_filepaths @click.group('strainotype') def strainotype_cli():", "type=click.File('w'), default='-') @click.argument('filepaths', nargs=-1) def merge_graphs_cli(min_weight, outfile, filepaths): G = merge_filter_graphs_from_filepaths(filepaths, min_weight=min_weight) write_graph_to_filepath(G,", "from .api import merge_filter_graphs_from_filepaths @click.group('strainotype') def strainotype_cli(): pass @strainotype_cli.command('merge') @click.option('-m', '--min-weight', default=2) @click.option('-o',", "import write_graph_to_filepath from .api import merge_filter_graphs_from_filepaths @click.group('strainotype') def strainotype_cli(): pass @strainotype_cli.command('merge') @click.option('-m', '--min-weight',", "default=2) @click.option('-o', '--outfile', type=click.File('w'), default='-') @click.argument('filepaths', nargs=-1) def merge_graphs_cli(min_weight, outfile, filepaths): G =", "merge_filter_graphs_from_filepaths @click.group('strainotype') def strainotype_cli(): pass @strainotype_cli.command('merge') @click.option('-m', '--min-weight', default=2) @click.option('-o', '--outfile', type=click.File('w'), default='-')", "pass @strainotype_cli.command('merge') @click.option('-m', '--min-weight', default=2) @click.option('-o', '--outfile', type=click.File('w'), default='-') @click.argument('filepaths', nargs=-1) def merge_graphs_cli(min_weight,", 
"@strainotype_cli.command('merge') @click.option('-m', '--min-weight', default=2) @click.option('-o', '--outfile', type=click.File('w'), default='-') @click.argument('filepaths', nargs=-1) def merge_graphs_cli(min_weight, outfile,", "strainotype_cli(): pass @strainotype_cli.command('merge') @click.option('-m', '--min-weight', default=2) @click.option('-o', '--outfile', type=click.File('w'), default='-') @click.argument('filepaths', nargs=-1) def", "@click.option('-m', '--min-weight', default=2) @click.option('-o', '--outfile', type=click.File('w'), default='-') @click.argument('filepaths', nargs=-1) def merge_graphs_cli(min_weight, outfile, filepaths):", "'--min-weight', default=2) @click.option('-o', '--outfile', type=click.File('w'), default='-') @click.argument('filepaths', nargs=-1) def merge_graphs_cli(min_weight, outfile, filepaths): G" ]
[ ".conanbuilder.configreader import ConfigReader from .conanbuilder.package import Package from .conanbuilder.runner import Runner from .conanbuilder.signature", "cli_ui as ui import deserialize from conans.client.conan_api import Conan from .conanbuilder.configreader import ConfigReader", "import deserialize from conans.client.conan_api import Conan from .conanbuilder.configreader import ConfigReader from .conanbuilder.package import", "config_reader_from_string(json.load(json_file)) except IOError: ui.fatal(\"Config file not accessible or readable\") return ConfigReader() def config_reader_from_string(load:", "conan_files: conan_packages.append(Package(conan_factory, signature, file)) return conan_packages def get_runner(config_reader: ConfigReader, root: str) -> Runner:", "Runner = get_runner(self.config, root) def sources(self, source_folder: str = \"\") -> None: self.runner.get_all_sources(source_folder)", "str) -> None: self.runner.add_all_remotes(self.config.remotes, username, password) def remove(self, source_folder: str = \"\") ->", "str = \"\") -> None: self.runner.remove_all_sources(source_folder) def create(self) -> None: self.runner.export_all() self.runner.create_all(self.config.configurations) def", "file in conan_files: conan_packages.append(Package(conan_factory, signature, file)) return conan_packages def get_runner(config_reader: ConfigReader, root: str)", "def __init__(self, config_file_path: str, root: str): self.config: ConfigReader = config_reader_from_file(config_file_path) self.runner: Runner =", "def config_reader_from_file(file: str) -> ConfigReader: try: with open(file, encoding=\"utf-8\") as json_file: return config_reader_from_string(json.load(json_file))", "find_all_conanfiles_to_be_processed(root_path) conan_packages = [] for file in conan_files: conan_packages.append(Package(conan_factory, signature, file)) return conan_packages", "import Package from .conanbuilder.runner import Runner from .conanbuilder.signature import Signature 
class MumocoAPI: def", "conan_packages = [] for file in conan_files: conan_packages.append(Package(conan_factory, signature, file)) return conan_packages def", "not accessible or readable\") return ConfigReader() def config_reader_from_string(load: str) -> ConfigReader: reader: ConfigReader", "or readable\") return ConfigReader() def config_reader_from_string(load: str) -> ConfigReader: reader: ConfigReader = deserialize.deserialize(ConfigReader,", "Package from .conanbuilder.runner import Runner from .conanbuilder.signature import Signature class MumocoAPI: def __init__(self,", "as json_file: return config_reader_from_string(json.load(json_file)) except IOError: ui.fatal(\"Config file not accessible or readable\") return", "ConfigReader, root: str) -> Runner: conan_factory, _, _ = Conan.factory() packages = find_all_packages_to_processed(conan_factory,", "find_all_packages_to_processed(conan_factory: Conan, root_path: str, signature: Signature) -> List[Package]: conan_files = find_all_conanfiles_to_be_processed(root_path) conan_packages =", "root_path: str, signature: Signature) -> List[Package]: conan_files = find_all_conanfiles_to_be_processed(root_path) conan_packages = [] for", "packages = find_all_packages_to_processed(conan_factory, root, config_reader.signature) return Runner(conan_factory, packages) def config_reader_from_file(file: str) -> ConfigReader:", "Signature class MumocoAPI: def __init__(self, config_file_path: str, root: str): self.config: ConfigReader = config_reader_from_file(config_file_path)", ".conanbuilder.runner import Runner from .conanbuilder.signature import Signature class MumocoAPI: def __init__(self, config_file_path: str,", "-> ConfigReader: try: with open(file, encoding=\"utf-8\") as json_file: return config_reader_from_string(json.load(json_file)) except IOError: ui.fatal(\"Config", "import cli_ui as ui import deserialize from conans.client.conan_api import Conan from .conanbuilder.configreader import", "from 
.conanbuilder.signature import Signature class MumocoAPI: def __init__(self, config_file_path: str, root: str): self.config:", "= [] for path in Path(root_path).rglob(\"conanfile.py\"): path_string = str(path.absolute()) if \"test_package\" not in", "str) -> ConfigReader: try: with open(file, encoding=\"utf-8\") as json_file: return config_reader_from_string(json.load(json_file)) except IOError:", "Conan, root_path: str, signature: Signature) -> List[Package]: conan_files = find_all_conanfiles_to_be_processed(root_path) conan_packages = []", "ui.fatal(\"Config file not accessible or readable\") return ConfigReader() def config_reader_from_string(load: str) -> ConfigReader:", "upload(self, remote_name: str) -> None: self.runner.upload_all_packages(remote_name) def find_all_conanfiles_to_be_processed(root_path: str) -> List[str]: conan_files =", "None: self.runner.upload_all_packages(remote_name) def find_all_conanfiles_to_be_processed(root_path: str) -> List[str]: conan_files = [] for path in", "encoding=\"utf-8\") as json_file: return config_reader_from_string(json.load(json_file)) except IOError: ui.fatal(\"Config file not accessible or readable\")", "pathlib import Path from typing import List import cli_ui as ui import deserialize", "source_folder: str = \"\") -> None: self.runner.remove_all_sources(source_folder) def create(self) -> None: self.runner.export_all() self.runner.create_all(self.config.configurations)", "\"\") -> None: self.runner.remove_all_sources(source_folder) def create(self) -> None: self.runner.export_all() self.runner.create_all(self.config.configurations) def upload(self, remote_name:", "Path(root_path).rglob(\"conanfile.py\"): path_string = str(path.absolute()) if \"test_package\" not in path_string: conan_files.append(path_string) return conan_files def", "-> None: self.runner.add_all_remotes(self.config.remotes, username, password) def remove(self, source_folder: str = \"\") -> None:", "config_reader.signature) return 
Runner(conan_factory, packages) def config_reader_from_file(file: str) -> ConfigReader: try: with open(file, encoding=\"utf-8\")", "self.config: ConfigReader = config_reader_from_file(config_file_path) self.runner: Runner = get_runner(self.config, root) def sources(self, source_folder: str", "str) -> None: self.runner.upload_all_packages(remote_name) def find_all_conanfiles_to_be_processed(root_path: str) -> List[str]: conan_files = [] for", "str, password: str) -> None: self.runner.add_all_remotes(self.config.remotes, username, password) def remove(self, source_folder: str =", "\"test_package\" not in path_string: conan_files.append(path_string) return conan_files def find_all_packages_to_processed(conan_factory: Conan, root_path: str, signature:", "_, _ = Conan.factory() packages = find_all_packages_to_processed(conan_factory, root, config_reader.signature) return Runner(conan_factory, packages) def", "str): self.config: ConfigReader = config_reader_from_file(config_file_path) self.runner: Runner = get_runner(self.config, root) def sources(self, source_folder:", "self.runner: Runner = get_runner(self.config, root) def sources(self, source_folder: str = \"\") -> None:", "not in path_string: conan_files.append(path_string) return conan_files def find_all_packages_to_processed(conan_factory: Conan, root_path: str, signature: Signature)", "Runner(conan_factory, packages) def config_reader_from_file(file: str) -> ConfigReader: try: with open(file, encoding=\"utf-8\") as json_file:", "Runner from .conanbuilder.signature import Signature class MumocoAPI: def __init__(self, config_file_path: str, root: str):", "-> List[Package]: conan_files = find_all_conanfiles_to_be_processed(root_path) conan_packages = [] for file in conan_files: conan_packages.append(Package(conan_factory,", "self.runner.get_all_sources(source_folder) def add_remotes(self, username: str, password: str) -> None: self.runner.add_all_remotes(self.config.remotes, username, password) def", "password: str) 
-> None: self.runner.add_all_remotes(self.config.remotes, username, password) def remove(self, source_folder: str = \"\")", "find_all_packages_to_processed(conan_factory, root, config_reader.signature) return Runner(conan_factory, packages) def config_reader_from_file(file: str) -> ConfigReader: try: with", "root) def sources(self, source_folder: str = \"\") -> None: self.runner.get_all_sources(source_folder) def add_remotes(self, username:", "str, root: str): self.config: ConfigReader = config_reader_from_file(config_file_path) self.runner: Runner = get_runner(self.config, root) def", "with open(file, encoding=\"utf-8\") as json_file: return config_reader_from_string(json.load(json_file)) except IOError: ui.fatal(\"Config file not accessible", "return config_reader_from_string(json.load(json_file)) except IOError: ui.fatal(\"Config file not accessible or readable\") return ConfigReader() def", "= find_all_conanfiles_to_be_processed(root_path) conan_packages = [] for file in conan_files: conan_packages.append(Package(conan_factory, signature, file)) return", "username, password) def remove(self, source_folder: str = \"\") -> None: self.runner.remove_all_sources(source_folder) def create(self)", ".conanbuilder.signature import Signature class MumocoAPI: def __init__(self, config_file_path: str, root: str): self.config: ConfigReader", "accessible or readable\") return ConfigReader() def config_reader_from_string(load: str) -> ConfigReader: reader: ConfigReader =", "create(self) -> None: self.runner.export_all() self.runner.create_all(self.config.configurations) def upload(self, remote_name: str) -> None: self.runner.upload_all_packages(remote_name) def", "= \"\") -> None: self.runner.get_all_sources(source_folder) def add_remotes(self, username: str, password: str) -> None:", "add_remotes(self, username: str, password: str) -> None: self.runner.add_all_remotes(self.config.remotes, username, password) def remove(self, source_folder:", "from typing import List import 
cli_ui as ui import deserialize from conans.client.conan_api import", "return Runner(conan_factory, packages) def config_reader_from_file(file: str) -> ConfigReader: try: with open(file, encoding=\"utf-8\") as", "List import cli_ui as ui import deserialize from conans.client.conan_api import Conan from .conanbuilder.configreader", "= get_runner(self.config, root) def sources(self, source_folder: str = \"\") -> None: self.runner.get_all_sources(source_folder) def", "def create(self) -> None: self.runner.export_all() self.runner.create_all(self.config.configurations) def upload(self, remote_name: str) -> None: self.runner.upload_all_packages(remote_name)", "conan_files def find_all_packages_to_processed(conan_factory: Conan, root_path: str, signature: Signature) -> List[Package]: conan_files = find_all_conanfiles_to_be_processed(root_path)", "= [] for file in conan_files: conan_packages.append(Package(conan_factory, signature, file)) return conan_packages def get_runner(config_reader:", "conan_files = find_all_conanfiles_to_be_processed(root_path) conan_packages = [] for file in conan_files: conan_packages.append(Package(conan_factory, signature, file))", "IOError: ui.fatal(\"Config file not accessible or readable\") return ConfigReader() def config_reader_from_string(load: str) ->", "__init__(self, config_file_path: str, root: str): self.config: ConfigReader = config_reader_from_file(config_file_path) self.runner: Runner = get_runner(self.config,", "-> None: self.runner.get_all_sources(source_folder) def add_remotes(self, username: str, password: str) -> None: self.runner.add_all_remotes(self.config.remotes, username,", "_ = Conan.factory() packages = find_all_packages_to_processed(conan_factory, root, config_reader.signature) return Runner(conan_factory, packages) def config_reader_from_file(file:", "str) -> Runner: conan_factory, _, _ = Conan.factory() packages = find_all_packages_to_processed(conan_factory, root, config_reader.signature)", "import Signature class 
MumocoAPI: def __init__(self, config_file_path: str, root: str): self.config: ConfigReader =", "file)) return conan_packages def get_runner(config_reader: ConfigReader, root: str) -> Runner: conan_factory, _, _", "self.runner.remove_all_sources(source_folder) def create(self) -> None: self.runner.export_all() self.runner.create_all(self.config.configurations) def upload(self, remote_name: str) -> None:", "open(file, encoding=\"utf-8\") as json_file: return config_reader_from_string(json.load(json_file)) except IOError: ui.fatal(\"Config file not accessible or", "username: str, password: str) -> None: self.runner.add_all_remotes(self.config.remotes, username, password) def remove(self, source_folder: str", "import json from pathlib import Path from typing import List import cli_ui as", "ConfigReader: try: with open(file, encoding=\"utf-8\") as json_file: return config_reader_from_string(json.load(json_file)) except IOError: ui.fatal(\"Config file", "import Conan from .conanbuilder.configreader import ConfigReader from .conanbuilder.package import Package from .conanbuilder.runner import", "Conan from .conanbuilder.configreader import ConfigReader from .conanbuilder.package import Package from .conanbuilder.runner import Runner", "root: str): self.config: ConfigReader = config_reader_from_file(config_file_path) self.runner: Runner = get_runner(self.config, root) def sources(self,", "return conan_files def find_all_packages_to_processed(conan_factory: Conan, root_path: str, signature: Signature) -> List[Package]: conan_files =", "= find_all_packages_to_processed(conan_factory, root, config_reader.signature) return Runner(conan_factory, packages) def config_reader_from_file(file: str) -> ConfigReader: try:", "None: self.runner.remove_all_sources(source_folder) def create(self) -> None: self.runner.export_all() self.runner.create_all(self.config.configurations) def upload(self, remote_name: str) ->", "self.runner.export_all() 
self.runner.create_all(self.config.configurations) def upload(self, remote_name: str) -> None: self.runner.upload_all_packages(remote_name) def find_all_conanfiles_to_be_processed(root_path: str) ->", "path in Path(root_path).rglob(\"conanfile.py\"): path_string = str(path.absolute()) if \"test_package\" not in path_string: conan_files.append(path_string) return", "= str(path.absolute()) if \"test_package\" not in path_string: conan_files.append(path_string) return conan_files def find_all_packages_to_processed(conan_factory: Conan,", "import ConfigReader from .conanbuilder.package import Package from .conanbuilder.runner import Runner from .conanbuilder.signature import", "json from pathlib import Path from typing import List import cli_ui as ui", "deserialize from conans.client.conan_api import Conan from .conanbuilder.configreader import ConfigReader from .conanbuilder.package import Package", "def find_all_conanfiles_to_be_processed(root_path: str) -> List[str]: conan_files = [] for path in Path(root_path).rglob(\"conanfile.py\"): path_string", "[] for path in Path(root_path).rglob(\"conanfile.py\"): path_string = str(path.absolute()) if \"test_package\" not in path_string:", "conan_files = [] for path in Path(root_path).rglob(\"conanfile.py\"): path_string = str(path.absolute()) if \"test_package\" not", "get_runner(self.config, root) def sources(self, source_folder: str = \"\") -> None: self.runner.get_all_sources(source_folder) def add_remotes(self,", "str, signature: Signature) -> List[Package]: conan_files = find_all_conanfiles_to_be_processed(root_path) conan_packages = [] for file", "from conans.client.conan_api import Conan from .conanbuilder.configreader import ConfigReader from .conanbuilder.package import Package from", "typing import List import cli_ui as ui import deserialize from conans.client.conan_api import Conan", "signature, file)) return conan_packages def get_runner(config_reader: ConfigReader, root: str) -> Runner: conan_factory, _,", 
"self.runner.add_all_remotes(self.config.remotes, username, password) def remove(self, source_folder: str = \"\") -> None: self.runner.remove_all_sources(source_folder) def", "file not accessible or readable\") return ConfigReader() def config_reader_from_string(load: str) -> ConfigReader: reader:", "def upload(self, remote_name: str) -> None: self.runner.upload_all_packages(remote_name) def find_all_conanfiles_to_be_processed(root_path: str) -> List[str]: conan_files", "List[str]: conan_files = [] for path in Path(root_path).rglob(\"conanfile.py\"): path_string = str(path.absolute()) if \"test_package\"", "conan_files.append(path_string) return conan_files def find_all_packages_to_processed(conan_factory: Conan, root_path: str, signature: Signature) -> List[Package]: conan_files", "-> List[str]: conan_files = [] for path in Path(root_path).rglob(\"conanfile.py\"): path_string = str(path.absolute()) if", "ConfigReader() def config_reader_from_string(load: str) -> ConfigReader: reader: ConfigReader = deserialize.deserialize(ConfigReader, load) return reader", "try: with open(file, encoding=\"utf-8\") as json_file: return config_reader_from_string(json.load(json_file)) except IOError: ui.fatal(\"Config file not", "def find_all_packages_to_processed(conan_factory: Conan, root_path: str, signature: Signature) -> List[Package]: conan_files = find_all_conanfiles_to_be_processed(root_path) conan_packages", "Runner: conan_factory, _, _ = Conan.factory() packages = find_all_packages_to_processed(conan_factory, root, config_reader.signature) return Runner(conan_factory,", "= \"\") -> None: self.runner.remove_all_sources(source_folder) def create(self) -> None: self.runner.export_all() self.runner.create_all(self.config.configurations) def upload(self,", "if \"test_package\" not in path_string: conan_files.append(path_string) return conan_files def find_all_packages_to_processed(conan_factory: Conan, root_path: str,", "for path in Path(root_path).rglob(\"conanfile.py\"): 
path_string = str(path.absolute()) if \"test_package\" not in path_string: conan_files.append(path_string)", "root, config_reader.signature) return Runner(conan_factory, packages) def config_reader_from_file(file: str) -> ConfigReader: try: with open(file,", "import Path from typing import List import cli_ui as ui import deserialize from", "def sources(self, source_folder: str = \"\") -> None: self.runner.get_all_sources(source_folder) def add_remotes(self, username: str,", "self.runner.create_all(self.config.configurations) def upload(self, remote_name: str) -> None: self.runner.upload_all_packages(remote_name) def find_all_conanfiles_to_be_processed(root_path: str) -> List[str]:", "def get_runner(config_reader: ConfigReader, root: str) -> Runner: conan_factory, _, _ = Conan.factory() packages", "from .conanbuilder.runner import Runner from .conanbuilder.signature import Signature class MumocoAPI: def __init__(self, config_file_path:", "from .conanbuilder.configreader import ConfigReader from .conanbuilder.package import Package from .conanbuilder.runner import Runner from", "Signature) -> List[Package]: conan_files = find_all_conanfiles_to_be_processed(root_path) conan_packages = [] for file in conan_files:", "except IOError: ui.fatal(\"Config file not accessible or readable\") return ConfigReader() def config_reader_from_string(load: str)", "path_string: conan_files.append(path_string) return conan_files def find_all_packages_to_processed(conan_factory: Conan, root_path: str, signature: Signature) -> List[Package]:", "List[Package]: conan_files = find_all_conanfiles_to_be_processed(root_path) conan_packages = [] for file in conan_files: conan_packages.append(Package(conan_factory, signature,", "get_runner(config_reader: ConfigReader, root: str) -> Runner: conan_factory, _, _ = Conan.factory() packages =", "conans.client.conan_api import Conan from .conanbuilder.configreader import ConfigReader from .conanbuilder.package import Package from .conanbuilder.runner", 
"remote_name: str) -> None: self.runner.upload_all_packages(remote_name) def find_all_conanfiles_to_be_processed(root_path: str) -> List[str]: conan_files = []", "readable\") return ConfigReader() def config_reader_from_string(load: str) -> ConfigReader: reader: ConfigReader = deserialize.deserialize(ConfigReader, load)", "from .conanbuilder.package import Package from .conanbuilder.runner import Runner from .conanbuilder.signature import Signature class", "\"\") -> None: self.runner.get_all_sources(source_folder) def add_remotes(self, username: str, password: str) -> None: self.runner.add_all_remotes(self.config.remotes,", "[] for file in conan_files: conan_packages.append(Package(conan_factory, signature, file)) return conan_packages def get_runner(config_reader: ConfigReader,", "config_reader_from_file(file: str) -> ConfigReader: try: with open(file, encoding=\"utf-8\") as json_file: return config_reader_from_string(json.load(json_file)) except", "-> None: self.runner.upload_all_packages(remote_name) def find_all_conanfiles_to_be_processed(root_path: str) -> List[str]: conan_files = [] for path", ".conanbuilder.package import Package from .conanbuilder.runner import Runner from .conanbuilder.signature import Signature class MumocoAPI:", "class MumocoAPI: def __init__(self, config_file_path: str, root: str): self.config: ConfigReader = config_reader_from_file(config_file_path) self.runner:", "find_all_conanfiles_to_be_processed(root_path: str) -> List[str]: conan_files = [] for path in Path(root_path).rglob(\"conanfile.py\"): path_string =", "path_string = str(path.absolute()) if \"test_package\" not in path_string: conan_files.append(path_string) return conan_files def find_all_packages_to_processed(conan_factory:", "conan_packages def get_runner(config_reader: ConfigReader, root: str) -> Runner: conan_factory, _, _ = Conan.factory()", "json_file: return config_reader_from_string(json.load(json_file)) except IOError: ui.fatal(\"Config file not accessible or 
readable\") return ConfigReader()", "conan_packages.append(Package(conan_factory, signature, file)) return conan_packages def get_runner(config_reader: ConfigReader, root: str) -> Runner: conan_factory,", "ConfigReader from .conanbuilder.package import Package from .conanbuilder.runner import Runner from .conanbuilder.signature import Signature", "in conan_files: conan_packages.append(Package(conan_factory, signature, file)) return conan_packages def get_runner(config_reader: ConfigReader, root: str) ->", "from pathlib import Path from typing import List import cli_ui as ui import", "ConfigReader = config_reader_from_file(config_file_path) self.runner: Runner = get_runner(self.config, root) def sources(self, source_folder: str =", "remove(self, source_folder: str = \"\") -> None: self.runner.remove_all_sources(source_folder) def create(self) -> None: self.runner.export_all()", "as ui import deserialize from conans.client.conan_api import Conan from .conanbuilder.configreader import ConfigReader from", "self.runner.upload_all_packages(remote_name) def find_all_conanfiles_to_be_processed(root_path: str) -> List[str]: conan_files = [] for path in Path(root_path).rglob(\"conanfile.py\"):", "def add_remotes(self, username: str, password: str) -> None: self.runner.add_all_remotes(self.config.remotes, username, password) def remove(self,", "def remove(self, source_folder: str = \"\") -> None: self.runner.remove_all_sources(source_folder) def create(self) -> None:", "in path_string: conan_files.append(path_string) return conan_files def find_all_packages_to_processed(conan_factory: Conan, root_path: str, signature: Signature) ->", "config_reader_from_file(config_file_path) self.runner: Runner = get_runner(self.config, root) def sources(self, source_folder: str = \"\") ->", "packages) def config_reader_from_file(file: str) -> ConfigReader: try: with open(file, encoding=\"utf-8\") as json_file: return", "-> Runner: conan_factory, _, _ = Conan.factory() packages = 
find_all_packages_to_processed(conan_factory, root, config_reader.signature) return", "return ConfigReader() def config_reader_from_string(load: str) -> ConfigReader: reader: ConfigReader = deserialize.deserialize(ConfigReader, load) return", "password) def remove(self, source_folder: str = \"\") -> None: self.runner.remove_all_sources(source_folder) def create(self) ->", "-> None: self.runner.export_all() self.runner.create_all(self.config.configurations) def upload(self, remote_name: str) -> None: self.runner.upload_all_packages(remote_name) def find_all_conanfiles_to_be_processed(root_path:", "ui import deserialize from conans.client.conan_api import Conan from .conanbuilder.configreader import ConfigReader from .conanbuilder.package", "conan_factory, _, _ = Conan.factory() packages = find_all_packages_to_processed(conan_factory, root, config_reader.signature) return Runner(conan_factory, packages)", "str) -> List[str]: conan_files = [] for path in Path(root_path).rglob(\"conanfile.py\"): path_string = str(path.absolute())", "str = \"\") -> None: self.runner.get_all_sources(source_folder) def add_remotes(self, username: str, password: str) ->", "config_file_path: str, root: str): self.config: ConfigReader = config_reader_from_file(config_file_path) self.runner: Runner = get_runner(self.config, root)", "for file in conan_files: conan_packages.append(Package(conan_factory, signature, file)) return conan_packages def get_runner(config_reader: ConfigReader, root:", "-> None: self.runner.remove_all_sources(source_folder) def create(self) -> None: self.runner.export_all() self.runner.create_all(self.config.configurations) def upload(self, remote_name: str)", "return conan_packages def get_runner(config_reader: ConfigReader, root: str) -> Runner: conan_factory, _, _ =", "Conan.factory() packages = find_all_packages_to_processed(conan_factory, root, config_reader.signature) return Runner(conan_factory, packages) def config_reader_from_file(file: str) ->", "= 
config_reader_from_file(config_file_path) self.runner: Runner = get_runner(self.config, root) def sources(self, source_folder: str = \"\")", "str(path.absolute()) if \"test_package\" not in path_string: conan_files.append(path_string) return conan_files def find_all_packages_to_processed(conan_factory: Conan, root_path:", "source_folder: str = \"\") -> None: self.runner.get_all_sources(source_folder) def add_remotes(self, username: str, password: str)", "None: self.runner.export_all() self.runner.create_all(self.config.configurations) def upload(self, remote_name: str) -> None: self.runner.upload_all_packages(remote_name) def find_all_conanfiles_to_be_processed(root_path: str)", "= Conan.factory() packages = find_all_packages_to_processed(conan_factory, root, config_reader.signature) return Runner(conan_factory, packages) def config_reader_from_file(file: str)", "MumocoAPI: def __init__(self, config_file_path: str, root: str): self.config: ConfigReader = config_reader_from_file(config_file_path) self.runner: Runner", "None: self.runner.add_all_remotes(self.config.remotes, username, password) def remove(self, source_folder: str = \"\") -> None: self.runner.remove_all_sources(source_folder)", "in Path(root_path).rglob(\"conanfile.py\"): path_string = str(path.absolute()) if \"test_package\" not in path_string: conan_files.append(path_string) return conan_files", "signature: Signature) -> List[Package]: conan_files = find_all_conanfiles_to_be_processed(root_path) conan_packages = [] for file in", "root: str) -> Runner: conan_factory, _, _ = Conan.factory() packages = find_all_packages_to_processed(conan_factory, root,", "import List import cli_ui as ui import deserialize from conans.client.conan_api import Conan from", "Path from typing import List import cli_ui as ui import deserialize from conans.client.conan_api", "None: self.runner.get_all_sources(source_folder) def add_remotes(self, username: str, password: str) -> None: 
self.runner.add_all_remotes(self.config.remotes, username, password)", "import Runner from .conanbuilder.signature import Signature class MumocoAPI: def __init__(self, config_file_path: str, root:", "sources(self, source_folder: str = \"\") -> None: self.runner.get_all_sources(source_folder) def add_remotes(self, username: str, password:" ]
[ "# Generated by Django 2.0.3 on 2018-04-05 07:55 from django.db import migrations, models", "dependencies = [ ('kyokigo', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='kyokigo_input', name='ownurl', ),", "operations = [ migrations.RemoveField( model_name='kyokigo_input', name='ownurl', ), migrations.AlterField( model_name='kyokigo_input', name='text', field=models.CharField(max_length=100, verbose_name='テキスト'), ),", "migrations, models class Migration(migrations.Migration): dependencies = [ ('kyokigo', '0001_initial'), ] operations = [", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('kyokigo', '0001_initial'), ] operations =", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('kyokigo', '0001_initial'), ] operations", "on 2018-04-05 07:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "2018-04-05 07:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('kyokigo',", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('kyokigo', '0001_initial'), ]", "'0001_initial'), ] operations = [ migrations.RemoveField( model_name='kyokigo_input', name='ownurl', ), migrations.AlterField( model_name='kyokigo_input', name='text', field=models.CharField(max_length=100,", "('kyokigo', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='kyokigo_input', name='ownurl', ), migrations.AlterField( model_name='kyokigo_input', name='text',", "by Django 2.0.3 on 2018-04-05 07:55 from django.db import migrations, models class Migration(migrations.Migration):", "2.0.3 on 2018-04-05 07:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "Django 2.0.3 on 2018-04-05 07:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "] operations = [ 
migrations.RemoveField( model_name='kyokigo_input', name='ownurl', ), migrations.AlterField( model_name='kyokigo_input', name='text', field=models.CharField(max_length=100, verbose_name='テキスト'),", "Generated by Django 2.0.3 on 2018-04-05 07:55 from django.db import migrations, models class", "07:55 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('kyokigo', '0001_initial'),", "= [ ('kyokigo', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='kyokigo_input', name='ownurl', ), migrations.AlterField(", "[ ('kyokigo', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='kyokigo_input', name='ownurl', ), migrations.AlterField( model_name='kyokigo_input',", "models class Migration(migrations.Migration): dependencies = [ ('kyokigo', '0001_initial'), ] operations = [ migrations.RemoveField(", "Migration(migrations.Migration): dependencies = [ ('kyokigo', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='kyokigo_input', name='ownurl',", "class Migration(migrations.Migration): dependencies = [ ('kyokigo', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='kyokigo_input',", "= [ migrations.RemoveField( model_name='kyokigo_input', name='ownurl', ), migrations.AlterField( model_name='kyokigo_input', name='text', field=models.CharField(max_length=100, verbose_name='テキスト'), ), ]" ]
[ "from db.sql.migration_of_db.tweet_migration_big.psql_tweet_mig_queries import psql_connector_twitter_mig from db.sql.migration_of_db.tweet_migration_big.big_queries_sql import big_connector_twitter_mig import data_collection.altdata_service.twitter.object_function.tweet_cleaner as cleaner def", "migration') tweet_mig(table_name='tweet_cashtag_clone') print('job done') def tweet_mig(table_name): psql_conn = psql_connector_twitter_mig() big_conn = big_connector_twitter_mig() tweet_df", "= psql_conn.get_twitter(table_name) if tweet_df.empty: break tweet_df = cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name) t", "print('job done') def tweet_mig(table_name): psql_conn = psql_connector_twitter_mig() big_conn = big_connector_twitter_mig() tweet_df = psql_conn.get_twitter(table_name)", "t += len(tweet_df) print('we have processed ' + str(t) + ' rows') if", "if tweet_df.empty: break tweet_df = cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name) t += len(tweet_df)", "break tweet_df = cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name) t += len(tweet_df) print('we have", "import data_collection.altdata_service.twitter.object_function.tweet_cleaner as cleaner def migration_tweet_tables(): print('start tweet migration') #tweet_mig(table_name='tweet_clone') print('start tweet_castag migration')", "tweet_df = cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name) t += len(tweet_df) print('we have processed", "big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name) t 
+= len(tweet_df) print('we have processed ' + str(t)", "psql_conn.get_twitter(table_name) if tweet_df.empty: break tweet_df = cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name) t +=", "tweet_mig(table_name='tweet_cashtag_clone') print('job done') def tweet_mig(table_name): psql_conn = psql_connector_twitter_mig() big_conn = big_connector_twitter_mig() tweet_df =", "from db.sql.migration_of_db.tweet_migration_big.big_queries_sql import big_connector_twitter_mig import data_collection.altdata_service.twitter.object_function.tweet_cleaner as cleaner def migration_tweet_tables(): print('start tweet migration')", "psql_connector_twitter_mig from db.sql.migration_of_db.tweet_migration_big.big_queries_sql import big_connector_twitter_mig import data_collection.altdata_service.twitter.object_function.tweet_cleaner as cleaner def migration_tweet_tables(): print('start tweet", "big_connector_twitter_mig import data_collection.altdata_service.twitter.object_function.tweet_cleaner as cleaner def migration_tweet_tables(): print('start tweet migration') #tweet_mig(table_name='tweet_clone') print('start tweet_castag", "table_name=table_name) t += len(tweet_df) print('we have processed ' + str(t) + ' rows')", "table_name.replace('_clone', '') tweet_df = psql_conn.get_twitter(table_name) if tweet_df.empty: break tweet_df = cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table)", "= 0 while not tweet_df.empty: bi_table = table_name.replace('_clone', '') tweet_df = psql_conn.get_twitter(table_name) if", "data_collection.altdata_service.twitter.object_function.tweet_cleaner as cleaner def migration_tweet_tables(): print('start tweet migration') #tweet_mig(table_name='tweet_clone') print('start tweet_castag migration') tweet_mig(table_name='tweet_cashtag_clone')", "def tweet_mig(table_name): psql_conn = psql_connector_twitter_mig() 
big_conn = big_connector_twitter_mig() tweet_df = psql_conn.get_twitter(table_name) t =", "while not tweet_df.empty: bi_table = table_name.replace('_clone', '') tweet_df = psql_conn.get_twitter(table_name) if tweet_df.empty: break", "'') tweet_df = psql_conn.get_twitter(table_name) if tweet_df.empty: break tweet_df = cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df,", "#tweet_mig(table_name='tweet_clone') print('start tweet_castag migration') tweet_mig(table_name='tweet_cashtag_clone') print('job done') def tweet_mig(table_name): psql_conn = psql_connector_twitter_mig() big_conn", "tweet_df.empty: break tweet_df = cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name) t += len(tweet_df) print('we", "print('we have processed ' + str(t) + ' rows') if __name__ == \"__main__\":", "= table_name.replace('_clone', '') tweet_df = psql_conn.get_twitter(table_name) if tweet_df.empty: break tweet_df = cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df,", "import psql_connector_twitter_mig from db.sql.migration_of_db.tweet_migration_big.big_queries_sql import big_connector_twitter_mig import data_collection.altdata_service.twitter.object_function.tweet_cleaner as cleaner def migration_tweet_tables(): print('start", "print('start tweet migration') #tweet_mig(table_name='tweet_clone') print('start tweet_castag migration') tweet_mig(table_name='tweet_cashtag_clone') print('job done') def tweet_mig(table_name): psql_conn", "= cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name) t += len(tweet_df) print('we have processed '", "cleaner def migration_tweet_tables(): print('start tweet migration') #tweet_mig(table_name='tweet_clone') print('start 
tweet_castag migration') tweet_mig(table_name='tweet_cashtag_clone') print('job done')", "tweet_df = psql_conn.get_twitter(table_name) t = 0 while not tweet_df.empty: bi_table = table_name.replace('_clone', '')", "psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name) t += len(tweet_df) print('we have processed ' + str(t) + '", "= psql_connector_twitter_mig() big_conn = big_connector_twitter_mig() tweet_df = psql_conn.get_twitter(table_name) t = 0 while not", "psql_conn.get_twitter(table_name) t = 0 while not tweet_df.empty: bi_table = table_name.replace('_clone', '') tweet_df =", "db.sql.migration_of_db.tweet_migration_big.big_queries_sql import big_connector_twitter_mig import data_collection.altdata_service.twitter.object_function.tweet_cleaner as cleaner def migration_tweet_tables(): print('start tweet migration') #tweet_mig(table_name='tweet_clone')", "tweet_castag migration') tweet_mig(table_name='tweet_cashtag_clone') print('job done') def tweet_mig(table_name): psql_conn = psql_connector_twitter_mig() big_conn = big_connector_twitter_mig()", "psql_connector_twitter_mig() big_conn = big_connector_twitter_mig() tweet_df = psql_conn.get_twitter(table_name) t = 0 while not tweet_df.empty:", "big_connector_twitter_mig() tweet_df = psql_conn.get_twitter(table_name) t = 0 while not tweet_df.empty: bi_table = table_name.replace('_clone',", "tweet migration') #tweet_mig(table_name='tweet_clone') print('start tweet_castag migration') tweet_mig(table_name='tweet_cashtag_clone') print('job done') def tweet_mig(table_name): psql_conn =", "migration') #tweet_mig(table_name='tweet_clone') print('start tweet_castag migration') tweet_mig(table_name='tweet_cashtag_clone') print('job done') def tweet_mig(table_name): psql_conn = psql_connector_twitter_mig()", "table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name) t += len(tweet_df) print('we have processed ' + str(t) +", "psql_conn = psql_connector_twitter_mig() big_conn = 
big_connector_twitter_mig() tweet_df = psql_conn.get_twitter(table_name) t = 0 while", "as cleaner def migration_tweet_tables(): print('start tweet migration') #tweet_mig(table_name='tweet_clone') print('start tweet_castag migration') tweet_mig(table_name='tweet_cashtag_clone') print('job", "db.sql.migration_of_db.tweet_migration_big.psql_tweet_mig_queries import psql_connector_twitter_mig from db.sql.migration_of_db.tweet_migration_big.big_queries_sql import big_connector_twitter_mig import data_collection.altdata_service.twitter.object_function.tweet_cleaner as cleaner def migration_tweet_tables():", "cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name) t += len(tweet_df) print('we have processed ' +", "done') def tweet_mig(table_name): psql_conn = psql_connector_twitter_mig() big_conn = big_connector_twitter_mig() tweet_df = psql_conn.get_twitter(table_name) t", "print('start tweet_castag migration') tweet_mig(table_name='tweet_cashtag_clone') print('job done') def tweet_mig(table_name): psql_conn = psql_connector_twitter_mig() big_conn =", "not tweet_df.empty: bi_table = table_name.replace('_clone', '') tweet_df = psql_conn.get_twitter(table_name) if tweet_df.empty: break tweet_df", "+= len(tweet_df) print('we have processed ' + str(t) + ' rows') if __name__", "bi_table = table_name.replace('_clone', '') tweet_df = psql_conn.get_twitter(table_name) if tweet_df.empty: break tweet_df = cleaner.clean_df_for_db(tweet_df)", "= big_connector_twitter_mig() tweet_df = psql_conn.get_twitter(table_name) t = 0 while not tweet_df.empty: bi_table =", "tweet_df.empty: bi_table = table_name.replace('_clone', '') tweet_df = psql_conn.get_twitter(table_name) if tweet_df.empty: break tweet_df =", "migration_tweet_tables(): print('start tweet migration') #tweet_mig(table_name='tweet_clone') print('start tweet_castag migration') tweet_mig(table_name='tweet_cashtag_clone') 
print('job done') def tweet_mig(table_name):", "import big_connector_twitter_mig import data_collection.altdata_service.twitter.object_function.tweet_cleaner as cleaner def migration_tweet_tables(): print('start tweet migration') #tweet_mig(table_name='tweet_clone') print('start", "0 while not tweet_df.empty: bi_table = table_name.replace('_clone', '') tweet_df = psql_conn.get_twitter(table_name) if tweet_df.empty:", "big_conn = big_connector_twitter_mig() tweet_df = psql_conn.get_twitter(table_name) t = 0 while not tweet_df.empty: bi_table", "def migration_tweet_tables(): print('start tweet migration') #tweet_mig(table_name='tweet_clone') print('start tweet_castag migration') tweet_mig(table_name='tweet_cashtag_clone') print('job done') def", "len(tweet_df) print('we have processed ' + str(t) + ' rows') if __name__ ==", "tweet_df = psql_conn.get_twitter(table_name) if tweet_df.empty: break tweet_df = cleaner.clean_df_for_db(tweet_df) big_conn.insert_into_tweet(df=tweet_df, table_name=bi_table) psql_conn.delete_imported_tweets(df=tweet_df, table_name=table_name)", "tweet_mig(table_name): psql_conn = psql_connector_twitter_mig() big_conn = big_connector_twitter_mig() tweet_df = psql_conn.get_twitter(table_name) t = 0", "have processed ' + str(t) + ' rows') if __name__ == \"__main__\": migration_tweet_tables()", "= psql_conn.get_twitter(table_name) t = 0 while not tweet_df.empty: bi_table = table_name.replace('_clone', '') tweet_df", "t = 0 while not tweet_df.empty: bi_table = table_name.replace('_clone', '') tweet_df = psql_conn.get_twitter(table_name)" ]
[ "secret_time = cover_time return cover_time, secret_time def recv(self, size, timeout=0): \"\"\"Reads the RstegTCP", "None self.rtcp.psh_event.wait(timeout) if len(self.rtcp.ingress_buffer) != 0: # check if empty if len(self.rtcp.ingress_buffer) <=", "receive ACK while not res: #print(rto) # Wait for ack event or timeout", "= time.time() - rtt rttvar = srtt / 2 rto = srtt +", "listen(self): \"\"\"Starts the RstegTCP module.\"\"\" self.rtcp.restart(self.rprob, self.sport) self.rtcp.start() self.listening = True def accept(self):", "not self.listening: self.listen() self.rtcp.connect(host, port) while self.rtcp.state != State.ESTAB: pass def send(self, data):", "waiting to new pushed data in the buffer :return: \"\"\" data = None", "# Destination port self.dst = host # Destination host self.rprob = rprob self.rtcp", "0 rto = 1 alpha = 1/8 beta = 1/4 first_measurement = True", "cover_time return cover_time, secret_time def recv(self, size, timeout=0): \"\"\"Reads the RstegTCP data buffer", "srtt + max(g, k * rttvar) # Send cover else: chunk = cover[:1446]", "rprob, host=None, dport=None, sport=49512): \"\"\"Class constructor.\"\"\" self.sport = sport # Source port, defaults", "data and the secret according to the MSS. 
The data and secret will", "# Do the same for the secret secret_chunks = [] interval = 1444", "wait_and_recv(self): \"\"\"Waits until end_event is set before accessing to the data buffer.\"\"\" data", "length # Slice the binary data in chunks the size of the payload", "timeout as expected self.rtcp.send_secret() n += 1 else: self.rtcp.ack_event.clear() if first_measurement: srtt =", "received self.rtcp.ack_event.clear() if first_measurement: srtt = time.time() - rtt rttvar = srtt /", "for RstegTcp that offers socket primitives for communicating like Python sockets.\"\"\" def __init__(self,", "interval]) self.rtcp.secret_chunks = secret_chunks n = 0 start_time = time.time() # RTO vars", "-*- coding: UTF-8 -*- # Author: <NAME> <<EMAIL>> from rsteg_tcp import RstegTcp from", "False # while we don't receive ACK while not res: #print(rto) # Wait", "rttvar = (1- beta) * rttvar + beta * abs(srtt - (time.time() -", "the secret according to the MSS. The data and secret will be sent", "False # Socket is listening on sport def bind(self, host, port): \"\"\"Configures the", "* rttvar + beta * abs(srtt - (time.time() - rtt)) srtt = (1", "received self.rtcp.ack_event.clear() if first_measurement: srtt = time.time() - rtt rttvar = srtt/2 rto", "def accept(self): \"\"\"Waits for a established TCP connection.\"\"\" while self.rtcp.state != State.ESTAB: pass", "RstegTCP data buffer for new recv data. :param size: integer for the data", "* 1444) + ' secret bytes') secret_time = cover_time return cover_time, secret_time def", "int(k * rttvar)) first_measurement = False else: rttvar = (1 - beta) *", "srtt / 2 rto = srtt + max(g, int(k * rttvar)) first_measurement =", "#print(rto) # Wait for ack event or timeout res = self.rtcp.ack_event.wait(timeout=rto) if not", "secret will be sent to the TCP receiver with the RSTEG method. 
:param", "with the parameters supplied.\"\"\" self.dst = host self.sport = port self.rtcp.sport = self.sport", "delivered.') #print('# Secret Transfer time: %.2f' % round(self.rtcp.secret_endtime - start_time, 2)) secret_time =", "data buffer.\"\"\" data = [] self.rtcp.end_event.wait() if self.rtcp.ingress_buffer: data.append(self.rtcp.ingress_buffer) print('RECV ' + str(len(data[0]))", "while we don't receive ACK while not res: #print(rto) # Wait for ack", "self.rtcp.restart(self.rprob, self.sport) self.rtcp.start() self.listening = True def accept(self): \"\"\"Waits for a established TCP", "UTF-8 -*- # Author: <NAME> <<EMAIL>> from rsteg_tcp import RstegTcp from utils import", "= self.rtcp.ingress_buffer[:length] # take chunk self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[length:] return data else: data =", "= RstegTcp(self.sport, rprob) # Rsteg_Tcp instance self.f_index = 0 # Flags self.listening =", "secret: binary data to transmit during fake retransmission \"\"\" # Do the same", "secret secret_chunks = [] interval = 1444 for n in range(0, len(secret), interval):", "self.sport def listen(self): \"\"\"Starts the RstegTCP module.\"\"\" self.rtcp.restart(self.rprob, self.sport) self.rtcp.start() self.listening = True", "chunk length # Slice the binary data in chunks the size of the", "else: data = self.rtcp.ingress_buffer[:size] # take chunk self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[size:] return data else:", "secret!') #print('# Delivered ' + str(n * 1444) + ' secret bytes') secret_time", "str(len(data[1])) + ' SECRET BYTES') return data def close(self): \"\"\"Closes the TCP stream.\"\"\"", "<<EMAIL>> from rsteg_tcp import RstegTcp from utils import State, retrans_prob import time class", "- (time.time() - rtt)) srtt = (1 - alpha) * srtt + alpha", "secret has been sent. 
if not self.rtcp.secret_sent: self.rtcp.secret_signal = retrans_prob(self.rtcp.retrans_prob) else: self.rtcp.secret_signal =", "def bind(self, host, port): \"\"\"Configures the socket with the parameters supplied.\"\"\" self.dst =", "= cover[:1446] cover = cover[1446:] self.rtcp.send_data(chunk) # data without signal # set timer", "host self.sport = port self.rtcp.sport = self.sport def listen(self): \"\"\"Starts the RstegTCP module.\"\"\"", "#print('# Cover Transfer time: %.2f' % round(time.time() - start_time, 2)) cover_time = round(time.time()", "max(g, k * rttvar) # Update secret_signal flag according to the retrans_prob except", "host self.rprob = rprob self.rtcp = RstegTcp(self.sport, rprob) # Rsteg_Tcp instance self.f_index =", "self.rtcp.retrans_data(chunk) else: # ack received self.rtcp.ack_event.clear() if first_measurement: srtt = time.time() - rtt", "self.listen() self.rtcp.connect(host, port) while self.rtcp.state != State.ESTAB: pass def send(self, data): \"\"\"Chunks the", "data else: # if buffer is empty return None return data def wait_and_recv(self):", "while self.rtcp.state != State.ESTAB: pass def connect(self, host, port): \"\"\"Establishes a TCP connection", "rttvar = (1 - beta) * rttvar + beta * abs(srtt - (time.time()", "seconds for waiting to new pushed data in the buffer :return: \"\"\" data", "def send(self, data): \"\"\"Chunks the data according to MSS and sends it to", "first_measurement = False else: rttvar = (1- beta) * rttvar + beta *", "1 else: self.rtcp.ack_event.clear() if first_measurement: srtt = time.time() - rtt rttvar = srtt", "+ interval]) # RTO vars k = 4 g = 0.05 srtt =", "res = self.rtcp.ack_event.wait(timeout=rto) if not res: #timeout self.rtcp.retrans_data(chunk) else: #ack received self.rtcp.ack_event.clear() if", ":return: \"\"\" data = None self.rtcp.psh_event.wait(timeout) if len(self.rtcp.ingress_buffer) != 0: # check if", "don't receive ACK while not res: #print(rto) # Wait for ack event or", "= srtt + max(g, k * 
rttvar) # Update secret_signal flag according to", "1 alpha = 1/8 beta = 1/4 first_measurement = True # Send cover", "before accessing to the data buffer.\"\"\" data = [] self.rtcp.end_event.wait() if self.rtcp.ingress_buffer: data.append(self.rtcp.ingress_buffer)", "end_event is set before accessing to the data buffer.\"\"\" data = [] self.rtcp.end_event.wait()", "to the TCP receiver with the RSTEG method. :param cover: binary data to", "(time.time() - rtt) rto = srtt + max(g, k * rttvar) # Send", "or timeout res = self.rtcp.ack_event.wait(timeout=rto) if not res: # timeout self.rtcp.retrans_data(chunk) else: #", "= self.rtcp.ack_event.wait(timeout=rto) if not res: #timeout self.rtcp.retrans_data(chunk) else: #ack received self.rtcp.ack_event.clear() if first_measurement:", "+ str(len(data[0])) + ' BYTES') if self.rtcp.ingress_secret_buffer: data.append(self.rtcp.ingress_secret_buffer) print('RECV ' + str(len(data[1])) +", "!= State.ESTAB: pass def send(self, data): \"\"\"Chunks the data according to MSS and", "binary data in chunks the size of the payload length for n in", "%.2f' % round(time.time() - start_time, 2)) cover_time = round(time.time() - start_time, 2) if", "self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[length:] return data else: data = self.rtcp.ingress_buffer[:size] # take chunk self.rtcp.ingress_buffer", "= round(time.time() - start_time, 2) if self.rtcp.secret_sent: #print('Secret successfully delivered.') #print('# Secret Transfer", "!= 0: # check if empty if len(self.rtcp.ingress_buffer) <= size: # length =", "= 1/8 beta = 1/4 first_measurement = True # Send chunks for chunk", "dport # Destination port self.dst = host # Destination host self.rprob = rprob", "False break #print('# Cover Transfer time: %.2f' % round(time.time() - start_time, 2)) cover_time", "# ack received self.rtcp.ack_event.clear() if first_measurement: srtt = time.time() - rtt rttvar =", "srtt + alpha * (time.time() - rtt) rto = srtt + max(g, k", "# take chunk 
self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[length:] return data else: data = self.rtcp.ingress_buffer[:size] #", "+ alpha * (time.time() - rtt) rto = srtt + max(g, k *", "port): \"\"\"Configures the socket with the parameters supplied.\"\"\" self.dst = host self.sport =", "buffer for new recv data. :param size: integer for the data read size", "size, timeout=0): \"\"\"Reads the RstegTCP data buffer for new recv data. :param size:", "= cover[:1414] cover = cover[1414:] self.rtcp.send_data(chunk) # data with signal rtt = time.time()", "interval = 1444 for n in range(0, len(secret), interval): secret_chunks.append(secret[n:n + interval]) self.rtcp.secret_chunks", "to the data buffer.\"\"\" data = [] self.rtcp.end_event.wait() if self.rtcp.ingress_buffer: data.append(self.rtcp.ingress_buffer) print('RECV '", "if not res: #timeout self.rtcp.retrans_data(chunk) else: #ack received self.rtcp.ack_event.clear() if first_measurement: srtt =", "signal # set timer rtt = time.time() res = False # while we", "chunk self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[size:] return data else: # if buffer is empty return", "import time class RstegSocket: \"\"\"A wrapper for RstegTcp that offers socket primitives for", "rtt) rto = srtt + max(g, k*rttvar) def rsend(self, cover, secret): \"\"\"Chunks the", "= cover[1414:] self.rtcp.send_data(chunk) # data with signal rtt = time.time() res = False", "the parameters supplied.\"\"\" self.dst = host self.sport = port self.rtcp.sport = self.sport def", "Wait for ack event or timeout res = self.rtcp.ack_event.wait(timeout=rto) if not res: #", "the secret!') #print('# Delivered ' + str(n * 1444) + ' secret bytes')", "#!/usr/bin/python3 # -*- coding: UTF-8 -*- # Author: <NAME> <<EMAIL>> from rsteg_tcp import", "if not res: # timeout as expected self.rtcp.send_secret() n += 1 else: self.rtcp.ack_event.clear()", "# RTO vars k = 4 g = 0.05 srtt = 0 rttvar", "receiver with the RSTEG method. 
:param cover: binary data to transmit as cover", "retrans_prob except if the secret has been sent. if not self.rtcp.secret_sent: self.rtcp.secret_signal =", "* srtt + alpha * (time.time() - rtt) rto = srtt + max(g,", "start_time, 2) if self.rtcp.secret_sent: #print('Secret successfully delivered.') #print('# Secret Transfer time: %.2f' %", "= cover_time return cover_time, secret_time def recv(self, size, timeout=0): \"\"\"Reads the RstegTCP data", "host on port.\"\"\" if not self.listening: self.listen() self.rtcp.connect(host, port) while self.rtcp.state != State.ESTAB:", "socket with the parameters supplied.\"\"\" self.dst = host self.sport = port self.rtcp.sport =", "True # Send cover while len(cover) > 0: # Send cover signal and", "to the MSS. The data and secret will be sent to the TCP", "rttvar + beta * abs(srtt - (time.time() - rtt)) srtt = (1 -", "# Send cover while len(cover) > 0: # Send cover signal and secret", "0.05 srtt = 0 rttvar = 0 rto = 1 alpha = 1/8", "data according to MSS and sends it to the TCP receiver.\"\"\" data_chunks =", "- rtt) rto = srtt + max(g, k * rttvar) # Send cover", "= 0 rto = 1 alpha = 1/8 beta = 1/4 first_measurement =", "timeout: seconds for waiting to new pushed data in the buffer :return: \"\"\"", "1/4 first_measurement = True # Send chunks for chunk in data_chunks: self.rtcp.send_data(chunk) #", "2) else: #print('# Cover data ended before delivering all the secret!') #print('# Delivered", "for communicating like Python sockets.\"\"\" def __init__(self, rprob, host=None, dport=None, sport=49512): \"\"\"Class constructor.\"\"\"", "while not res: #print(rto) res = self.rtcp.ack_event.wait(timeout=rto) if not res: # timeout as", "- start_time, 2) else: #print('# Cover data ended before delivering all the secret!')", "n in range(0, len(data), interval): data_chunks.append(data[n:n + interval]) # RTO vars k =", "sport=49512): \"\"\"Class constructor.\"\"\" self.sport = sport # Source port, defaults to 49512 self.dport", "# timeout 
as expected self.rtcp.send_secret() n += 1 else: self.rtcp.ack_event.clear() if first_measurement: srtt", "= False while not res: #print(rto) res = self.rtcp.ack_event.wait(timeout=rto) if not res: #", "data. :param size: integer for the data read size :param timeout: seconds for", "recv(self, size, timeout=0): \"\"\"Reads the RstegTCP data buffer for new recv data. :param", "<= size: # length = len(self.rtcp.ingress_buffer) data = self.rtcp.ingress_buffer[:length] # take chunk self.rtcp.ingress_buffer", "according to MSS and sends it to the TCP receiver.\"\"\" data_chunks = []", "sockets.\"\"\" def __init__(self, rprob, host=None, dport=None, sport=49512): \"\"\"Class constructor.\"\"\" self.sport = sport #", "Source port, defaults to 49512 self.dport = dport # Destination port self.dst =", "return data else: data = self.rtcp.ingress_buffer[:size] # take chunk self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[size:] return", "srtt + max(g, k * rttvar) # Update secret_signal flag according to the", "not res: #print(rto) res = self.rtcp.ack_event.wait(timeout=rto) if not res: # timeout as expected", "1/4 first_measurement = True # Send cover while len(cover) > 0: # Send", "-*- # Author: <NAME> <<EMAIL>> from rsteg_tcp import RstegTcp from utils import State,", "module.\"\"\" self.rtcp.restart(self.rprob, self.sport) self.rtcp.start() self.listening = True def accept(self): \"\"\"Waits for a established", "sent to the TCP receiver with the RSTEG method. 
:param cover: binary data", "rtt) rto = srtt + max(g, k * rttvar) # Send cover else:", "empty if len(self.rtcp.ingress_buffer) <= size: # length = len(self.rtcp.ingress_buffer) data = self.rtcp.ingress_buffer[:length] #", "self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[size:] return data else: # if buffer is empty return None", "def wait_and_recv(self): \"\"\"Waits until end_event is set before accessing to the data buffer.\"\"\"", "self.rtcp.end_event.wait() if self.rtcp.ingress_buffer: data.append(self.rtcp.ingress_buffer) print('RECV ' + str(len(data[0])) + ' BYTES') if self.rtcp.ingress_secret_buffer:", "1 alpha = 1/8 beta = 1/4 first_measurement = True # Send chunks", "ack event or timeout res = self.rtcp.ack_event.wait(timeout=rto) if not res: #timeout self.rtcp.retrans_data(chunk) else:", "# timeout self.rtcp.retrans_data(chunk) else: # ack received self.rtcp.ack_event.clear() if first_measurement: srtt = time.time()", "= [] self.rtcp.end_event.wait() if self.rtcp.ingress_buffer: data.append(self.rtcp.ingress_buffer) print('RECV ' + str(len(data[0])) + ' BYTES')", "length = len(self.rtcp.ingress_buffer) data = self.rtcp.ingress_buffer[:length] # take chunk self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[length:] return", "= time.time() - rtt rttvar = srtt/2 rto = srtt + max(g, int(k*rttvar))", "str(n * 1444) + ' secret bytes') secret_time = cover_time return cover_time, secret_time", "rtt rttvar = srtt/2 rto = srtt + max(g, int(k*rttvar)) first_measurement = False", "#ack received self.rtcp.ack_event.clear() if first_measurement: srtt = time.time() - rtt rttvar = srtt/2", "self.rtcp.secret_signal: chunk = cover[:1414] cover = cover[1414:] self.rtcp.send_data(chunk) # data with signal rtt", "srtt = (1 - alpha) * srtt + alpha * (time.time() - rtt)", "= self.rtcp.ingress_buffer[size:] return data else: # if buffer is empty return None return", "for the secret secret_chunks = [] interval = 1444 for n in range(0,", "for ack event or timeout res = 
self.rtcp.ack_event.wait(timeout=rto) if not res: # timeout", "\"\"\"Waits until end_event is set before accessing to the data buffer.\"\"\" data =", "interval): secret_chunks.append(secret[n:n + interval]) self.rtcp.secret_chunks = secret_chunks n = 0 start_time = time.time()", "the TCP receiver with the RSTEG method. :param cover: binary data to transmit", "connect(self, host, port): \"\"\"Establishes a TCP connection with the host on port.\"\"\" if", "data = None self.rtcp.psh_event.wait(timeout) if len(self.rtcp.ingress_buffer) != 0: # check if empty if", "round(time.time() - start_time, 2) if self.rtcp.secret_sent: #print('Secret successfully delivered.') #print('# Secret Transfer time:", "RstegTcp that offers socket primitives for communicating like Python sockets.\"\"\" def __init__(self, rprob,", "# Update secret_signal flag according to the retrans_prob except if the secret has", "as expected self.rtcp.send_secret() n += 1 else: self.rtcp.ack_event.clear() if first_measurement: srtt = time.time()", "rttvar = srtt/2 rto = srtt + max(g, int(k*rttvar)) first_measurement = False else:", "RSTEG method. 
class RstegSocket:
    """A wrapper for RstegTcp that offers socket primitives for communicating
    like Python sockets."""

    def __init__(self, rprob, host=None, dport=None, sport=49512):
        """Class constructor.

        :param rprob: retransmission probability for the RSTEG scheme
        :param host: destination host (may also be set later via bind/connect)
        :param dport: destination port
        :param sport: source port, defaults to 49512
        """
        self.sport = sport  # Source port, defaults to 49512
        self.dport = dport  # Destination port
        self.dst = host  # Destination host
        self.rprob = rprob
        self.rtcp = RstegTcp(self.sport, rprob)  # Rsteg_Tcp instance
        self.f_index = 0
        # Flags
        self.listening = False  # Socket is listening on sport

    def bind(self, host, port):
        """Configures the socket with the parameters supplied.

        :param host: destination host
        :param port: source port to listen on
        """
        self.dst = host
        self.sport = port
        self.rtcp.sport = self.sport

    def listen(self):
        """Starts the RstegTCP module."""
        self.rtcp.restart(self.rprob, self.sport)
        self.rtcp.start()
        self.listening = True

    def accept(self):
        """Waits for a established TCP connection."""
        # NOTE(review): busy-wait on the state machine; burns CPU until ESTAB.
        while self.rtcp.state != State.ESTAB:
            pass

    def connect(self, host, port):
        """Establishes a TCP connection with the host on port."""
        if not self.listening:
            self.listen()
        self.rtcp.connect(host, port)
        # Busy-wait until the three-way handshake completes.
        while self.rtcp.state != State.ESTAB:
            pass

    @staticmethod
    def _rto_update(rtt_start, srtt, rttvar, first_measurement,
                    k=4, g=0.05, alpha=1 / 8, beta=1 / 4):
        """One RFC 6298-style retransmission-timeout update step.

        :param rtt_start: time.time() taken when the segment was sent
        :param srtt: smoothed RTT estimate so far
        :param rttvar: RTT variance estimate so far
        :param first_measurement: True until the first RTT sample is taken
        :return: updated (srtt, rttvar, rto, first_measurement)
        """
        sample = time.time() - rtt_start
        if first_measurement:
            srtt = sample
            rttvar = srtt / 2
            # NOTE(review): int() truncation kept from the original code;
            # RFC 6298 uses K*RTTVAR directly — confirm intent.
            rto = srtt + max(g, int(k * rttvar))
            first_measurement = False
        else:
            rttvar = (1 - beta) * rttvar + beta * abs(srtt - sample)
            srtt = (1 - alpha) * srtt + alpha * sample
            rto = srtt + max(g, k * rttvar)
        return srtt, rttvar, rto, first_measurement

    def send(self, data):
        """Chunks the data according to MSS and sends it to the TCP receiver.

        :param data: binary data to transmit
        """
        interval = 1446  # payload chunk length
        # Slice the binary data in chunks the size of the payload length.
        data_chunks = [data[i:i + interval] for i in range(0, len(data), interval)]
        # RTO estimator state (see RFC 6298).
        srtt = 0
        rttvar = 0
        rto = 1
        first_measurement = True
        for chunk in data_chunks:
            self.rtcp.send_data(chunk)
            rtt = time.time()  # start the retransmission timer
            acked = False
            while not acked:
                # Wait for the ACK event or the retransmission timeout.
                acked = self.rtcp.ack_event.wait(timeout=rto)
                if not acked:  # timeout -> retransmit the chunk
                    self.rtcp.retrans_data(chunk)
                else:  # ACK received -> refresh the RTO estimate
                    self.rtcp.ack_event.clear()
                    srtt, rttvar, rto, first_measurement = self._rto_update(
                        rtt, srtt, rttvar, first_measurement)

    def rsend(self, cover, secret):
        """Chunks the data and the secret according to the MSS. The data and
        secret will be sent to the TCP receiver with the RSTEG method.

        :param cover: binary data to transmit as cover
        :param secret: binary data to transmit during fake retransmission
        :return: (cover_time, secret_time) transfer durations in seconds
        """
        # Slice the secret in chunks and hand them over to the TCP module.
        interval = 1444
        secret_chunks = [secret[i:i + interval]
                         for i in range(0, len(secret), interval)]
        self.rtcp.secret_chunks = secret_chunks
        n = 0  # secret chunks delivered through fake retransmissions
        start_time = time.time()
        # RTO estimator state (see RFC 6298).
        srtt = 0
        rttvar = 0
        rto = 1
        first_measurement = True
        # Send cover
        while len(cover) > 0:
            if self.rtcp.secret_signal:
                # Send cover with signal; the expected "lost ACK" lets the
                # fake retransmission carry a secret chunk instead.
                chunk = cover[:1414]
                cover = cover[1414:]
                self.rtcp.send_data(chunk)  # data with signal
                rtt = time.time()
                acked = False
                while not acked:
                    acked = self.rtcp.ack_event.wait(timeout=rto)
                    if not acked:  # timeout as expected
                        self.rtcp.send_secret()
                        n += 1
                    else:
                        self.rtcp.ack_event.clear()
                        srtt, rttvar, rto, first_measurement = self._rto_update(
                            rtt, srtt, rttvar, first_measurement)
            else:
                # Plain cover transmission, same scheme as send().
                chunk = cover[:1446]
                cover = cover[1446:]
                self.rtcp.send_data(chunk)  # data without signal
                rtt = time.time()
                acked = False
                while not acked:
                    acked = self.rtcp.ack_event.wait(timeout=rto)
                    if not acked:  # genuine timeout -> real retransmission
                        self.rtcp.retrans_data(chunk)
                    else:  # ACK received
                        self.rtcp.ack_event.clear()
                        srtt, rttvar, rto, first_measurement = self._rto_update(
                            rtt, srtt, rttvar, first_measurement)
            # Update secret_signal flag according to the retrans_prob except
            # if the secret has been sent.
            if not self.rtcp.secret_sent:
                self.rtcp.secret_signal = retrans_prob(self.rtcp.retrans_prob)
            else:
                self.rtcp.secret_signal = False
                # NOTE(review): kept from the original — stops sending even if
                # cover data remains once the whole secret is delivered.
                break
        cover_time = round(time.time() - start_time, 2)
        if self.rtcp.secret_sent:
            # Secret successfully delivered.
            secret_time = round(self.rtcp.secret_endtime - start_time, 2)
        else:
            # Cover data ended before delivering all the secret
            # (n * 1444 secret bytes were delivered).
            secret_time = cover_time
        return cover_time, secret_time

    def recv(self, size, timeout=0):
        """Reads the RstegTCP data buffer for new recv data.

        :param size: integer for the data read size
        :param timeout: seconds for waiting to new pushed data in the buffer
        :return: up to `size` bytes from the buffer, or None if it was empty
        """
        data = None
        self.rtcp.psh_event.wait(timeout)
        if len(self.rtcp.ingress_buffer) != 0:  # check if empty
            # Take at most `size` bytes from the head of the buffer.
            # (Fixes the original NameError: `length` was assigned only in a
            # commented-out line.)
            length = min(size, len(self.rtcp.ingress_buffer))
            data = self.rtcp.ingress_buffer[:length]  # take chunk
            self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[length:]
        return data

    def wait_and_recv(self):
        """Waits until end_event is set before accessing to the data buffer.

        :return: list with the cover data and, if present, the secret data
        """
        data = []
        self.rtcp.end_event.wait()
        if self.rtcp.ingress_buffer:
            data.append(self.rtcp.ingress_buffer)
            print('RECV ' + str(len(data[0])) + ' BYTES')
        if self.rtcp.ingress_secret_buffer:
            data.append(self.rtcp.ingress_secret_buffer)
            # data[-1] instead of data[1]: avoids an IndexError when only the
            # secret buffer is non-empty (same output when both are present).
            print('RECV ' + str(len(data[-1])) + ' SECRET BYTES')
        return data

    def close(self):
        """Closes the TCP stream."""
        self.rtcp.close()
        # Busy-wait until the teardown handshake reaches TIME_WAIT.
        while self.rtcp.state != State.TIME_WAIT:
            pass
if not self.rtcp.secret_sent: self.rtcp.secret_signal = retrans_prob(self.rtcp.retrans_prob) else: self.rtcp.secret_signal = False", "first_measurement = True # Send chunks for chunk in data_chunks: self.rtcp.send_data(chunk) # set", "= host self.sport = port self.rtcp.sport = self.sport def listen(self): \"\"\"Starts the RstegTCP", "to 49512 self.dport = dport # Destination port self.dst = host # Destination", "data to transmit during fake retransmission \"\"\" # Do the same for the", "to MSS and sends it to the TCP receiver.\"\"\" data_chunks = [] interval", "primitives for communicating like Python sockets.\"\"\" def __init__(self, rprob, host=None, dport=None, sport=49512): \"\"\"Class", "the TCP receiver.\"\"\" data_chunks = [] interval = 1446 # payload chunk length", "retransmission \"\"\" # Do the same for the secret secret_chunks = [] interval", "rto = 1 alpha = 1/8 beta = 1/4 first_measurement = True #", "max(g, int(k*rttvar)) first_measurement = False else: rttvar = (1- beta) * rttvar +", "self.listening = False # Socket is listening on sport def bind(self, host, port):", "chunk in data_chunks: self.rtcp.send_data(chunk) # set timer rtt = time.time() res = False", "if not res: # timeout self.rtcp.retrans_data(chunk) else: # ack received self.rtcp.ack_event.clear() if first_measurement:", "res = False # while we don't receive ACK while not res: #print(rto)", "(1 - beta) * rttvar + beta * abs(srtt - (time.time() - rtt))", "secret according to the MSS. 
The data and secret will be sent to", "TCP connection.\"\"\" while self.rtcp.state != State.ESTAB: pass def connect(self, host, port): \"\"\"Establishes a", "if self.rtcp.ingress_secret_buffer: data.append(self.rtcp.ingress_secret_buffer) print('RECV ' + str(len(data[1])) + ' SECRET BYTES') return data", "from rsteg_tcp import RstegTcp from utils import State, retrans_prob import time class RstegSocket:", "self.sport) self.rtcp.start() self.listening = True def accept(self): \"\"\"Waits for a established TCP connection.\"\"\"", "False else: rttvar = (1- beta) * rttvar + beta * abs(srtt -", "= secret_chunks n = 0 start_time = time.time() # RTO vars k =", "take chunk self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[length:] return data else: data = self.rtcp.ingress_buffer[:size] # take", "rtt) rto = srtt + max(g, k * rttvar) # Update secret_signal flag", "pushed data in the buffer :return: \"\"\" data = None self.rtcp.psh_event.wait(timeout) if len(self.rtcp.ingress_buffer)", "1444) + ' secret bytes') secret_time = cover_time return cover_time, secret_time def recv(self,", "break #print('# Cover Transfer time: %.2f' % round(time.time() - start_time, 2)) cover_time =", "rttvar) # Update secret_signal flag according to the retrans_prob except if the secret", "buffer.\"\"\" data = [] self.rtcp.end_event.wait() if self.rtcp.ingress_buffer: data.append(self.rtcp.ingress_buffer) print('RECV ' + str(len(data[0])) +", "int(k*rttvar)) first_measurement = False else: rttvar = (1- beta) * rttvar + beta", "= 0.05 srtt = 0 rttvar = 0 rto = 1 alpha =", "vars k = 4 g = 0.05 srtt = 0 rttvar = 0", "alpha) * srtt + alpha * (time.time() - rtt) rto = srtt +", "in range(0, len(secret), interval): secret_chunks.append(secret[n:n + interval]) self.rtcp.secret_chunks = secret_chunks n = 0", "Do the same for the secret secret_chunks = [] interval = 1444 for", "= srtt + max(g, k * rttvar) # Send cover else: chunk =", "Flags self.listening = False # Socket is listening on sport def 
bind(self, host,", "False else: rttvar = (1 - beta) * rttvar + beta * abs(srtt", "= srtt/2 rto = srtt + max(g, int(k*rttvar)) first_measurement = False else: rttvar", "Secret Transfer time: %.2f' % round(self.rtcp.secret_endtime - start_time, 2)) secret_time = round(self.rtcp.secret_endtime -", "= sport # Source port, defaults to 49512 self.dport = dport # Destination", "# Send cover else: chunk = cover[:1446] cover = cover[1446:] self.rtcp.send_data(chunk) # data", "cover[:1446] cover = cover[1446:] self.rtcp.send_data(chunk) # data without signal # set timer rtt", "size: # length = len(self.rtcp.ingress_buffer) data = self.rtcp.ingress_buffer[:length] # take chunk self.rtcp.ingress_buffer =", "srtt + alpha * (time.time() - rtt) rto = srtt + max(g, k*rttvar)", "self.sport = sport # Source port, defaults to 49512 self.dport = dport #", "TCP connection with the host on port.\"\"\" if not self.listening: self.listen() self.rtcp.connect(host, port)", "secret_chunks = [] interval = 1444 for n in range(0, len(secret), interval): secret_chunks.append(secret[n:n", "is empty return None return data def wait_and_recv(self): \"\"\"Waits until end_event is set", "host # Destination host self.rprob = rprob self.rtcp = RstegTcp(self.sport, rprob) # Rsteg_Tcp", "as cover :param secret: binary data to transmit during fake retransmission \"\"\" #", "cover = cover[1414:] self.rtcp.send_data(chunk) # data with signal rtt = time.time() res =", "# data without signal # set timer rtt = time.time() res = False", "\"\"\"Chunks the data according to MSS and sends it to the TCP receiver.\"\"\"", "str(len(data[0])) + ' BYTES') if self.rtcp.ingress_secret_buffer: data.append(self.rtcp.ingress_secret_buffer) print('RECV ' + str(len(data[1])) + '", "Cover data ended before delivering all the secret!') #print('# Delivered ' + str(n", "= False # Socket is listening on sport def bind(self, host, port): \"\"\"Configures", "rsteg_tcp import RstegTcp from utils import State, retrans_prob import time 
class RstegSocket: \"\"\"A", "that offers socket primitives for communicating like Python sockets.\"\"\" def __init__(self, rprob, host=None,", "timeout=0): \"\"\"Reads the RstegTCP data buffer for new recv data. :param size: integer", "except if the secret has been sent. if not self.rtcp.secret_sent: self.rtcp.secret_signal = retrans_prob(self.rtcp.retrans_prob)", "sent. if not self.rtcp.secret_sent: self.rtcp.secret_signal = retrans_prob(self.rtcp.retrans_prob) else: self.rtcp.secret_signal = False break #print('#", "= port self.rtcp.sport = self.sport def listen(self): \"\"\"Starts the RstegTCP module.\"\"\" self.rtcp.restart(self.rprob, self.sport)", "' + str(n * 1444) + ' secret bytes') secret_time = cover_time return", "= time.time() # RTO vars k = 4 g = 0.05 srtt =", "if len(self.rtcp.ingress_buffer) != 0: # check if empty if len(self.rtcp.ingress_buffer) <= size: #", "len(self.rtcp.ingress_buffer) != 0: # check if empty if len(self.rtcp.ingress_buffer) <= size: # length", "ACK while not res: #print(rto) # Wait for ack event or timeout res", "new pushed data in the buffer :return: \"\"\" data = None self.rtcp.psh_event.wait(timeout) if", "event or timeout res = self.rtcp.ack_event.wait(timeout=rto) if not res: #timeout self.rtcp.retrans_data(chunk) else: #ack", "data read size :param timeout: seconds for waiting to new pushed data in", "rprob) # Rsteg_Tcp instance self.f_index = 0 # Flags self.listening = False #", "[] interval = 1446 # payload chunk length # Slice the binary data", "and sends it to the TCP receiver.\"\"\" data_chunks = [] interval = 1446", "chunks the size of the payload length for n in range(0, len(data), interval):", "srtt/2 rto = srtt + max(g, int(k*rttvar)) first_measurement = False else: rttvar =", "alpha * (time.time() - rtt) rto = srtt + max(g, k * rttvar)", "self.rtcp.ingress_buffer[:size] # take chunk self.rtcp.ingress_buffer = self.rtcp.ingress_buffer[size:] return data else: # if buffer", "<NAME> <<EMAIL>> from rsteg_tcp import 
RstegTcp from utils import State, retrans_prob import time", "+ alpha * (time.time() - rtt) rto = srtt + max(g, k*rttvar) def", "# Send cover signal and secret if self.rtcp.secret_signal: chunk = cover[:1414] cover =", "round(time.time() - start_time, 2)) cover_time = round(time.time() - start_time, 2) if self.rtcp.secret_sent: #print('Secret", "size: integer for the data read size :param timeout: seconds for waiting to", "the secret secret_chunks = [] interval = 1444 for n in range(0, len(secret),", "port.\"\"\" if not self.listening: self.listen() self.rtcp.connect(host, port) while self.rtcp.state != State.ESTAB: pass def", "True def accept(self): \"\"\"Waits for a established TCP connection.\"\"\" while self.rtcp.state != State.ESTAB:", "+ max(g, int(k * rttvar)) first_measurement = False else: rttvar = (1 -", "rttvar)) first_measurement = False else: rttvar = (1 - beta) * rttvar +", "\"\"\"Starts the RstegTCP module.\"\"\" self.rtcp.restart(self.rprob, self.sport) self.rtcp.start() self.listening = True def accept(self): \"\"\"Waits", "= False else: rttvar = (1 - beta) * rttvar + beta *", "res = self.rtcp.ack_event.wait(timeout=rto) if not res: # timeout as expected self.rtcp.send_secret() n +=", "communicating like Python sockets.\"\"\" def __init__(self, rprob, host=None, dport=None, sport=49512): \"\"\"Class constructor.\"\"\" self.sport", "a TCP connection with the host on port.\"\"\" if not self.listening: self.listen() self.rtcp.connect(host,", "def rsend(self, cover, secret): \"\"\"Chunks the data and the secret according to the", "/ 2 rto = srtt + max(g, int(k * rttvar)) first_measurement = False", "a established TCP connection.\"\"\" while self.rtcp.state != State.ESTAB: pass def connect(self, host, port):", "and the secret according to the MSS. 
The data and secret will be", "- beta) * rttvar + beta * abs(srtt - (time.time() - rtt)) srtt", "secret_time def recv(self, size, timeout=0): \"\"\"Reads the RstegTCP data buffer for new recv", "cover else: chunk = cover[:1446] cover = cover[1446:] self.rtcp.send_data(chunk) # data without signal", "max(g, k*rttvar) def rsend(self, cover, secret): \"\"\"Chunks the data and the secret according", "not res: # timeout as expected self.rtcp.send_secret() n += 1 else: self.rtcp.ack_event.clear() if", "the RstegTCP module.\"\"\" self.rtcp.restart(self.rprob, self.sport) self.rtcp.start() self.listening = True def accept(self): \"\"\"Waits for", "rsend(self, cover, secret): \"\"\"Chunks the data and the secret according to the MSS.", "#print('# Cover data ended before delivering all the secret!') #print('# Delivered ' +", "the same for the secret secret_chunks = [] interval = 1444 for n", "if self.rtcp.secret_sent: #print('Secret successfully delivered.') #print('# Secret Transfer time: %.2f' % round(self.rtcp.secret_endtime -", "of the payload length for n in range(0, len(data), interval): data_chunks.append(data[n:n + interval])", "until end_event is set before accessing to the data buffer.\"\"\" data = []", "the data buffer.\"\"\" data = [] self.rtcp.end_event.wait() if self.rtcp.ingress_buffer: data.append(self.rtcp.ingress_buffer) print('RECV ' +", "= False break #print('# Cover Transfer time: %.2f' % round(time.time() - start_time, 2))", "buffer :return: \"\"\" data = None self.rtcp.psh_event.wait(timeout) if len(self.rtcp.ingress_buffer) != 0: # check", "BYTES') if self.rtcp.ingress_secret_buffer: data.append(self.rtcp.ingress_secret_buffer) print('RECV ' + str(len(data[1])) + ' SECRET BYTES') return", "cover: binary data to transmit as cover :param secret: binary data to transmit", "RTO vars k = 4 g = 0.05 srtt = 0 rttvar =", "(time.time() - rtt) rto = srtt + max(g, k * rttvar) # Update", "k = 4 g = 0.05 srtt = 0 rttvar = 0 rto", "if first_measurement: srtt = 
time.time() - rtt rttvar = srtt/2 rto = srtt", "Update secret_signal flag according to the retrans_prob except if the secret has been", "listening on sport def bind(self, host, port): \"\"\"Configures the socket with the parameters", "State, retrans_prob import time class RstegSocket: \"\"\"A wrapper for RstegTcp that offers socket", "= time.time() res = False while not res: #print(rto) res = self.rtcp.ack_event.wait(timeout=rto) if", "+ max(g, k*rttvar) def rsend(self, cover, secret): \"\"\"Chunks the data and the secret", "= [] interval = 1444 for n in range(0, len(secret), interval): secret_chunks.append(secret[n:n +", "time.time() res = False while not res: #print(rto) res = self.rtcp.ack_event.wait(timeout=rto) if not", "secret): \"\"\"Chunks the data and the secret according to the MSS. The data", "self.rtcp.ingress_buffer[size:] return data else: # if buffer is empty return None return data", "self.rtcp.send_data(chunk) # set timer rtt = time.time() res = False # while we", "= 1 alpha = 1/8 beta = 1/4 first_measurement = True # Send", "to transmit during fake retransmission \"\"\" # Do the same for the secret", "\"\"\"Reads the RstegTCP data buffer for new recv data. :param size: integer for", "# set timer rtt = time.time() res = False # while we don't", "be sent to the TCP receiver with the RSTEG method. :param cover: binary", "been sent. 
if not self.rtcp.secret_sent: self.rtcp.secret_signal = retrans_prob(self.rtcp.retrans_prob) else: self.rtcp.secret_signal = False break", "self.rtcp.state != State.ESTAB: pass def send(self, data): \"\"\"Chunks the data according to MSS", "k * rttvar) # Update secret_signal flag according to the retrans_prob except if", "self.rtcp.ack_event.clear() if first_measurement: srtt = time.time() - rtt rttvar = srtt / 2", "= False # while we don't receive ACK while not res: #print(rto) #", "Python sockets.\"\"\" def __init__(self, rprob, host=None, dport=None, sport=49512): \"\"\"Class constructor.\"\"\" self.sport = sport", "1446 # payload chunk length # Slice the binary data in chunks the", "rto = srtt + max(g, int(k*rttvar)) first_measurement = False else: rttvar = (1-", "res: #print(rto) res = self.rtcp.ack_event.wait(timeout=rto) if not res: # timeout as expected self.rtcp.send_secret()", "connection with the host on port.\"\"\" if not self.listening: self.listen() self.rtcp.connect(host, port) while", "<reponame>jahosp/rsteg-tcp<filename>rsteg_socket.py #!/usr/bin/python3 # -*- coding: UTF-8 -*- # Author: <NAME> <<EMAIL>> from rsteg_tcp", "if empty if len(self.rtcp.ingress_buffer) <= size: # length = len(self.rtcp.ingress_buffer) data = self.rtcp.ingress_buffer[:length]", "(1- beta) * rttvar + beta * abs(srtt - (time.time() - rtt)) srtt", "0 # Flags self.listening = False # Socket is listening on sport def", "data_chunks: self.rtcp.send_data(chunk) # set timer rtt = time.time() res = False # while", "with signal rtt = time.time() res = False while not res: #print(rto) res", "cover signal and secret if self.rtcp.secret_signal: chunk = cover[:1414] cover = cover[1414:] self.rtcp.send_data(chunk)", "(time.time() - rtt) rto = srtt + max(g, k*rttvar) def rsend(self, cover, secret):", "= 1/4 first_measurement = True # Send chunks for chunk in data_chunks: self.rtcp.send_data(chunk)", "4 g = 0.05 srtt = 0 rttvar = 0 rto = 1", "\"\"\"Class constructor.\"\"\" self.sport = 
sport # Source port, defaults to 49512 self.dport =", "% round(time.time() - start_time, 2)) cover_time = round(time.time() - start_time, 2) if self.rtcp.secret_sent:", "data def wait_and_recv(self): \"\"\"Waits until end_event is set before accessing to the data", "= (1 - alpha) * srtt + alpha * (time.time() - rtt) rto", "else: rttvar = (1 - beta) * rttvar + beta * abs(srtt -", "- rtt rttvar = srtt/2 rto = srtt + max(g, int(k*rttvar)) first_measurement =", "check if empty if len(self.rtcp.ingress_buffer) <= size: # length = len(self.rtcp.ingress_buffer) data =", "else: chunk = cover[:1446] cover = cover[1446:] self.rtcp.send_data(chunk) # data without signal #", "successfully delivered.') #print('# Secret Transfer time: %.2f' % round(self.rtcp.secret_endtime - start_time, 2)) secret_time", "if self.rtcp.ingress_buffer: data.append(self.rtcp.ingress_buffer) print('RECV ' + str(len(data[0])) + ' BYTES') if self.rtcp.ingress_secret_buffer: data.append(self.rtcp.ingress_secret_buffer)", "self.rtcp.ack_event.wait(timeout=rto) if not res: # timeout as expected self.rtcp.send_secret() n += 1 else:", "return data def close(self): \"\"\"Closes the TCP stream.\"\"\" self.rtcp.close() while self.rtcp.state != State.TIME_WAIT:", "is set before accessing to the data buffer.\"\"\" data = [] self.rtcp.end_event.wait() if", "self.rtcp.sport = self.sport def listen(self): \"\"\"Starts the RstegTCP module.\"\"\" self.rtcp.restart(self.rprob, self.sport) self.rtcp.start() self.listening", "RstegTcp(self.sport, rprob) # Rsteg_Tcp instance self.f_index = 0 # Flags self.listening = False", "Socket is listening on sport def bind(self, host, port): \"\"\"Configures the socket with", "range(0, len(secret), interval): secret_chunks.append(secret[n:n + interval]) self.rtcp.secret_chunks = secret_chunks n = 0 start_time", "else: self.rtcp.ack_event.clear() if first_measurement: srtt = time.time() - rtt rttvar = srtt /", "start_time, 2) else: #print('# Cover data ended before delivering all 
the secret!') #print('#", "cover, secret): \"\"\"Chunks the data and the secret according to the MSS. The", "= True # Send cover while len(cover) > 0: # Send cover signal", "self.rtcp.ack_event.wait(timeout=rto) if not res: #timeout self.rtcp.retrans_data(chunk) else: #ack received self.rtcp.ack_event.clear() if first_measurement: srtt", "for new recv data. :param size: integer for the data read size :param", "defaults to 49512 self.dport = dport # Destination port self.dst = host #", "start_time, 2)) secret_time = round(self.rtcp.secret_endtime - start_time, 2) else: #print('# Cover data ended", "def listen(self): \"\"\"Starts the RstegTCP module.\"\"\" self.rtcp.restart(self.rprob, self.sport) self.rtcp.start() self.listening = True def", "the data according to MSS and sends it to the TCP receiver.\"\"\" data_chunks", "else: # ack received self.rtcp.ack_event.clear() if first_measurement: srtt = time.time() - rtt rttvar", "beta = 1/4 first_measurement = True # Send cover while len(cover) > 0:", "TCP receiver with the RSTEG method. 
:param cover: binary data to transmit as", "during fake retransmission \"\"\" # Do the same for the secret secret_chunks =", "Delivered ' + str(n * 1444) + ' secret bytes') secret_time = cover_time", "= True def accept(self): \"\"\"Waits for a established TCP connection.\"\"\" while self.rtcp.state !=", "srtt = time.time() - rtt rttvar = srtt/2 rto = srtt + max(g,", "data in chunks the size of the payload length for n in range(0,", "sport def bind(self, host, port): \"\"\"Configures the socket with the parameters supplied.\"\"\" self.dst", "srtt + max(g, int(k*rttvar)) first_measurement = False else: rttvar = (1- beta) *", "retrans_prob(self.rtcp.retrans_prob) else: self.rtcp.secret_signal = False break #print('# Cover Transfer time: %.2f' % round(time.time()", "2)) cover_time = round(time.time() - start_time, 2) if self.rtcp.secret_sent: #print('Secret successfully delivered.') #print('#", "for waiting to new pushed data in the buffer :return: \"\"\" data =", "the size of the payload length for n in range(0, len(data), interval): data_chunks.append(data[n:n", "' + str(len(data[1])) + ' SECRET BYTES') return data def close(self): \"\"\"Closes the", "__init__(self, rprob, host=None, dport=None, sport=49512): \"\"\"Class constructor.\"\"\" self.sport = sport # Source port,", "= (1 - beta) * rttvar + beta * abs(srtt - (time.time() -", "pass def connect(self, host, port): \"\"\"Establishes a TCP connection with the host on", "Transfer time: %.2f' % round(time.time() - start_time, 2)) cover_time = round(time.time() - start_time,", "+ ' SECRET BYTES') return data def close(self): \"\"\"Closes the TCP stream.\"\"\" self.rtcp.close()", "n += 1 else: self.rtcp.ack_event.clear() if first_measurement: srtt = time.time() - rtt rttvar", "- start_time, 2)) cover_time = round(time.time() - start_time, 2) if self.rtcp.secret_sent: #print('Secret successfully", "def connect(self, host, port): \"\"\"Establishes a TCP connection with the host on port.\"\"\"", "dport=None, 
sport=49512): \"\"\"Class constructor.\"\"\" self.sport = sport # Source port, defaults to 49512", "!= State.ESTAB: pass def connect(self, host, port): \"\"\"Establishes a TCP connection with the", "round(self.rtcp.secret_endtime - start_time, 2) else: #print('# Cover data ended before delivering all the", "self.dst = host # Destination host self.rprob = rprob self.rtcp = RstegTcp(self.sport, rprob)", "= srtt + max(g, int(k*rttvar)) first_measurement = False else: rttvar = (1- beta)" ]
[ "nano import solvers geop = nano.Params( R = 35., H = 70., )", "from pore \" import nanopores as nano import solvers geop = nano.Params( R", "= 35., H = 70., ) physp = nano.Params( bulkcon = 1000., bV", "1000., bV = -1., ) geo, pnp = solvers.solve1D(geop, physp) solvers.visualize1D(geo, pnp) nano.showplots()", "= nano.Params( bulkcon = 1000., bV = -1., ) geo, pnp = solvers.solve1D(geop,", "far away from pore \" import nanopores as nano import solvers geop =", "as nano import solvers geop = nano.Params( R = 35., H = 70.,", "nano.Params( R = 35., H = 70., ) physp = nano.Params( bulkcon =", "geop = nano.Params( R = 35., H = 70., ) physp = nano.Params(", "= 70., ) physp = nano.Params( bulkcon = 1000., bV = -1., )", "\" 1D PNP, modelling reservoirs and membrane far away from pore \" import", "bulkcon = 1000., bV = -1., ) geo, pnp = solvers.solve1D(geop, physp) solvers.visualize1D(geo,", "H = 70., ) physp = nano.Params( bulkcon = 1000., bV = -1.,", "PNP, modelling reservoirs and membrane far away from pore \" import nanopores as", "reservoirs and membrane far away from pore \" import nanopores as nano import", "import solvers geop = nano.Params( R = 35., H = 70., ) physp", "away from pore \" import nanopores as nano import solvers geop = nano.Params(", "2016 <NAME> \" 1D PNP, modelling reservoirs and membrane far away from pore", "# (c) 2016 <NAME> \" 1D PNP, modelling reservoirs and membrane far away", "= 1000., bV = -1., ) geo, pnp = solvers.solve1D(geop, physp) solvers.visualize1D(geo, pnp)", "1D PNP, modelling reservoirs and membrane far away from pore \" import nanopores", "import nanopores as nano import solvers geop = nano.Params( R = 35., H", "solvers geop = nano.Params( R = 35., H = 70., ) physp =", "R = 35., H = 70., ) physp = nano.Params( bulkcon = 1000.,", "and membrane far away from pore \" import nanopores as nano import solvers", "nano.Params( bulkcon = 1000., bV = -1., ) geo, pnp = solvers.solve1D(geop, physp)", "pore \" import nanopores as nano import 
solvers geop = nano.Params( R =", "\" import nanopores as nano import solvers geop = nano.Params( R = 35.,", "70., ) physp = nano.Params( bulkcon = 1000., bV = -1., ) geo,", "modelling reservoirs and membrane far away from pore \" import nanopores as nano", "physp = nano.Params( bulkcon = 1000., bV = -1., ) geo, pnp =", "membrane far away from pore \" import nanopores as nano import solvers geop", "35., H = 70., ) physp = nano.Params( bulkcon = 1000., bV =", "nanopores as nano import solvers geop = nano.Params( R = 35., H =", "= nano.Params( R = 35., H = 70., ) physp = nano.Params( bulkcon", "<NAME> \" 1D PNP, modelling reservoirs and membrane far away from pore \"", ") physp = nano.Params( bulkcon = 1000., bV = -1., ) geo, pnp", "(c) 2016 <NAME> \" 1D PNP, modelling reservoirs and membrane far away from" ]
[ "Library\" content = Content(\"text/plain\", \"some text here\") mail = Mail(from_email, subject, to_email, content)", "with the v3 SendGrid API. Usage example: def build_hello_email(): to_email = from_email =", "body builder Builder for assembling emails to be sent with the v3 SendGrid", "mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return mail.get() # assembled request body For more usage examples, see https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail", "SendGrid API. Usage example: def build_hello_email(): to_email = from_email = Email(\"<EMAIL>\") subject =", "= from_email = Email(\"<EMAIL>\") subject = \"Hello World from the SendGrid Python Library\"", "\"some text here\") mail = Mail(from_email, subject, to_email, content) mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return mail.get() #", "For more usage examples, see https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail For more information on the v3 API,", "= Content(\"text/plain\", \"some text here\") mail = Mail(from_email, subject, to_email, content) mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return", "here\") mail = Mail(from_email, subject, to_email, content) mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return mail.get() # assembled request", "Content(\"text/plain\", \"some text here\") mail = Mail(from_email, subject, to_email, content) mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return mail.get()", "response body builder Builder for assembling emails to be sent with the v3", "usage examples, see https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail For more information on the v3 API, see https://sendgrid.com/docs/API_Reference/api_v3.html", "Builder for assembling emails to be sent with the v3 SendGrid API. 
Usage", "mail.get() # assembled request body For more usage examples, see https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail For more", "text here\") mail = Mail(from_email, subject, to_email, content) mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return mail.get() # assembled", "to_email, content) mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return mail.get() # assembled request body For more usage examples,", "to_email = from_email = Email(\"<EMAIL>\") subject = \"Hello World from the SendGrid Python", "for assembling emails to be sent with the v3 SendGrid API. Usage example:", "Usage example: def build_hello_email(): to_email = from_email = Email(\"<EMAIL>\") subject = \"Hello World", "from_email = Email(\"<EMAIL>\") subject = \"Hello World from the SendGrid Python Library\" content", "Email(\"<EMAIL>\") subject = \"Hello World from the SendGrid Python Library\" content = Content(\"text/plain\",", "content = Content(\"text/plain\", \"some text here\") mail = Mail(from_email, subject, to_email, content) mail.personalizations[0].add_to(Email(\"<EMAIL>\"))", "body For more usage examples, see https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail For more information on the v3", "build_hello_email(): to_email = from_email = Email(\"<EMAIL>\") subject = \"Hello World from the SendGrid", "be sent with the v3 SendGrid API. Usage example: def build_hello_email(): to_email =", "def build_hello_email(): to_email = from_email = Email(\"<EMAIL>\") subject = \"Hello World from the", "v3 SendGrid API. 
Usage example: def build_hello_email(): to_email = from_email = Email(\"<EMAIL>\") subject", "# assembled request body For more usage examples, see https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail For more information", "\"\"\"v3/mail/send response body builder Builder for assembling emails to be sent with the", "mail = Mail(from_email, subject, to_email, content) mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return mail.get() # assembled request body", "assembled request body For more usage examples, see https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail For more information on", "request body For more usage examples, see https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail For more information on the", "subject = \"Hello World from the SendGrid Python Library\" content = Content(\"text/plain\", \"some", "= Mail(from_email, subject, to_email, content) mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return mail.get() # assembled request body For", "content) mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return mail.get() # assembled request body For more usage examples, see", "builder Builder for assembling emails to be sent with the v3 SendGrid API.", "World from the SendGrid Python Library\" content = Content(\"text/plain\", \"some text here\") mail", "SendGrid Python Library\" content = Content(\"text/plain\", \"some text here\") mail = Mail(from_email, subject,", "examples, see https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail For more information on the v3 API, see https://sendgrid.com/docs/API_Reference/api_v3.html \"\"\"", "more usage examples, see https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail For more information on the v3 API, see", "Python Library\" content = Content(\"text/plain\", \"some text here\") mail = Mail(from_email, subject, to_email,", "= Email(\"<EMAIL>\") subject = \"Hello World from the 
SendGrid Python Library\" content =", "to be sent with the v3 SendGrid API. Usage example: def build_hello_email(): to_email", "sent with the v3 SendGrid API. Usage example: def build_hello_email(): to_email = from_email", "assembling emails to be sent with the v3 SendGrid API. Usage example: def", "from the SendGrid Python Library\" content = Content(\"text/plain\", \"some text here\") mail =", "subject, to_email, content) mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return mail.get() # assembled request body For more usage", "emails to be sent with the v3 SendGrid API. Usage example: def build_hello_email():", "the SendGrid Python Library\" content = Content(\"text/plain\", \"some text here\") mail = Mail(from_email,", "return mail.get() # assembled request body For more usage examples, see https://github.com/sendgrid/sendgrid-python/tree/master/examples/helpers/mail For", "API. Usage example: def build_hello_email(): to_email = from_email = Email(\"<EMAIL>\") subject = \"Hello", "the v3 SendGrid API. Usage example: def build_hello_email(): to_email = from_email = Email(\"<EMAIL>\")", "Mail(from_email, subject, to_email, content) mail.personalizations[0].add_to(Email(\"<EMAIL>\")) return mail.get() # assembled request body For more", "example: def build_hello_email(): to_email = from_email = Email(\"<EMAIL>\") subject = \"Hello World from", "\"Hello World from the SendGrid Python Library\" content = Content(\"text/plain\", \"some text here\")", "= \"Hello World from the SendGrid Python Library\" content = Content(\"text/plain\", \"some text" ]
[ "be used to access variables and classes in the # module (loader.myClass()) #", "If a operation is not defined in the default device, let it execute", "loader that can be used to access variables and classes in the #", "as sys_config import logging import tensorflow as tf from collections import Counter from", "array # path with image name at the end but without the ending", "file ''' nimg = nib.Nifti1Image(data, affine=affine, header=header) nimg.to_filename(img_path) def create_and_save_nii(data, img_path): img =", "% (len(source[0]),len(source[1]))) if len(target[0]) != len(target[1]): raise ValueError('The number of target indices %d", "+ value_string + str(dict[dict_key])) print_string = separator.join(lines) return print_string def module_from_path(path): module_name =", "label1), ...] source_samples = tuple_of_lists_to_list_of_tuples(source) target_samples = tuple_of_lists_to_list_of_tuples(target) # shuffle data np.random.seed(random_seed) np.random.shuffle(source_samples)", "# takes in sets of indices and changes them to lists with True", "gpu memory, just use it on the go config.allow_soft_placement = True # If", "where the checkpoints are saved :param name: Name under which you saved the", "= [2, 2, 0, 0, 2] source = (source_indices1, source_labels2) target = (target_indices2,", "what needs to be removed s_to_remove = source_counter - target_counter t_to_remove = target_counter", "# Do not assign whole gpu memory, just use it on the go", "s_to_remove = source_counter - target_counter t_to_remove = target_counter - source_counter # change to", "file = file.split('/')[-1] file_base, postfix_and_number, rest = file.split('.')[0:3] it_num = int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num) latest_iteration", "config file # remove the .py with [:-3] experiment_module = SourceFileLoader(py_file_name[:-3], py_file_path).load_module() #", "Counter(reduced_target[1]) logging.info('source label count after reduction ' + 
str(reduced_source_count)) logging.info('target label count after", "name is the same as the folder name experiment_folder_name = experiment_path.split('/')[-1] if experiment_folder_name", "lists or tuples of indices as well, but the in operation is O(n)", "target_samples.sort(key=sort_key) # change back to a representation with a tuple of lists of", "remove_count(list_of_tuples, remove_counter): # remove tuples with labels specified by remove_counter from the front", "binned_list = [] # for nn in range(N): # if np.sum(ages_ord_reg[nn,:]) > 0:", "the source and target data have the same cardinality and label distribution #", "elements in the dictionary a numerical value that is used for the order", "= nib.Nifti1Image(data, affine=affine, header=header) nimg.to_filename(img_path) def create_and_save_nii(data, img_path): img = nib.Nifti1Image(data, np.eye(4)) nib.save(img,", "path to new folder :return: True if folder created, False if folder already", "tuple_of_lists_to_list_of_tuples(source2) target_tuples2 = tuple_of_lists_to_list_of_tuples(target2) print(source_tuples2) print(target_tuples2) assert set(source_tuples2) <= set(source_tuples) assert set(target_tuples2) <=", "np.sum(ages_ord_reg, -1) def get_ordinal_reg_weights(ages_ordinal_reg): ages_binned = list(ordinal_regression_to_bin(ages_ordinal_reg)) P = ages_ordinal_reg.shape[1] counts = [ages_binned.count(pp)", "counts = [ages_binned.count(pp) for pp in range(P)] counts = [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for cc", "name experiment_folder_name = experiment_path.split('/')[-1] if experiment_folder_name != experiment_module.experiment_name: logging.warning('warning: the experiment folder name", "Helper function to make a new folder if doesn't exist :param folder: path", "init_checkpoint_path) last_step = int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest step was: %d' % last_step) return init_checkpoint_path, last_step", "indices %d and target labels %d is not equal' % 
(len(target[0]),len(target[1]))) # count", "[ages_binned.count(pp) for pp in range(P)] counts = [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for cc in counts]", "nonnegative counts remain, so just what needs to be removed s_to_remove = source_counter", "check whether the label counts of source and target domain are now equal", "device, let it execute in another. return config def tuple_of_lists_to_list_of_tuples(tuple_in): return list(zip(*tuple_in)) def", "ResourceExhaustError when a lot of memory is used config = tf.ConfigProto() config.gpu_options.allow_growth =", "module (loader.myClass()) # if the file_name of the module is not given then", "Authors: # <NAME> (<EMAIL>) # <NAME> (<EMAIL>) # <NAME> (<EMAIL>) # Useful functions", "experiment_module.experiment_name: logging.warning('warning: the experiment folder name %s is different from the experiment name", "dict_key in sorted(dict, key=key_function, reverse=True): lines.append(key_string + str(dict_key) + ' ' + value_string", "source_counter = Counter(source[1]) target_counter = Counter(target[1]) # only nonnegative counts remain, so just", "True # Do not assign whole gpu memory, just use it on the", "self.__dict__.update(kwds) def load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']): # loads the module of the experiment and", "the labels source_counter = Counter(source[1]) target_counter = Counter(target[1]) # only nonnegative counts remain,", "tuples with labels specified by remove_counter from the front of the list in", "representation with a list of tuples [(index1, label1), ...] 
source_samples = tuple_of_lists_to_list_of_tuples(source) target_samples", "ages_mat = np.transpose(np.tile(ages,(P,1))) bins_mat = np.tile(bins, (N,1)) return np.array(ages_mat>bins_mat, dtype=np.uint8) def age_to_bins(ages, bins=(65,", "= remove_counter.copy() remove_indices = set() for ind, tup in enumerate(list_of_tuples): if sum(remove_counter.values()) ==", "img_path): img = nib.Nifti1Image(data, np.eye(4)) nib.save(img, img_path) def get_latest_model_checkpoint_path(folder, name): ''' Returns the", "reduction ' + str(reduced_source_count)) logging.info('target label count after reduction ' + str(reduced_target_count)) #", "if ind in keep_indices] return reduced_list def balance_source_target(source, target, random_seed=None): # source and", "2, 0, 0, 2] source = (source_indices1, source_labels2) target = (target_indices2, target_labels2) source_tuples", "variables and classes in the # module (loader.myClass()) # if the file_name of", "''' Shortcut to save a nifty file ''' nimg = nib.Nifti1Image(data, affine=affine, header=header)", "works with lists or tuples of indices as well, but the in operation", "with a given name :param folder: Folder where the checkpoints are saved :param", "saved :param name: Name under which you saved the model :return: The path", "reduced_source = list_of_tuples_to_tuple_of_lists(source_samples) reduced_target = list_of_tuples_to_tuple_of_lists(target_samples) reduced_source_count = Counter(reduced_source[1]) reduced_target_count = Counter(reduced_target[1]) logging.info('source", "the latest iteration ''' iteration_nums = [] for file in glob.glob(os.path.join(folder, '%s*.meta' %", "file in glob.glob(os.path.join(folder, '%s*.meta' % name)): file = file.split('/')[-1] file_base, postfix_and_number, rest =", "if file_name is None: # get experiment config file (assuming it is the", "well, but the in operation is O(n) instead of O(1) selector_result = []", "# Authors: # <NAME> (<EMAIL>) # <NAME> (<EMAIL>) # <NAME> (<EMAIL>) # 
Useful", "the default device, let it execute in another. return config def tuple_of_lists_to_list_of_tuples(tuple_in): return", "of labels assert len(label_list) == len(field_strength_list) labels = np.empty_like(fieldstrengths, dtype=np.int16) for fs_ind, current_field_strength", "given then the file of the module must be the only .py file", "remove_counter.items()]): raise ValueError('There are negative counts in remove_counter %s' % str(remove_counter)) remove_counter_copy =", "the folder name experiment_folder_name = experiment_path.split('/')[-1] if experiment_folder_name != experiment_module.experiment_name: logging.warning('warning: the experiment", "False for label_ind, current_label in enumerate(label_list): if(current_field_strength == field_strength_list[label_ind]): labels[fs_ind] = current_label valid_value", "return tuple(list(element) for element in zip(*list_in)) def remove_count(list_of_tuples, remove_counter): # remove tuples with", "not equal' % (len(target[0]),len(target[1]))) # count the labels source_counter = Counter(source[1]) target_counter =", "0] target_indices1 = [1, 4, 5] target_labels1 = [2, 2, 0] source_labels2 =", "path: %s' % init_checkpoint_path) last_step = int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest step was: %d' % last_step)", "checkpoint with the highest iteration number with a given name :param folder: Folder", "print_string def module_from_path(path): module_name = os.path.splitext(os.path.split(path)[1])[0] return SourceFileLoader(module_name, path).load_module() def get_latest_checkpoint_and_step(logdir, filename): init_checkpoint_path", "import tensorflow as tf from collections import Counter from matplotlib.image import imsave def", "reduced_target = list_of_tuples_to_tuple_of_lists(target_samples) reduced_source_count = Counter(reduced_source[1]) reduced_target_count = Counter(reduced_target[1]) logging.info('source label count after", "the files in other_py_files if file_name is None: # 
get experiment config file", "5, 6, 7] target_labels2 = [2, 2, 0, 0, 2] source = (source_indices1,", "image is 3d numpy array # path with image name at the end", "source2, target2 = balance_source_target(source, target, random_seed=0) print(source2) print(target2) source_tuples2 = tuple_of_lists_to_list_of_tuples(source2) target_tuples2 =", "SourceFileLoader(py_file_name[:-3], py_file_path).load_module() # experiment name is the same as the folder name experiment_folder_name", "if np.sum(ages_ord_reg[nn,:]) > 0: # binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) # else: # binned_list.append(0) return np.sum(ages_ord_reg, -1)", "len(bins) ages_mat = np.transpose(np.tile(ages,(P,1))) bins_mat = np.tile(bins, (N,1)) return np.array(ages_mat>bins_mat, dtype=np.uint8) def age_to_bins(ages,", "data np.random.seed(random_seed) np.random.shuffle(source_samples) np.random.shuffle(source_samples) # remove tuples source_samples = remove_count(source_samples, s_to_remove) target_samples =", "age_to_bins(ages, bins=(65, 70, 75, 80, 85)): ages_ordinal = age_to_ordinal_reg_format(ages, bins) return np.sum(ages_ordinal, axis=-1)", "t[0] source_samples.sort(key=sort_key) target_samples.sort(key=sort_key) # change back to a representation with a tuple of", "config = tf.ConfigProto() config.gpu_options.allow_growth = True # Do not assign whole gpu memory,", "with a tuple of lists of tuples ([index1, index2, ...], [label1, label2, ...])", "0] source_labels2 = [0, 0, 0] target_indices2 = [1, 4, 5, 6, 7]", "all_indices = set(range(len(list_of_tuples))) keep_indices = all_indices - remove_indices reduced_list = [element for ind,", "target data have the same cardinality and label distribution # make sure there", "= [1, 4, 5, 6, 7] target_labels2 = [2, 2, 0, 0, 2]", "new folder :return: True if folder created, False if folder already exists '''", "value in fieldstrengths: %s' % current_field_strength) return labels def age_to_ordinal_reg_format(ages, bins=(65, 70, 75,", "of 
labels and indices if len(source[0]) != len(source[1]): raise ValueError('The number of source", "logging import tensorflow as tf from collections import Counter from matplotlib.image import imsave", "py_file_path).load_module() # experiment name is the same as the folder name experiment_folder_name =", "2] source = (source_indices1, source_labels2) target = (target_indices2, target_labels2) source_tuples = tuple_of_lists_to_list_of_tuples(source) target_tuples", "index in keep_indices all_indices = set(range(len(list_of_tuples))) keep_indices = all_indices - remove_indices reduced_list =", "vmin=vmin, vmax=vmax, cmap='gray') if __name__ == '__main__': source_indices1 = [0, 2, 3] source_labels1", "target_labels2 = [2, 2, 0, 0, 2] source = (source_indices1, source_labels2) target =", "path_2d, vmin=-1, vmax=1): # image is 3d numpy array # path with image", ":return: The path to the checkpoint with the latest iteration ''' iteration_nums =", "for label_ind, current_label in enumerate(label_list): if(current_field_strength == field_strength_list[label_ind]): labels[fs_ind] = current_label valid_value =", "it is the first python file in log directory) py_file_list = [file for", "# source and target are tuples with (indices, labels corresponding to the indices)", "the ending .nii.gz create_and_save_nii(image, os.path.join(path_3d, img_name) + '.nii.gz') # coronal cut through the", "image_cut = image[:, 38, :] # rotate the image by 90 degree counterclockwise", "give the elements in the dictionary a numerical value that is used for", "target domain are now equal assert reduced_source_count == reduced_target_count return reduced_source, reduced_target def", "(loader.myClass()) # if the file_name of the module is not given then the", "nimg = nib.load(img_path) return nimg.get_data(), nimg.affine, nimg.header def save_nii(img_path, data, affine, header): '''", "and source labels %d is not equal' % (len(source[0]),len(source[1]))) if len(target[0]) != len(target[1]):", "# 
remove tuples with labels specified by remove_counter from the front of the", "[0, 2, 0] target_indices1 = [1, 4, 5] target_labels1 = [2, 2, 0]", "# change to a representation with a list of tuples [(index1, label1), ...]", "= np.rot90(image_cut) imsave(os.path.join(path_2d, img_name) + '.png', image_cut, vmin=vmin, vmax=vmax, cmap='gray') if __name__ ==", "experiment name %s' % (experiment_folder_name, experiment_module.experiment_name)) return experiment_module, experiment_path def string_dict_in_order(dict, key_function=None, key_string='',", "the same as the folder name experiment_folder_name = experiment_path.split('/')[-1] if experiment_folder_name != experiment_module.experiment_name:", "last_step = int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest step was: %d' % last_step) return init_checkpoint_path, last_step def", "= os.path.splitext(os.path.split(path)[1])[0] return SourceFileLoader(module_name, path).load_module() def get_latest_checkpoint_and_step(logdir, filename): init_checkpoint_path = get_latest_model_checkpoint_path(logdir, filename) logging.info('Checkpoint", "<NAME> (<EMAIL>) # <NAME> (<EMAIL>) # Useful functions import nibabel as nib import", "experiment name is the same as the folder name experiment_folder_name = experiment_path.split('/')[-1] if", "= (source_indices1, source_labels2) target = (target_indices2, target_labels2) source_tuples = tuple_of_lists_to_list_of_tuples(source) target_tuples = tuple_of_lists_to_list_of_tuples(target)", "= tuple_of_lists_to_list_of_tuples(target) # shuffle data np.random.seed(random_seed) np.random.shuffle(source_samples) np.random.shuffle(source_samples) # remove tuples source_samples =", "count after reduction ' + str(reduced_source_count)) logging.info('target label count after reduction ' +", "# sort by index sort_key = lambda t: t[0] source_samples.sort(key=sort_key) target_samples.sort(key=sort_key) # change", "= nib.Nifti1Image(data, np.eye(4)) nib.save(img, img_path) 
def get_latest_model_checkpoint_path(folder, name): ''' Returns the checkpoint with", "a nifty file ''' nimg = nib.Nifti1Image(data, affine=affine, header=header) nimg.to_filename(img_path) def create_and_save_nii(data, img_path):", "age_to_ordinal_reg_format(ages, bins=(65, 70, 75, 80, 85)): N = ages.shape[0] P = len(bins) ages_mat", "the directory # except for the files in other_py_files if file_name is None:", "key is a function to give the elements in the dictionary a numerical", "the module is not given then the file of the module must be", "np.random.shuffle(source_samples) np.random.shuffle(source_samples) # remove tuples source_samples = remove_count(source_samples, s_to_remove) target_samples = remove_count(target_samples, t_to_remove)", "corresponding value # assuming only nonnegative counts if not all([item[1] >= 0 for", "source_tuples = tuple_of_lists_to_list_of_tuples(source) target_tuples = tuple_of_lists_to_list_of_tuples(target) print(source) print(target) print(source_tuples) print(target_tuples) source2, target2 =", "the experiment folder name %s is different from the experiment name %s' %", "len(source[1]): raise ValueError('The number of source indices %d and source labels %d is", "# mystruct = Bunch(a=1, b=2) # print(mystruct.a) # >>> 1 class Bunch: def", "nib.Nifti1Image(data, affine=affine, header=header) nimg.to_filename(img_path) def create_and_save_nii(data, img_path): img = nib.Nifti1Image(data, np.eye(4)) nib.save(img, img_path)", "remove_counter_copy[tup[1]] > 0: remove_counter_copy[tup[1]] -= 1 remove_indices.add(ind) # make a list with only", "90 degree counterclockwise image_cut = np.rot90(image_cut) imsave(os.path.join(path_2d, img_name) + '.png', image_cut, vmin=vmin, vmax=vmax,", "index in range(max(ind_set))]) return selector_result # Useful shortcut for making struct like contructs", "reduced_source_count = Counter(reduced_source[1]) reduced_target_count = Counter(reduced_target[1]) logging.info('source label count after 
reduction ' +", "def get_latest_checkpoint_and_step(logdir, filename): init_checkpoint_path = get_latest_model_checkpoint_path(logdir, filename) logging.info('Checkpoint path: %s' % init_checkpoint_path) last_step", "cut through the hippocampy image_cut = image[:, 38, :] # rotate the image", "array of labels assert len(label_list) == len(field_strength_list) labels = np.empty_like(fieldstrengths, dtype=np.int16) for fs_ind,", "highest iteration number with a given name :param folder: Folder where the checkpoints", "it on the go config.allow_soft_placement = True # If a operation is not", "under which you saved the model :return: The path to the checkpoint with", "SourceFileLoader(module_name, path).load_module() def get_latest_checkpoint_and_step(logdir, filename): init_checkpoint_path = get_latest_model_checkpoint_path(logdir, filename) logging.info('Checkpoint path: %s' %", "sorted(dict, key=key_function, reverse=True): lines.append(key_string + str(dict_key) + ' ' + value_string + str(dict[dict_key]))", "nib.load(img_path) return nimg.get_data(), nimg.affine, nimg.header def save_nii(img_path, data, affine, header): ''' Shortcut to", "as the folder name experiment_folder_name = experiment_path.split('/')[-1] if experiment_folder_name != experiment_module.experiment_name: logging.warning('warning: the", "np.tile(bins, (N,1)) return np.array(ages_mat>bins_mat, dtype=np.uint8) def age_to_bins(ages, bins=(65, 70, 75, 80, 85)): ages_ordinal", "axis=None): return np.argwhere(arr == np.amax(arr, axis=axis)) def makefolder(folder): ''' Helper function to make", "''' Helper function to make a new folder if doesn't exist :param folder:", "6, 7] target_labels2 = [2, 2, 0, 0, 2] source = (source_indices1, source_labels2)", "for file in os.listdir(experiment_path) if (file.endswith('.py') and file not in other_py_files)] if len(py_file_list)", "not in other_py_files)] if len(py_file_list) != 1: raise ValueError('unexpected py files in log", "for item in 
remove_counter.items()]): raise ValueError('There are negative counts in remove_counter %s' %", "img_name, path_3d, path_2d, vmin=-1, vmax=1): # image is 3d numpy array # path", "# print(mystruct.a) # >>> 1 class Bunch: def __init__(self, **kwds): self.__dict__.update(kwds) def load_log_exp_config(experiment_path,", "assert reduced_source_count == reduced_target_count return reduced_source, reduced_target def save_image_and_cut(image, img_name, path_3d, path_2d, vmin=-1,", "False def load_nii(img_path): ''' Shortcut to load a nifti file ''' nimg =", "with lists or tuples of indices as well, but the in operation is", "the tuples that have an index in keep_indices all_indices = set(range(len(list_of_tuples))) keep_indices =", "in range(N): # if np.sum(ages_ord_reg[nn,:]) > 0: # binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) # else: # binned_list.append(0)", "Counter from matplotlib.image import imsave def fstr_to_label(fieldstrengths, field_strength_list, label_list): # input fieldstrenghts hdf5", "from collections import Counter from matplotlib.image import imsave def fstr_to_label(fieldstrengths, field_strength_list, label_list): #", "%s is different from the experiment name %s' % (experiment_folder_name, experiment_module.experiment_name)) return experiment_module,", "list # field_strength_list must have the same size as label_list # returns a", "whole gpu memory, just use it on the go config.allow_soft_placement = True #", "defined in the default device, let it execute in another. 
return config def", "file not in other_py_files)] if len(py_file_list) != 1: raise ValueError('unexpected py files in", "Folder where the checkpoints are saved :param name: Name under which you saved", "file in the directory # except for the files in other_py_files if file_name", "by index sort_key = lambda t: t[0] source_samples.sort(key=sort_key) target_samples.sort(key=sort_key) # change back to", "# rotate the image by 90 degree counterclockwise image_cut = np.rot90(image_cut) imsave(os.path.join(path_2d, img_name)", "target_samples = tuple_of_lists_to_list_of_tuples(target) # shuffle data np.random.seed(random_seed) np.random.shuffle(source_samples) np.random.shuffle(source_samples) # remove tuples source_samples", "= len(bins) ages_mat = np.transpose(np.tile(ages,(P,1))) bins_mat = np.tile(bins, (N,1)) return np.array(ages_mat>bins_mat, dtype=np.uint8) def", "raise ValueError('unexpected value in fieldstrengths: %s' % current_field_strength) return labels def age_to_ordinal_reg_format(ages, bins=(65,", "save a nifty file ''' nimg = nib.Nifti1Image(data, affine=affine, header=header) nimg.to_filename(img_path) def create_and_save_nii(data,", "are an equal number of labels and indices if len(source[0]) != len(source[1]): raise", "keep_indices] return reduced_list def balance_source_target(source, target, random_seed=None): # source and target are tuples", "# Useful shortcut for making struct like contructs # Example: # mystruct =", "len(target[1]): raise ValueError('The number of target indices %d and target labels %d is", "%s' % init_checkpoint_path) last_step = int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest step was: %d' % last_step) return", "is not defined in the default device, let it execute in another. 
return", "# except for the files in other_py_files if file_name is None: # get", "model :return: The path to the checkpoint with the latest iteration ''' iteration_nums", "is the first python file in log directory) py_file_list = [file for file", "sure there are an equal number of labels and indices if len(source[0]) !=", "to make a new folder if doesn't exist :param folder: path to new", "0, 0, 2] source = (source_indices1, source_labels2) target = (target_indices2, target_labels2) source_tuples =", "a numerical value that is used for the order separator = '\\n' lines", "- source_counter # change to a representation with a list of tuples [(index1,", "return config def tuple_of_lists_to_list_of_tuples(tuple_in): return list(zip(*tuple_in)) def list_of_tuples_to_tuple_of_lists(list_in): # zip(*list_in) is a tuple", "with the latest iteration ''' iteration_nums = [] for file in glob.glob(os.path.join(folder, '%s*.meta'", "nifti file ''' nimg = nib.load(img_path) return nimg.get_data(), nimg.affine, nimg.header def save_nii(img_path, data,", "py_file_list = [file for file in os.listdir(experiment_path) if (file.endswith('.py') and file not in", "+ str(dict_key) + ' ' + value_string + str(dict[dict_key])) print_string = separator.join(lines) return", "= Counter(target[1]) # only nonnegative counts remain, so just what needs to be", "= [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for cc in counts] return counts def all_argmax(arr, axis=None): return", "# as the corresponding value # assuming only nonnegative counts if not all([item[1]", "for dict_key in sorted(dict, key=key_function, reverse=True): lines.append(key_string + str(dict_key) + ' ' +", "so just what needs to be removed s_to_remove = source_counter - target_counter t_to_remove", "in range(max(ind_set))]) return selector_result # Useful shortcut for making struct like contructs #", "path_3d, path_2d, vmin=-1, vmax=1): # image is 3d numpy array # path with", "'.png', image_cut, vmin=vmin, vmax=vmax, 
cmap='gray') if __name__ == '__main__': source_indices1 = [0, 2,", "execute in another. return config def tuple_of_lists_to_list_of_tuples(tuple_in): return list(zip(*tuple_in)) def list_of_tuples_to_tuple_of_lists(list_in): # zip(*list_in)", "ages_ordinal_reg.shape[1] counts = [ages_binned.count(pp) for pp in range(P)] counts = [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for", "just use it on the go config.allow_soft_placement = True # If a operation", ":] # rotate the image by 90 degree counterclockwise image_cut = np.rot90(image_cut) imsave(os.path.join(path_2d,", "balance_source_target(source, target, random_seed=0) print(source2) print(target2) source_tuples2 = tuple_of_lists_to_list_of_tuples(source2) target_tuples2 = tuple_of_lists_to_list_of_tuples(target2) print(source_tuples2) print(target_tuples2)", "ages_ordinal = age_to_ordinal_reg_format(ages, bins) return np.sum(ages_ordinal, axis=-1) def ordinal_regression_to_bin(ages_ord_reg): # N = ages_ord_reg.shape[0]", "file_base, postfix_and_number, rest = file.split('.')[0:3] it_num = int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num) latest_iteration = np.max(iteration_nums) return", "# works with lists or tuples of indices as well, but the in", "name: Name under which you saved the model :return: The path to the", "ind in keep_indices] return reduced_list def balance_source_target(source, target, random_seed=None): # source and target", "70, 75, 80, 85)): ages_ordinal = age_to_ordinal_reg_format(ages, bins) return np.sum(ages_ordinal, axis=-1) def ordinal_regression_to_bin(ages_ord_reg):", "return init_checkpoint_path, last_step def get_session_memory_config(): # prevents ResourceExhaustError when a lot of memory", "and indices if len(source[0]) != len(source[1]): raise ValueError('The number of source indices %d", "in keep_indices] return reduced_list def balance_source_target(source, target, random_seed=None): # source and target are", "iteration ''' iteration_nums = [] for file in 
glob.glob(os.path.join(folder, '%s*.meta' % name)): file", "return reduced_list def balance_source_target(source, target, random_seed=None): # source and target are tuples with", "py_file_name = file_name py_file_path = os.path.join(experiment_path, py_file_name) # import config file # remove", "= np.max(iteration_nums) return os.path.join(folder, name + '-' + str(latest_iteration)) def index_sets_to_selectors(*index_sets): # takes", "2, 3] source_labels1 = [0, 2, 0] target_indices1 = [1, 4, 5] target_labels1", "vmax=vmax, cmap='gray') if __name__ == '__main__': source_indices1 = [0, 2, 3] source_labels1 =", "to save a nifty file ''' nimg = nib.Nifti1Image(data, affine=affine, header=header) nimg.to_filename(img_path) def", "enumerate(list_of_tuples) if ind in keep_indices] return reduced_list def balance_source_target(source, target, random_seed=None): # source", "print_string = separator.join(lines) return print_string def module_from_path(path): module_name = os.path.splitext(os.path.split(path)[1])[0] return SourceFileLoader(module_name, path).load_module()", "return np.sum(ages_ordinal, axis=-1) def ordinal_regression_to_bin(ages_ord_reg): # N = ages_ord_reg.shape[0] # binned_list = []", "remove_counter): # remove tuples with labels specified by remove_counter from the front of", "many of each label should get removed # as the corresponding value #", "[:-3] experiment_module = SourceFileLoader(py_file_name[:-3], py_file_path).load_module() # experiment name is the same as the", "remove_counter_copy = remove_counter.copy() remove_indices = set() for ind, tup in enumerate(list_of_tuples): if sum(remove_counter.values())", "otherwise # works with lists or tuples of indices as well, but the", "a loader that can be used to access variables and classes in the", "# module (loader.myClass()) # if the file_name of the module is not given", "of lists of tuples ([index1, index2, ...], [label1, label2, ...]) reduced_source = 
list_of_tuples_to_tuple_of_lists(source_samples)", "def index_sets_to_selectors(*index_sets): # takes in sets of indices and changes them to lists", "# else: # binned_list.append(0) return np.sum(ages_ord_reg, -1) def get_ordinal_reg_weights(ages_ordinal_reg): ages_binned = list(ordinal_regression_to_bin(ages_ordinal_reg)) P", "Do not assign whole gpu memory, just use it on the go config.allow_soft_placement", "number of labels and indices if len(source[0]) != len(source[1]): raise ValueError('The number of", "= list_of_tuples_to_tuple_of_lists(source_samples) reduced_target = list_of_tuples_to_tuple_of_lists(target_samples) reduced_source_count = Counter(reduced_source[1]) reduced_target_count = Counter(reduced_target[1]) logging.info('source label", "target_counter = Counter(target[1]) # only nonnegative counts remain, so just what needs to", "Counter or dict of with labels as keys and how many of each", "tuple_of_lists_to_list_of_tuples(tuple_in): return list(zip(*tuple_in)) def list_of_tuples_to_tuple_of_lists(list_in): # zip(*list_in) is a tuple of tuples return", "them to lists with True if the index was in the set and", "target_tuples2 = tuple_of_lists_to_list_of_tuples(target2) print(source_tuples2) print(target_tuples2) assert set(source_tuples2) <= set(source_tuples) assert set(target_tuples2) <= set(target_tuples)", "!= len(source[1]): raise ValueError('The number of source indices %d and source labels %d", "in enumerate(list_of_tuples): if sum(remove_counter.values()) == 0: break else: if remove_counter_copy[tup[1]] > 0: remove_counter_copy[tup[1]]", "[2, 2, 0] source_labels2 = [0, 0, 0] target_indices2 = [1, 4, 5,", "but the in operation is O(n) instead of O(1) selector_result = [] for", "= np.empty_like(fieldstrengths, dtype=np.int16) for fs_ind, current_field_strength in enumerate(fieldstrengths): valid_value = False for label_ind,", "counts def all_argmax(arr, axis=None): return np.argwhere(arr == np.amax(arr, axis=axis)) def makefolder(folder): ''' 
Helper", "print(source_tuples) print(target_tuples) source2, target2 = balance_source_target(source, target, random_seed=0) print(source2) print(target2) source_tuples2 = tuple_of_lists_to_list_of_tuples(source2)", "target_counter t_to_remove = target_counter - source_counter # change to a representation with a", "nonnegative counts if not all([item[1] >= 0 for item in remove_counter.items()]): raise ValueError('There", "used config = tf.ConfigProto() config.gpu_options.allow_growth = True # Do not assign whole gpu", "assert len(label_list) == len(field_strength_list) labels = np.empty_like(fieldstrengths, dtype=np.int16) for fs_ind, current_field_strength in enumerate(fieldstrengths):", "# path with image name at the end but without the ending .nii.gz", "85)): N = ages.shape[0] P = len(bins) ages_mat = np.transpose(np.tile(ages,(P,1))) bins_mat = np.tile(bins,", "= [0, 2, 3] source_labels1 = [0, 2, 0] target_indices1 = [1, 4,", "current_field_strength) return labels def age_to_ordinal_reg_format(ages, bins=(65, 70, 75, 80, 85)): N = ages.shape[0]", "in the # module (loader.myClass()) # if the file_name of the module is", "how many of each label should get removed # as the corresponding value", "logging.info('Checkpoint path: %s' % init_checkpoint_path) last_step = int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest step was: %d' %", "where indices and labels are lists # the returned data has the same", "contructs # Example: # mystruct = Bunch(a=1, b=2) # print(mystruct.a) # >>> 1", "def __init__(self, **kwds): self.__dict__.update(kwds) def load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']): # loads the module of", "def all_argmax(arr, axis=None): return np.argwhere(arr == np.amax(arr, axis=axis)) def makefolder(folder): ''' Helper function", "and target are tuples with (indices, labels corresponding to the indices) where indices", "= source_counter - target_counter t_to_remove = target_counter 
- source_counter # change to a", "# binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) # else: # binned_list.append(0) return np.sum(ages_ord_reg, -1) def get_ordinal_reg_weights(ages_ordinal_reg): ages_binned =", "not found') py_file_name = py_file_list[0] else: py_file_name = file_name py_file_path = os.path.join(experiment_path, py_file_name)", "+ ' ' + value_string + str(dict[dict_key])) print_string = separator.join(lines) return print_string def", "is used for the order separator = '\\n' lines = [] for dict_key", "indices and labels are lists # the returned data has the same structure", "dtype=np.uint8) def age_to_bins(ages, bins=(65, 70, 75, 80, 85)): ages_ordinal = age_to_ordinal_reg_format(ages, bins) return", "default device, let it execute in another. return config def tuple_of_lists_to_list_of_tuples(tuple_in): return list(zip(*tuple_in))", "= tuple_of_lists_to_list_of_tuples(source) target_tuples = tuple_of_lists_to_list_of_tuples(target) print(source) print(target) print(source_tuples) print(target_tuples) source2, target2 = balance_source_target(source,", "tuples that have an index in keep_indices all_indices = set(range(len(list_of_tuples))) keep_indices = all_indices", "cc in counts] return counts def all_argmax(arr, axis=None): return np.argwhere(arr == np.amax(arr, axis=axis))", "a lot of memory is used config = tf.ConfigProto() config.gpu_options.allow_growth = True #", "' + str(reduced_target_count)) # check whether the label counts of source and target", "os.listdir(experiment_path) if (file.endswith('.py') and file not in other_py_files)] if len(py_file_list) != 1: raise", "source labels %d is not equal' % (len(source[0]),len(source[1]))) if len(target[0]) != len(target[1]): raise", "print(source2) print(target2) source_tuples2 = tuple_of_lists_to_list_of_tuples(source2) target_tuples2 = tuple_of_lists_to_list_of_tuples(target2) print(source_tuples2) print(target_tuples2) assert set(source_tuples2) <=", "postfix_and_number, rest = 
file.split('.')[0:3] it_num = int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num) latest_iteration = np.max(iteration_nums) return os.path.join(folder,", "Bunch(a=1, b=2) # print(mystruct.a) # >>> 1 class Bunch: def __init__(self, **kwds): self.__dict__.update(kwds)", "save_nii(img_path, data, affine, header): ''' Shortcut to save a nifty file ''' nimg", "tuples (something, label) # remove_counter is a Counter or dict of with labels", "separator = '\\n' lines = [] for dict_key in sorted(dict, key=key_function, reverse=True): lines.append(key_string", "path with image name at the end but without the ending .nii.gz create_and_save_nii(image,", "enumerate(label_list): if(current_field_strength == field_strength_list[label_ind]): labels[fs_ind] = current_label valid_value = True break if(not valid_value):", "[2, 2, 0, 0, 2] source = (source_indices1, source_labels2) target = (target_indices2, target_labels2)", "module_name = os.path.splitext(os.path.split(path)[1])[0] return SourceFileLoader(module_name, path).load_module() def get_latest_checkpoint_and_step(logdir, filename): init_checkpoint_path = get_latest_model_checkpoint_path(logdir, filename)", "= file.split('/')[-1] file_base, postfix_and_number, rest = file.split('.')[0:3] it_num = int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num) latest_iteration =", "label counts of source and target domain are now equal assert reduced_source_count ==", "experiment_folder_name != experiment_module.experiment_name: logging.warning('warning: the experiment folder name %s is different from the", "an index in keep_indices all_indices = set(range(len(list_of_tuples))) keep_indices = all_indices - remove_indices reduced_list", "else: py_file_name = file_name py_file_path = os.path.join(experiment_path, py_file_name) # import config file #", "numerical value that is used for the order separator = '\\n' lines =", "ind, element in enumerate(list_of_tuples) if ind in keep_indices] return 
reduced_list def balance_source_target(source, target,", "py_file_path = os.path.join(experiment_path, py_file_name) # import config file # remove the .py with", "tuple_of_lists_to_list_of_tuples(source) target_samples = tuple_of_lists_to_list_of_tuples(target) # shuffle data np.random.seed(random_seed) np.random.shuffle(source_samples) np.random.shuffle(source_samples) # remove tuples", "# binned_list = [] # for nn in range(N): # if np.sum(ages_ord_reg[nn,:]) >", "config.allow_soft_placement = True # If a operation is not defined in the default", "name %s' % (experiment_folder_name, experiment_module.experiment_name)) return experiment_module, experiment_path def string_dict_in_order(dict, key_function=None, key_string='', value_string=''):", "b=2) # print(mystruct.a) # >>> 1 class Bunch: def __init__(self, **kwds): self.__dict__.update(kwds) def", "True break if(not valid_value): raise ValueError('unexpected value in fieldstrengths: %s' % current_field_strength) return", "folder: path to new folder :return: True if folder created, False if folder", "value that is used for the order separator = '\\n' lines = []", "if len(py_file_list) != 1: raise ValueError('unexpected py files in log directory or experiment", "len(label_list) == len(field_strength_list) labels = np.empty_like(fieldstrengths, dtype=np.int16) for fs_ind, current_field_strength in enumerate(fieldstrengths): valid_value", "and target labels %d is not equal' % (len(target[0]),len(target[1]))) # count the labels", "the checkpoint with the latest iteration ''' iteration_nums = [] for file in", "label count after reduction ' + str(reduced_source_count)) logging.info('target label count after reduction '", "equal' % (len(target[0]),len(target[1]))) # count the labels source_counter = Counter(source[1]) target_counter = Counter(target[1])", "lines.append(key_string + str(dict_key) + ' ' + value_string + str(dict[dict_key])) print_string = separator.join(lines)", "logging.info('Latest step was: %d' % 
last_step) return init_checkpoint_path, last_step def get_session_memory_config(): # prevents", "labels as keys and how many of each label should get removed #", "import imsave def fstr_to_label(fieldstrengths, field_strength_list, label_list): # input fieldstrenghts hdf5 list # field_strength_list", "# remove_counter is a Counter or dict of with labels as keys and", "coronal cut through the hippocampy image_cut = image[:, 38, :] # rotate the", "nimg.header def save_nii(img_path, data, affine, header): ''' Shortcut to save a nifty file", "another. return config def tuple_of_lists_to_list_of_tuples(tuple_in): return list(zip(*tuple_in)) def list_of_tuples_to_tuple_of_lists(list_in): # zip(*list_in) is a", "for file in glob.glob(os.path.join(folder, '%s*.meta' % name)): file = file.split('/')[-1] file_base, postfix_and_number, rest", "= [] for dict_key in sorted(dict, key=key_function, reverse=True): lines.append(key_string + str(dict_key) + '", "[] # for nn in range(N): # if np.sum(ages_ord_reg[nn,:]) > 0: # binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1)", "at the end but without the ending .nii.gz create_and_save_nii(image, os.path.join(path_3d, img_name) + '.nii.gz')", "as label_list # returns a numpy array of labels assert len(label_list) == len(field_strength_list)", "zip(*list_in) is a tuple of tuples return tuple(list(element) for element in zip(*list_in)) def", "dict of with labels as keys and how many of each label should", "# remove the .py with [:-3] experiment_module = SourceFileLoader(py_file_name[:-3], py_file_path).load_module() # experiment name", "is the same as the folder name experiment_folder_name = experiment_path.split('/')[-1] if experiment_folder_name !=", "with labels specified by remove_counter from the front of the list in place", "the same structure but the source and target data have the same cardinality", "for index in range(max(ind_set))]) return selector_result # Useful shortcut for making struct like", "to load a nifti 
file ''' nimg = nib.load(img_path) return nimg.get_data(), nimg.affine, nimg.header", "[] for dict_key in sorted(dict, key=key_function, reverse=True): lines.append(key_string + str(dict_key) + ' '", "operation is not defined in the default device, let it execute in another.", "needs to be removed s_to_remove = source_counter - target_counter t_to_remove = target_counter -", "returns a loader that can be used to access variables and classes in", "== '__main__': source_indices1 = [0, 2, 3] source_labels1 = [0, 2, 0] target_indices1", "init_checkpoint_path = get_latest_model_checkpoint_path(logdir, filename) logging.info('Checkpoint path: %s' % init_checkpoint_path) last_step = int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest", "returned data has the same structure but the source and target data have", "== len(field_strength_list) labels = np.empty_like(fieldstrengths, dtype=np.int16) for fs_ind, current_field_strength in enumerate(fieldstrengths): valid_value =", "folder already exists ''' if not os.path.exists(folder): os.makedirs(folder) return True return False def", "same structure but the source and target data have the same cardinality and", "def load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']): # loads the module of the experiment and returns", "[element for ind, element in enumerate(list_of_tuples) if ind in keep_indices] return reduced_list def", "the corresponding value # assuming only nonnegative counts if not all([item[1] >= 0", "def get_ordinal_reg_weights(ages_ordinal_reg): ages_binned = list(ordinal_regression_to_bin(ages_ordinal_reg)) P = ages_ordinal_reg.shape[1] counts = [ages_binned.count(pp) for pp", "only nonnegative counts remain, so just what needs to be removed s_to_remove =", "selector_result = [] for ind_set in index_sets: selector_result.append([(index in ind_set) for index in", "dtype=np.int16) for fs_ind, current_field_strength in enumerate(fieldstrengths): 
valid_value = False for label_ind, current_label in", "create_and_save_nii(data, img_path): img = nib.Nifti1Image(data, np.eye(4)) nib.save(img, img_path) def get_latest_model_checkpoint_path(folder, name): ''' Returns", "selector_result.append([(index in ind_set) for index in range(max(ind_set))]) return selector_result # Useful shortcut for", "for the order separator = '\\n' lines = [] for dict_key in sorted(dict,", "source_labels2 = [0, 0, 0] target_indices2 = [1, 4, 5, 6, 7] target_labels2", "not os.path.exists(folder): os.makedirs(folder) return True return False def load_nii(img_path): ''' Shortcut to load", "instead of O(1) selector_result = [] for ind_set in index_sets: selector_result.append([(index in ind_set)", "len(target[0]) != len(target[1]): raise ValueError('The number of target indices %d and target labels", "memory, just use it on the go config.allow_soft_placement = True # If a", "tuples of indices as well, but the in operation is O(n) instead of", "np.array(ages_mat>bins_mat, dtype=np.uint8) def age_to_bins(ages, bins=(65, 70, 75, 80, 85)): ages_ordinal = age_to_ordinal_reg_format(ages, bins)", "os.path.join(folder, name + '-' + str(latest_iteration)) def index_sets_to_selectors(*index_sets): # takes in sets of", "in zip(*list_in)) def remove_count(list_of_tuples, remove_counter): # remove tuples with labels specified by remove_counter", "def save_nii(img_path, data, affine, header): ''' Shortcut to save a nifty file '''", "target_counter - source_counter # change to a representation with a list of tuples", "for ind, element in enumerate(list_of_tuples) if ind in keep_indices] return reduced_list def balance_source_target(source,", "then the file of the module must be the only .py file in", "py_file_name = py_file_list[0] else: py_file_name = file_name py_file_path = os.path.join(experiment_path, py_file_name) # import", "range(N): # if np.sum(ages_ord_reg[nn,:]) > 0: # binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) # else: # 
binned_list.append(0) return", "counterclockwise image_cut = np.rot90(image_cut) imsave(os.path.join(path_2d, img_name) + '.png', image_cut, vmin=vmin, vmax=vmax, cmap='gray') if", "raise ValueError('There are negative counts in remove_counter %s' % str(remove_counter)) remove_counter_copy = remove_counter.copy()", "a given name :param folder: Folder where the checkpoints are saved :param name:", "was: %d' % last_step) return init_checkpoint_path, last_step def get_session_memory_config(): # prevents ResourceExhaustError when", "is O(n) instead of O(1) selector_result = [] for ind_set in index_sets: selector_result.append([(index", "= list_of_tuples_to_tuple_of_lists(target_samples) reduced_source_count = Counter(reduced_source[1]) reduced_target_count = Counter(reduced_target[1]) logging.info('source label count after reduction", "other_py_files)] if len(py_file_list) != 1: raise ValueError('unexpected py files in log directory or", "with the highest iteration number with a given name :param folder: Folder where", "counts of source and target domain are now equal assert reduced_source_count == reduced_target_count", "remove the .py with [:-3] experiment_module = SourceFileLoader(py_file_name[:-3], py_file_path).load_module() # experiment name is", "target_tuples = tuple_of_lists_to_list_of_tuples(target) print(source) print(target) print(source_tuples) print(target_tuples) source2, target2 = balance_source_target(source, target, random_seed=0)", "in other_py_files)] if len(py_file_list) != 1: raise ValueError('unexpected py files in log directory", "header=header) nimg.to_filename(img_path) def create_and_save_nii(data, img_path): img = nib.Nifti1Image(data, np.eye(4)) nib.save(img, img_path) def get_latest_model_checkpoint_path(folder,", "python file in log directory) py_file_list = [file for file in os.listdir(experiment_path) if", "return np.sum(ages_ord_reg, -1) def get_ordinal_reg_weights(ages_ordinal_reg): ages_binned = 
list(ordinal_regression_to_bin(ages_ordinal_reg)) P = ages_ordinal_reg.shape[1] counts =", "target_samples = remove_count(target_samples, t_to_remove) # sort by index sort_key = lambda t: t[0]", "let it execute in another. return config def tuple_of_lists_to_list_of_tuples(tuple_in): return list(zip(*tuple_in)) def list_of_tuples_to_tuple_of_lists(list_in):", "label) # remove_counter is a Counter or dict of with labels as keys", "np.eye(4)) nib.save(img, img_path) def get_latest_model_checkpoint_path(folder, name): ''' Returns the checkpoint with the highest", "data have the same cardinality and label distribution # make sure there are", "= True # If a operation is not defined in the default device,", "0: break else: if remove_counter_copy[tup[1]] > 0: remove_counter_copy[tup[1]] -= 1 remove_indices.add(ind) # make", "= file.split('.')[0:3] it_num = int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num) latest_iteration = np.max(iteration_nums) return os.path.join(folder, name +", "-= 1 remove_indices.add(ind) # make a list with only the tuples that have", "= get_latest_model_checkpoint_path(logdir, filename) logging.info('Checkpoint path: %s' % init_checkpoint_path) last_step = int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest step", "%s' % (experiment_folder_name, experiment_module.experiment_name)) return experiment_module, experiment_path def string_dict_in_order(dict, key_function=None, key_string='', value_string=''): #", "%s' % current_field_strength) return labels def age_to_ordinal_reg_format(ages, bins=(65, 70, 75, 80, 85)): N", "def string_dict_in_order(dict, key_function=None, key_string='', value_string=''): # key is a function to give the", "value_string=''): # key is a function to give the elements in the dictionary", "remove_indices.add(ind) # make a list with only the tuples that have an index", "to lists with True if the index was in the set and false", "the list in place # tuples (something, label) # 
remove_counter is a Counter", "remove_count(source_samples, s_to_remove) target_samples = remove_count(target_samples, t_to_remove) # sort by index sort_key = lambda", "tuples with (indices, labels corresponding to the indices) where indices and labels are", "def age_to_bins(ages, bins=(65, 70, 75, 80, 85)): ages_ordinal = age_to_ordinal_reg_format(ages, bins) return np.sum(ages_ordinal,", "valid_value = False for label_ind, current_label in enumerate(label_list): if(current_field_strength == field_strength_list[label_ind]): labels[fs_ind] =", "assuming only nonnegative counts if not all([item[1] >= 0 for item in remove_counter.items()]):", "5] target_labels1 = [2, 2, 0] source_labels2 = [0, 0, 0] target_indices2 =", "[] for ind_set in index_sets: selector_result.append([(index in ind_set) for index in range(max(ind_set))]) return", "experiment config file (assuming it is the first python file in log directory)", "target_indices1 = [1, 4, 5] target_labels1 = [2, 2, 0] source_labels2 = [0,", "binned_list.append(0) return np.sum(ages_ord_reg, -1) def get_ordinal_reg_weights(ages_ordinal_reg): ages_binned = list(ordinal_regression_to_bin(ages_ordinal_reg)) P = ages_ordinal_reg.shape[1] counts", "for pp in range(P)] counts = [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for cc in counts] return", "a tuple of tuples return tuple(list(element) for element in zip(*list_in)) def remove_count(list_of_tuples, remove_counter):", "log directory) py_file_list = [file for file in os.listdir(experiment_path) if (file.endswith('.py') and file", "in glob.glob(os.path.join(folder, '%s*.meta' % name)): file = file.split('/')[-1] file_base, postfix_and_number, rest = file.split('.')[0:3]", "of the module must be the only .py file in the directory #", "random_seed=None): # source and target are tuples with (indices, labels corresponding to the", "else: if remove_counter_copy[tup[1]] > 0: remove_counter_copy[tup[1]] -= 1 remove_indices.add(ind) # make a list", "<NAME> (<EMAIL>) # 
<NAME> (<EMAIL>) # <NAME> (<EMAIL>) # Useful functions import nibabel", "the same cardinality and label distribution # make sure there are an equal", ":return: True if folder created, False if folder already exists ''' if not", "last_step) return init_checkpoint_path, last_step def get_session_memory_config(): # prevents ResourceExhaustError when a lot of", "found') py_file_name = py_file_list[0] else: py_file_name = file_name py_file_path = os.path.join(experiment_path, py_file_name) #", "True return False def load_nii(img_path): ''' Shortcut to load a nifti file '''", "label_list # returns a numpy array of labels assert len(label_list) == len(field_strength_list) labels", "= [] for ind_set in index_sets: selector_result.append([(index in ind_set) for index in range(max(ind_set))])", "= Counter(reduced_target[1]) logging.info('source label count after reduction ' + str(reduced_source_count)) logging.info('target label count", "field_strength_list[label_ind]): labels[fs_ind] = current_label valid_value = True break if(not valid_value): raise ValueError('unexpected value", "labels corresponding to the indices) where indices and labels are lists # the", "Bunch: def __init__(self, **kwds): self.__dict__.update(kwds) def load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']): # loads the module", "= [ages_binned.count(pp) for pp in range(P)] counts = [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for cc in", "t: t[0] source_samples.sort(key=sort_key) target_samples.sort(key=sort_key) # change back to a representation with a tuple", "a list with only the tuples that have an index in keep_indices all_indices", "[(index1, label1), ...] 
source_samples = tuple_of_lists_to_list_of_tuples(source) target_samples = tuple_of_lists_to_list_of_tuples(target) # shuffle data np.random.seed(random_seed)", "= age_to_ordinal_reg_format(ages, bins) return np.sum(ages_ordinal, axis=-1) def ordinal_regression_to_bin(ages_ord_reg): # N = ages_ord_reg.shape[0] #", "# get experiment config file (assuming it is the first python file in", "last_step def get_session_memory_config(): # prevents ResourceExhaustError when a lot of memory is used", "field_strength_list must have the same size as label_list # returns a numpy array", "key_function=None, key_string='', value_string=''): # key is a function to give the elements in", "are negative counts in remove_counter %s' % str(remove_counter)) remove_counter_copy = remove_counter.copy() remove_indices =", "return print_string def module_from_path(path): module_name = os.path.splitext(os.path.split(path)[1])[0] return SourceFileLoader(module_name, path).load_module() def get_latest_checkpoint_and_step(logdir, filename):", "the indices) where indices and labels are lists # the returned data has", "tuple of tuples return tuple(list(element) for element in zip(*list_in)) def remove_count(list_of_tuples, remove_counter): #", "np.random.seed(random_seed) np.random.shuffle(source_samples) np.random.shuffle(source_samples) # remove tuples source_samples = remove_count(source_samples, s_to_remove) target_samples = remove_count(target_samples,", "whether the label counts of source and target domain are now equal assert", "label count after reduction ' + str(reduced_target_count)) # check whether the label counts", "def load_nii(img_path): ''' Shortcut to load a nifti file ''' nimg = nib.load(img_path)", "are now equal assert reduced_source_count == reduced_target_count return reduced_source, reduced_target def save_image_and_cut(image, img_name,", "on the go config.allow_soft_placement = True # If a operation is not defined", "lists # the returned data has the same structure but the 
source and", "int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num) latest_iteration = np.max(iteration_nums) return os.path.join(folder, name + '-' + str(latest_iteration)) def", "return np.array(ages_mat>bins_mat, dtype=np.uint8) def age_to_bins(ages, bins=(65, 70, 75, 80, 85)): ages_ordinal = age_to_ordinal_reg_format(ages,", "except for the files in other_py_files if file_name is None: # get experiment", "The path to the checkpoint with the latest iteration ''' iteration_nums = []", "structure but the source and target data have the same cardinality and label", "= Counter(reduced_source[1]) reduced_target_count = Counter(reduced_target[1]) logging.info('source label count after reduction ' + str(reduced_source_count))", "module must be the only .py file in the directory # except for", "!= len(target[1]): raise ValueError('The number of target indices %d and target labels %d", "file # remove the .py with [:-3] experiment_module = SourceFileLoader(py_file_name[:-3], py_file_path).load_module() # experiment", "in the set and false otherwise # works with lists or tuples of", "with True if the index was in the set and false otherwise #", "def tuple_of_lists_to_list_of_tuples(tuple_in): return list(zip(*tuple_in)) def list_of_tuples_to_tuple_of_lists(list_in): # zip(*list_in) is a tuple of tuples", "% (len(target[0]),len(target[1]))) # count the labels source_counter = Counter(source[1]) target_counter = Counter(target[1]) #", "config.gpu_options.allow_growth = True # Do not assign whole gpu memory, just use it", "with a list of tuples [(index1, label1), ...] 
source_samples = tuple_of_lists_to_list_of_tuples(source) target_samples =", "each label should get removed # as the corresponding value # assuming only", "raise ValueError('The number of source indices %d and source labels %d is not", "target indices %d and target labels %d is not equal' % (len(target[0]),len(target[1]))) #", "- target_counter t_to_remove = target_counter - source_counter # change to a representation with", "experiment_path def string_dict_in_order(dict, key_function=None, key_string='', value_string=''): # key is a function to give", "to the checkpoint with the latest iteration ''' iteration_nums = [] for file", "# tuples (something, label) # remove_counter is a Counter or dict of with", "to be removed s_to_remove = source_counter - target_counter t_to_remove = target_counter - source_counter", "rest = file.split('.')[0:3] it_num = int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num) latest_iteration = np.max(iteration_nums) return os.path.join(folder, name", "memory is used config = tf.ConfigProto() config.gpu_options.allow_growth = True # Do not assign", "source_tuples2 = tuple_of_lists_to_list_of_tuples(source2) target_tuples2 = tuple_of_lists_to_list_of_tuples(target2) print(source_tuples2) print(target_tuples2) assert set(source_tuples2) <= set(source_tuples) assert", "# make a list with only the tuples that have an index in", "1 class Bunch: def __init__(self, **kwds): self.__dict__.update(kwds) def load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']): # loads", "in sorted(dict, key=key_function, reverse=True): lines.append(key_string + str(dict_key) + ' ' + value_string +", "loads the module of the experiment and returns a loader that can be", "remove_counter from the front of the list in place # tuples (something, label)", "same cardinality and label distribution # make sure there are an equal number", "key=key_function, reverse=True): lines.append(key_string + str(dict_key) + ' ' 
+ value_string + str(dict[dict_key])) print_string", "...], [label1, label2, ...]) reduced_source = list_of_tuples_to_tuple_of_lists(source_samples) reduced_target = list_of_tuples_to_tuple_of_lists(target_samples) reduced_source_count = Counter(reduced_source[1])", "= ages_ord_reg.shape[0] # binned_list = [] # for nn in range(N): # if", "counts = [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for cc in counts] return counts def all_argmax(arr, axis=None):", "str(remove_counter)) remove_counter_copy = remove_counter.copy() remove_indices = set() for ind, tup in enumerate(list_of_tuples): if", "import SourceFileLoader import config.system as sys_config import logging import tensorflow as tf from", "= py_file_list[0] else: py_file_name = file_name py_file_path = os.path.join(experiment_path, py_file_name) # import config", "source_indices1 = [0, 2, 3] source_labels1 = [0, 2, 0] target_indices1 = [1,", "def save_image_and_cut(image, img_name, path_3d, path_2d, vmin=-1, vmax=1): # image is 3d numpy array", "get_session_memory_config(): # prevents ResourceExhaustError when a lot of memory is used config =", "header): ''' Shortcut to save a nifty file ''' nimg = nib.Nifti1Image(data, affine=affine,", "# shuffle data np.random.seed(random_seed) np.random.shuffle(source_samples) np.random.shuffle(source_samples) # remove tuples source_samples = remove_count(source_samples, s_to_remove)", "rotate the image by 90 degree counterclockwise image_cut = np.rot90(image_cut) imsave(os.path.join(path_2d, img_name) +", "takes in sets of indices and changes them to lists with True if", "returns a numpy array of labels assert len(label_list) == len(field_strength_list) labels = np.empty_like(fieldstrengths,", "def create_and_save_nii(data, img_path): img = nib.Nifti1Image(data, np.eye(4)) nib.save(img, img_path) def get_latest_model_checkpoint_path(folder, name): '''", "True if folder created, False if folder already exists ''' if not os.path.exists(folder):", "from 
importlib.machinery import SourceFileLoader import config.system as sys_config import logging import tensorflow as", "and returns a loader that can be used to access variables and classes", "you saved the model :return: The path to the checkpoint with the latest", "front of the list in place # tuples (something, label) # remove_counter is", "source_samples.sort(key=sort_key) target_samples.sort(key=sort_key) # change back to a representation with a tuple of lists", "experiment_module, experiment_path def string_dict_in_order(dict, key_function=None, key_string='', value_string=''): # key is a function to", "name)): file = file.split('/')[-1] file_base, postfix_and_number, rest = file.split('.')[0:3] it_num = int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num)", "and changes them to lists with True if the index was in the", "by remove_counter from the front of the list in place # tuples (something,", "matplotlib.image import imsave def fstr_to_label(fieldstrengths, field_strength_list, label_list): # input fieldstrenghts hdf5 list #", "range(max(ind_set))]) return selector_result # Useful shortcut for making struct like contructs # Example:", "file_name=None, other_py_files=['standard_parameters.py']): # loads the module of the experiment and returns a loader", "file ''' nimg = nib.load(img_path) return nimg.get_data(), nimg.affine, nimg.header def save_nii(img_path, data, affine,", "number of target indices %d and target labels %d is not equal' %", "for ind_set in index_sets: selector_result.append([(index in ind_set) for index in range(max(ind_set))]) return selector_result", "np.empty_like(fieldstrengths, dtype=np.int16) for fs_ind, current_field_strength in enumerate(fieldstrengths): valid_value = False for label_ind, current_label", "os.makedirs(folder) return True return False def load_nii(img_path): ''' Shortcut to load a nifti", "Returns the checkpoint with the highest iteration number with a given name :param", "np.max(iteration_nums) 
return os.path.join(folder, name + '-' + str(latest_iteration)) def index_sets_to_selectors(*index_sets): # takes in", "= [0, 2, 0] target_indices1 = [1, 4, 5] target_labels1 = [2, 2,", "nibabel as nib import numpy as np import os import glob from importlib.machinery", "indices if len(source[0]) != len(source[1]): raise ValueError('The number of source indices %d and", "labels def age_to_ordinal_reg_format(ages, bins=(65, 70, 75, 80, 85)): N = ages.shape[0] P =", "# <NAME> (<EMAIL>) # <NAME> (<EMAIL>) # <NAME> (<EMAIL>) # Useful functions import", "in log directory or experiment file not found') py_file_name = py_file_list[0] else: py_file_name", "''' if not os.path.exists(folder): os.makedirs(folder) return True return False def load_nii(img_path): ''' Shortcut", "the # module (loader.myClass()) # if the file_name of the module is not", "of each label should get removed # as the corresponding value # assuming", "!= experiment_module.experiment_name: logging.warning('warning: the experiment folder name %s is different from the experiment", "list with only the tuples that have an index in keep_indices all_indices =", "O(n) instead of O(1) selector_result = [] for ind_set in index_sets: selector_result.append([(index in", "in fieldstrengths: %s' % current_field_strength) return labels def age_to_ordinal_reg_format(ages, bins=(65, 70, 75, 80,", "tuple_of_lists_to_list_of_tuples(target) # shuffle data np.random.seed(random_seed) np.random.shuffle(source_samples) np.random.shuffle(source_samples) # remove tuples source_samples = remove_count(source_samples,", "directory) py_file_list = [file for file in os.listdir(experiment_path) if (file.endswith('.py') and file not", "a numpy array of labels assert len(label_list) == len(field_strength_list) labels = np.empty_like(fieldstrengths, dtype=np.int16)", "reduced_list def balance_source_target(source, target, random_seed=None): # source and target are tuples with (indices,", "if not all([item[1] >= 0 for item in 
remove_counter.items()]): raise ValueError('There are negative", "as well, but the in operation is O(n) instead of O(1) selector_result =", "struct like contructs # Example: # mystruct = Bunch(a=1, b=2) # print(mystruct.a) #", "= lambda t: t[0] source_samples.sort(key=sort_key) target_samples.sort(key=sort_key) # change back to a representation with", "3d numpy array # path with image name at the end but without", "of memory is used config = tf.ConfigProto() config.gpu_options.allow_growth = True # Do not", "have the same cardinality and label distribution # make sure there are an", "binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) # else: # binned_list.append(0) return np.sum(ages_ord_reg, -1) def get_ordinal_reg_weights(ages_ordinal_reg): ages_binned = list(ordinal_regression_to_bin(ages_ordinal_reg))", "N = ages_ord_reg.shape[0] # binned_list = [] # for nn in range(N): #", "38, :] # rotate the image by 90 degree counterclockwise image_cut = np.rot90(image_cut)", "balance_source_target(source, target, random_seed=None): # source and target are tuples with (indices, labels corresponding", "log directory or experiment file not found') py_file_name = py_file_list[0] else: py_file_name =", "''' iteration_nums = [] for file in glob.glob(os.path.join(folder, '%s*.meta' % name)): file =", "experiment folder name %s is different from the experiment name %s' % (experiment_folder_name,", "(<EMAIL>) # <NAME> (<EMAIL>) # <NAME> (<EMAIL>) # Useful functions import nibabel as", "raise ValueError('The number of target indices %d and target labels %d is not", "True # If a operation is not defined in the default device, let", "% last_step) return init_checkpoint_path, last_step def get_session_memory_config(): # prevents ResourceExhaustError when a lot", "is used config = tf.ConfigProto() config.gpu_options.allow_growth = True # Do not assign whole", "the index was in the set and false otherwise # works with lists", "get removed # as the corresponding value # assuming 
only nonnegative counts if", "place # tuples (something, label) # remove_counter is a Counter or dict of", "Counter(reduced_source[1]) reduced_target_count = Counter(reduced_target[1]) logging.info('source label count after reduction ' + str(reduced_source_count)) logging.info('target", "created, False if folder already exists ''' if not os.path.exists(folder): os.makedirs(folder) return True", "different from the experiment name %s' % (experiment_folder_name, experiment_module.experiment_name)) return experiment_module, experiment_path def", "__init__(self, **kwds): self.__dict__.update(kwds) def load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']): # loads the module of the", "must have the same size as label_list # returns a numpy array of", "target_labels1 = [2, 2, 0] source_labels2 = [0, 0, 0] target_indices2 = [1,", "os.path.splitext(os.path.split(path)[1])[0] return SourceFileLoader(module_name, path).load_module() def get_latest_checkpoint_and_step(logdir, filename): init_checkpoint_path = get_latest_model_checkpoint_path(logdir, filename) logging.info('Checkpoint path:", ":param folder: path to new folder :return: True if folder created, False if", "keep_indices = all_indices - remove_indices reduced_list = [element for ind, element in enumerate(list_of_tuples)", "back to a representation with a tuple of lists of tuples ([index1, index2,", "of source and target domain are now equal assert reduced_source_count == reduced_target_count return", "target, random_seed=None): # source and target are tuples with (indices, labels corresponding to", "75, 80, 85)): N = ages.shape[0] P = len(bins) ages_mat = np.transpose(np.tile(ages,(P,1))) bins_mat", "str(reduced_target_count)) # check whether the label counts of source and target domain are", "# assuming only nonnegative counts if not all([item[1] >= 0 for item in", "imsave(os.path.join(path_2d, img_name) + '.png', image_cut, vmin=vmin, vmax=vmax, cmap='gray') if __name__ 
== '__main__': source_indices1", "classes in the # module (loader.myClass()) # if the file_name of the module", "0: # binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) # else: # binned_list.append(0) return np.sum(ages_ord_reg, -1) def get_ordinal_reg_weights(ages_ordinal_reg): ages_binned", "if len(source[0]) != len(source[1]): raise ValueError('The number of source indices %d and source", "of indices and changes them to lists with True if the index was", "only .py file in the directory # except for the files in other_py_files", "tuples source_samples = remove_count(source_samples, s_to_remove) target_samples = remove_count(target_samples, t_to_remove) # sort by index", "> 0: remove_counter_copy[tup[1]] -= 1 remove_indices.add(ind) # make a list with only the", "imsave def fstr_to_label(fieldstrengths, field_strength_list, label_list): # input fieldstrenghts hdf5 list # field_strength_list must", "iteration_nums.append(it_num) latest_iteration = np.max(iteration_nums) return os.path.join(folder, name + '-' + str(latest_iteration)) def index_sets_to_selectors(*index_sets):", "for element in zip(*list_in)) def remove_count(list_of_tuples, remove_counter): # remove tuples with labels specified", "label2, ...]) reduced_source = list_of_tuples_to_tuple_of_lists(source_samples) reduced_target = list_of_tuples_to_tuple_of_lists(target_samples) reduced_source_count = Counter(reduced_source[1]) reduced_target_count =", "config file (assuming it is the first python file in log directory) py_file_list", "is a tuple of tuples return tuple(list(element) for element in zip(*list_in)) def remove_count(list_of_tuples,", "ind, tup in enumerate(list_of_tuples): if sum(remove_counter.values()) == 0: break else: if remove_counter_copy[tup[1]] >", "# import config file # remove the .py with [:-3] experiment_module = SourceFileLoader(py_file_name[:-3],", "reduced_source_count == reduced_target_count return reduced_source, reduced_target def save_image_and_cut(image, img_name, 
path_3d, path_2d, vmin=-1, vmax=1):", "source_labels2) target = (target_indices2, target_labels2) source_tuples = tuple_of_lists_to_list_of_tuples(source) target_tuples = tuple_of_lists_to_list_of_tuples(target) print(source) print(target)", "(source_indices1, source_labels2) target = (target_indices2, target_labels2) source_tuples = tuple_of_lists_to_list_of_tuples(source) target_tuples = tuple_of_lists_to_list_of_tuples(target) print(source)", "file.split('.')[0:3] it_num = int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num) latest_iteration = np.max(iteration_nums) return os.path.join(folder, name + '-'", "0, 0] target_indices2 = [1, 4, 5, 6, 7] target_labels2 = [2, 2,", "= [1, 4, 5] target_labels1 = [2, 2, 0] source_labels2 = [0, 0,", "file_name of the module is not given then the file of the module", "...] source_samples = tuple_of_lists_to_list_of_tuples(source) target_samples = tuple_of_lists_to_list_of_tuples(target) # shuffle data np.random.seed(random_seed) np.random.shuffle(source_samples) np.random.shuffle(source_samples)", "config def tuple_of_lists_to_list_of_tuples(tuple_in): return list(zip(*tuple_in)) def list_of_tuples_to_tuple_of_lists(list_in): # zip(*list_in) is a tuple of", "iteration_nums = [] for file in glob.glob(os.path.join(folder, '%s*.meta' % name)): file = file.split('/')[-1]", "3] source_labels1 = [0, 2, 0] target_indices1 = [1, 4, 5] target_labels1 =", "value # assuming only nonnegative counts if not all([item[1] >= 0 for item", "a representation with a list of tuples [(index1, label1), ...] 
source_samples = tuple_of_lists_to_list_of_tuples(source)", "np.sum(np.sqrt(counts))) for cc in counts] return counts def all_argmax(arr, axis=None): return np.argwhere(arr ==", "# prevents ResourceExhaustError when a lot of memory is used config = tf.ConfigProto()", "the in operation is O(n) instead of O(1) selector_result = [] for ind_set", "numpy array # path with image name at the end but without the", "tuples ([index1, index2, ...], [label1, label2, ...]) reduced_source = list_of_tuples_to_tuple_of_lists(source_samples) reduced_target = list_of_tuples_to_tuple_of_lists(target_samples)", "= [2, 2, 0] source_labels2 = [0, 0, 0] target_indices2 = [1, 4,", "for making struct like contructs # Example: # mystruct = Bunch(a=1, b=2) #", "init_checkpoint_path, last_step def get_session_memory_config(): # prevents ResourceExhaustError when a lot of memory is", "field_strength_list, label_list): # input fieldstrenghts hdf5 list # field_strength_list must have the same", "a new folder if doesn't exist :param folder: path to new folder :return:", "= remove_count(target_samples, t_to_remove) # sort by index sort_key = lambda t: t[0] source_samples.sort(key=sort_key)", "as nib import numpy as np import os import glob from importlib.machinery import", "as np import os import glob from importlib.machinery import SourceFileLoader import config.system as", "checkpoint with the latest iteration ''' iteration_nums = [] for file in glob.glob(os.path.join(folder,", "- remove_indices reduced_list = [element for ind, element in enumerate(list_of_tuples) if ind in", "nimg.affine, nimg.header def save_nii(img_path, data, affine, header): ''' Shortcut to save a nifty", "85)): ages_ordinal = age_to_ordinal_reg_format(ages, bins) return np.sum(ages_ordinal, axis=-1) def ordinal_regression_to_bin(ages_ord_reg): # N =", "as keys and how many of each label should get removed # as", "from the front of the list in place # tuples (something, label) #", "of target indices %d and target labels %d 
is not equal' % (len(target[0]),len(target[1])))", "are saved :param name: Name under which you saved the model :return: The", "tf from collections import Counter from matplotlib.image import imsave def fstr_to_label(fieldstrengths, field_strength_list, label_list):", "source_samples = tuple_of_lists_to_list_of_tuples(source) target_samples = tuple_of_lists_to_list_of_tuples(target) # shuffle data np.random.seed(random_seed) np.random.shuffle(source_samples) np.random.shuffle(source_samples) #", "collections import Counter from matplotlib.image import imsave def fstr_to_label(fieldstrengths, field_strength_list, label_list): # input", "# count the labels source_counter = Counter(source[1]) target_counter = Counter(target[1]) # only nonnegative", "str(reduced_source_count)) logging.info('target label count after reduction ' + str(reduced_target_count)) # check whether the", "exists ''' if not os.path.exists(folder): os.makedirs(folder) return True return False def load_nii(img_path): '''", "are tuples with (indices, labels corresponding to the indices) where indices and labels", "list(zip(*tuple_in)) def list_of_tuples_to_tuple_of_lists(list_in): # zip(*list_in) is a tuple of tuples return tuple(list(element) for", "4, 5, 6, 7] target_labels2 = [2, 2, 0, 0, 2] source =", "the hippocampy image_cut = image[:, 38, :] # rotate the image by 90", "must be the only .py file in the directory # except for the", "a tuple of lists of tuples ([index1, index2, ...], [label1, label2, ...]) reduced_source", "# change back to a representation with a tuple of lists of tuples", "= [file for file in os.listdir(experiment_path) if (file.endswith('.py') and file not in other_py_files)]", "list of tuples [(index1, label1), ...] 
source_samples = tuple_of_lists_to_list_of_tuples(source) target_samples = tuple_of_lists_to_list_of_tuples(target) #", "with only the tuples that have an index in keep_indices all_indices = set(range(len(list_of_tuples)))", "75, 80, 85)): ages_ordinal = age_to_ordinal_reg_format(ages, bins) return np.sum(ages_ordinal, axis=-1) def ordinal_regression_to_bin(ages_ord_reg): #", "'\\n' lines = [] for dict_key in sorted(dict, key=key_function, reverse=True): lines.append(key_string + str(dict_key)", "fstr_to_label(fieldstrengths, field_strength_list, label_list): # input fieldstrenghts hdf5 list # field_strength_list must have the", "remain, so just what needs to be removed s_to_remove = source_counter - target_counter", "' + value_string + str(dict[dict_key])) print_string = separator.join(lines) return print_string def module_from_path(path): module_name", "indices and changes them to lists with True if the index was in", "if folder already exists ''' if not os.path.exists(folder): os.makedirs(folder) return True return False", "function to give the elements in the dictionary a numerical value that is", "to new folder :return: True if folder created, False if folder already exists", "specified by remove_counter from the front of the list in place # tuples", "element in enumerate(list_of_tuples) if ind in keep_indices] return reduced_list def balance_source_target(source, target, random_seed=None):", "source and target domain are now equal assert reduced_source_count == reduced_target_count return reduced_source,", "affine=affine, header=header) nimg.to_filename(img_path) def create_and_save_nii(data, img_path): img = nib.Nifti1Image(data, np.eye(4)) nib.save(img, img_path) def", "label_list): # input fieldstrenghts hdf5 list # field_strength_list must have the same size", "import nibabel as nib import numpy as np import os import glob from", "nib.Nifti1Image(data, np.eye(4)) nib.save(img, img_path) def get_latest_model_checkpoint_path(folder, name): ''' Returns the 
checkpoint with the", "is not given then the file of the module must be the only", "remove tuples with labels specified by remove_counter from the front of the list", "ValueError('The number of target indices %d and target labels %d is not equal'", "def fstr_to_label(fieldstrengths, field_strength_list, label_list): # input fieldstrenghts hdf5 list # field_strength_list must have", "data, affine, header): ''' Shortcut to save a nifty file ''' nimg =", "which you saved the model :return: The path to the checkpoint with the", "set(range(len(list_of_tuples))) keep_indices = all_indices - remove_indices reduced_list = [element for ind, element in", "if len(target[0]) != len(target[1]): raise ValueError('The number of target indices %d and target", "fieldstrenghts hdf5 list # field_strength_list must have the same size as label_list #", "(<EMAIL>) # <NAME> (<EMAIL>) # Useful functions import nibabel as nib import numpy", "lines = [] for dict_key in sorted(dict, key=key_function, reverse=True): lines.append(key_string + str(dict_key) +", "remove_indices reduced_list = [element for ind, element in enumerate(list_of_tuples) if ind in keep_indices]", "bins=(65, 70, 75, 80, 85)): N = ages.shape[0] P = len(bins) ages_mat =", "the highest iteration number with a given name :param folder: Folder where the", "# Example: # mystruct = Bunch(a=1, b=2) # print(mystruct.a) # >>> 1 class", "removed # as the corresponding value # assuming only nonnegative counts if not", "not all([item[1] >= 0 for item in remove_counter.items()]): raise ValueError('There are negative counts", "py_file_name) # import config file # remove the .py with [:-3] experiment_module =", "change to a representation with a list of tuples [(index1, label1), ...] 
source_samples", "or dict of with labels as keys and how many of each label", "[1, 4, 5] target_labels1 = [2, 2, 0] source_labels2 = [0, 0, 0]", "label should get removed # as the corresponding value # assuming only nonnegative", "folder :return: True if folder created, False if folder already exists ''' if", "Shortcut to load a nifti file ''' nimg = nib.load(img_path) return nimg.get_data(), nimg.affine,", "exist :param folder: path to new folder :return: True if folder created, False", "folder name experiment_folder_name = experiment_path.split('/')[-1] if experiment_folder_name != experiment_module.experiment_name: logging.warning('warning: the experiment folder", "# returns a numpy array of labels assert len(label_list) == len(field_strength_list) labels =", "is not equal' % (len(target[0]),len(target[1]))) # count the labels source_counter = Counter(source[1]) target_counter", "# <NAME> (<EMAIL>) # <NAME> (<EMAIL>) # Useful functions import nibabel as nib", "[1, 4, 5, 6, 7] target_labels2 = [2, 2, 0, 0, 2] source", "tf.ConfigProto() config.gpu_options.allow_growth = True # Do not assign whole gpu memory, just use", "py_file_list[0] else: py_file_name = file_name py_file_path = os.path.join(experiment_path, py_file_name) # import config file", "have an index in keep_indices all_indices = set(range(len(list_of_tuples))) keep_indices = all_indices - remove_indices", "# binned_list.append(0) return np.sum(ages_ord_reg, -1) def get_ordinal_reg_weights(ages_ordinal_reg): ages_binned = list(ordinal_regression_to_bin(ages_ordinal_reg)) P = ages_ordinal_reg.shape[1]", "(assuming it is the first python file in log directory) py_file_list = [file", "t_to_remove = target_counter - source_counter # change to a representation with a list", "bins_mat = np.tile(bins, (N,1)) return np.array(ages_mat>bins_mat, dtype=np.uint8) def age_to_bins(ages, bins=(65, 70, 75, 80,", "data has the same structure but the source and target data have the", "([index1, index2, ...], [label1, 
label2, ...]) reduced_source = list_of_tuples_to_tuple_of_lists(source_samples) reduced_target = list_of_tuples_to_tuple_of_lists(target_samples) reduced_source_count", "functions import nibabel as nib import numpy as np import os import glob", "remove_indices = set() for ind, tup in enumerate(list_of_tuples): if sum(remove_counter.values()) == 0: break", "else: # binned_list.append(0) return np.sum(ages_ord_reg, -1) def get_ordinal_reg_weights(ages_ordinal_reg): ages_binned = list(ordinal_regression_to_bin(ages_ordinal_reg)) P =", "def balance_source_target(source, target, random_seed=None): # source and target are tuples with (indices, labels", "filename) logging.info('Checkpoint path: %s' % init_checkpoint_path) last_step = int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest step was: %d'", "return nimg.get_data(), nimg.affine, nimg.header def save_nii(img_path, data, affine, header): ''' Shortcut to save", "labels specified by remove_counter from the front of the list in place #", "selector_result # Useful shortcut for making struct like contructs # Example: # mystruct", "np.amax(arr, axis=axis)) def makefolder(folder): ''' Helper function to make a new folder if", "**kwds): self.__dict__.update(kwds) def load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']): # loads the module of the experiment", "# image is 3d numpy array # path with image name at the", "after reduction ' + str(reduced_target_count)) # check whether the label counts of source", "(<EMAIL>) # Useful functions import nibabel as nib import numpy as np import", "make sure there are an equal number of labels and indices if len(source[0])", "for the files in other_py_files if file_name is None: # get experiment config", "experiment file not found') py_file_name = py_file_list[0] else: py_file_name = file_name py_file_path =", "order separator = '\\n' lines = [] for dict_key in sorted(dict, key=key_function, reverse=True):", "path to 
the checkpoint with the latest iteration ''' iteration_nums = [] for", "= np.tile(bins, (N,1)) return np.array(ages_mat>bins_mat, dtype=np.uint8) def age_to_bins(ages, bins=(65, 70, 75, 80, 85)):", "if the file_name of the module is not given then the file of", "np.argwhere(arr == np.amax(arr, axis=axis)) def makefolder(folder): ''' Helper function to make a new", "number of source indices %d and source labels %d is not equal' %", "labels[fs_ind] = current_label valid_value = True break if(not valid_value): raise ValueError('unexpected value in", "reverse=True): lines.append(key_string + str(dict_key) + ' ' + value_string + str(dict[dict_key])) print_string =", "70, 75, 80, 85)): N = ages.shape[0] P = len(bins) ages_mat = np.transpose(np.tile(ages,(P,1)))", "index_sets_to_selectors(*index_sets): # takes in sets of indices and changes them to lists with", "and file not in other_py_files)] if len(py_file_list) != 1: raise ValueError('unexpected py files", "logging.warning('warning: the experiment folder name %s is different from the experiment name %s'", "and false otherwise # works with lists or tuples of indices as well,", "first python file in log directory) py_file_list = [file for file in os.listdir(experiment_path)", "'__main__': source_indices1 = [0, 2, 3] source_labels1 = [0, 2, 0] target_indices1 =", "= tuple_of_lists_to_list_of_tuples(target) print(source) print(target) print(source_tuples) print(target_tuples) source2, target2 = balance_source_target(source, target, random_seed=0) print(source2)", "'%s*.meta' % name)): file = file.split('/')[-1] file_base, postfix_and_number, rest = file.split('.')[0:3] it_num =", "4, 5] target_labels1 = [2, 2, 0] source_labels2 = [0, 0, 0] target_indices2", "indices) where indices and labels are lists # the returned data has the", "len(py_file_list) != 1: raise ValueError('unexpected py files in log directory or experiment file", "target, random_seed=0) print(source2) print(target2) source_tuples2 = 
tuple_of_lists_to_list_of_tuples(source2) target_tuples2 = tuple_of_lists_to_list_of_tuples(target2) print(source_tuples2) print(target_tuples2) assert", "if __name__ == '__main__': source_indices1 = [0, 2, 3] source_labels1 = [0, 2,", "false otherwise # works with lists or tuples of indices as well, but", "access variables and classes in the # module (loader.myClass()) # if the file_name", "of tuples ([index1, index2, ...], [label1, label2, ...]) reduced_source = list_of_tuples_to_tuple_of_lists(source_samples) reduced_target =", "numpy as np import os import glob from importlib.machinery import SourceFileLoader import config.system", "separator.join(lines) return print_string def module_from_path(path): module_name = os.path.splitext(os.path.split(path)[1])[0] return SourceFileLoader(module_name, path).load_module() def get_latest_checkpoint_and_step(logdir,", "target = (target_indices2, target_labels2) source_tuples = tuple_of_lists_to_list_of_tuples(source) target_tuples = tuple_of_lists_to_list_of_tuples(target) print(source) print(target) print(source_tuples)", "nn in range(N): # if np.sum(ages_ord_reg[nn,:]) > 0: # binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) # else: #", "files in other_py_files if file_name is None: # get experiment config file (assuming", "SourceFileLoader import config.system as sys_config import logging import tensorflow as tf from collections", "+ str(latest_iteration)) def index_sets_to_selectors(*index_sets): # takes in sets of indices and changes them", "remove_counter_copy[tup[1]] -= 1 remove_indices.add(ind) # make a list with only the tuples that", "(file.endswith('.py') and file not in other_py_files)] if len(py_file_list) != 1: raise ValueError('unexpected py", "source_labels1 = [0, 2, 0] target_indices1 = [1, 4, 5] target_labels1 = [2,", "a nifti file ''' nimg = nib.load(img_path) return nimg.get_data(), nimg.affine, nimg.header def save_nii(img_path,", "list_of_tuples_to_tuple_of_lists(target_samples) 
reduced_source_count = Counter(reduced_source[1]) reduced_target_count = Counter(reduced_target[1]) logging.info('source label count after reduction '", "[np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for cc in counts] return counts def all_argmax(arr, axis=None): return np.argwhere(arr", "Useful shortcut for making struct like contructs # Example: # mystruct = Bunch(a=1,", "return reduced_source, reduced_target def save_image_and_cut(image, img_name, path_3d, path_2d, vmin=-1, vmax=1): # image is", "in counts] return counts def all_argmax(arr, axis=None): return np.argwhere(arr == np.amax(arr, axis=axis)) def", "image by 90 degree counterclockwise image_cut = np.rot90(image_cut) imsave(os.path.join(path_2d, img_name) + '.png', image_cut,", "enumerate(fieldstrengths): valid_value = False for label_ind, current_label in enumerate(label_list): if(current_field_strength == field_strength_list[label_ind]): labels[fs_ind]", "Example: # mystruct = Bunch(a=1, b=2) # print(mystruct.a) # >>> 1 class Bunch:", "shuffle data np.random.seed(random_seed) np.random.shuffle(source_samples) np.random.shuffle(source_samples) # remove tuples source_samples = remove_count(source_samples, s_to_remove) target_samples", "= SourceFileLoader(py_file_name[:-3], py_file_path).load_module() # experiment name is the same as the folder name", "changes them to lists with True if the index was in the set", "iteration number with a given name :param folder: Folder where the checkpoints are", "label_ind, current_label in enumerate(label_list): if(current_field_strength == field_strength_list[label_ind]): labels[fs_ind] = current_label valid_value = True", "get_latest_model_checkpoint_path(logdir, filename) logging.info('Checkpoint path: %s' % init_checkpoint_path) last_step = int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest step was:", "that have an index in keep_indices all_indices = set(range(len(list_of_tuples))) keep_indices = all_indices -", "name %s is different 
from the experiment name %s' % (experiment_folder_name, experiment_module.experiment_name)) return", "return experiment_module, experiment_path def string_dict_in_order(dict, key_function=None, key_string='', value_string=''): # key is a function", "print(target) print(source_tuples) print(target_tuples) source2, target2 = balance_source_target(source, target, random_seed=0) print(source2) print(target2) source_tuples2 =", "# only nonnegative counts remain, so just what needs to be removed s_to_remove", "are lists # the returned data has the same structure but the source", "2, 0] target_indices1 = [1, 4, 5] target_labels1 = [2, 2, 0] source_labels2", "ValueError('The number of source indices %d and source labels %d is not equal'", "# check whether the label counts of source and target domain are now", "False if folder already exists ''' if not os.path.exists(folder): os.makedirs(folder) return True return", "hippocampy image_cut = image[:, 38, :] # rotate the image by 90 degree", "lambda t: t[0] source_samples.sort(key=sort_key) target_samples.sort(key=sort_key) # change back to a representation with a", "make a list with only the tuples that have an index in keep_indices", "a representation with a tuple of lists of tuples ([index1, index2, ...], [label1,", "doesn't exist :param folder: path to new folder :return: True if folder created,", "ind_set) for index in range(max(ind_set))]) return selector_result # Useful shortcut for making struct", "tuples [(index1, label1), ...] 
source_samples = tuple_of_lists_to_list_of_tuples(source) target_samples = tuple_of_lists_to_list_of_tuples(target) # shuffle data", "in the directory # except for the files in other_py_files if file_name is", "= all_indices - remove_indices reduced_list = [element for ind, element in enumerate(list_of_tuples) if", "get_latest_model_checkpoint_path(folder, name): ''' Returns the checkpoint with the highest iteration number with a", "def get_latest_model_checkpoint_path(folder, name): ''' Returns the checkpoint with the highest iteration number with", "%s' % str(remove_counter)) remove_counter_copy = remove_counter.copy() remove_indices = set() for ind, tup in", "get_latest_checkpoint_and_step(logdir, filename): init_checkpoint_path = get_latest_model_checkpoint_path(logdir, filename) logging.info('Checkpoint path: %s' % init_checkpoint_path) last_step =", "ind_set in index_sets: selector_result.append([(index in ind_set) for index in range(max(ind_set))]) return selector_result #", "print(mystruct.a) # >>> 1 class Bunch: def __init__(self, **kwds): self.__dict__.update(kwds) def load_log_exp_config(experiment_path, file_name=None,", "not defined in the default device, let it execute in another. 
return config", "== np.amax(arr, axis=axis)) def makefolder(folder): ''' Helper function to make a new folder", "as the corresponding value # assuming only nonnegative counts if not all([item[1] >=", "target2 = balance_source_target(source, target, random_seed=0) print(source2) print(target2) source_tuples2 = tuple_of_lists_to_list_of_tuples(source2) target_tuples2 = tuple_of_lists_to_list_of_tuples(target2)", "[0, 0, 0] target_indices2 = [1, 4, 5, 6, 7] target_labels2 = [2,", "' ' + value_string + str(dict[dict_key])) print_string = separator.join(lines) return print_string def module_from_path(path):", "the elements in the dictionary a numerical value that is used for the", "reduced_target_count = Counter(reduced_target[1]) logging.info('source label count after reduction ' + str(reduced_source_count)) logging.info('target label", "counts remain, so just what needs to be removed s_to_remove = source_counter -", "or experiment file not found') py_file_name = py_file_list[0] else: py_file_name = file_name py_file_path", "in place # tuples (something, label) # remove_counter is a Counter or dict", "with [:-3] experiment_module = SourceFileLoader(py_file_name[:-3], py_file_path).load_module() # experiment name is the same as", "% current_field_strength) return labels def age_to_ordinal_reg_format(ages, bins=(65, 70, 75, 80, 85)): N =", "name at the end but without the ending .nii.gz create_and_save_nii(image, os.path.join(path_3d, img_name) +", "'-' + str(latest_iteration)) def index_sets_to_selectors(*index_sets): # takes in sets of indices and changes", "in another. 
return config def tuple_of_lists_to_list_of_tuples(tuple_in): return list(zip(*tuple_in)) def list_of_tuples_to_tuple_of_lists(list_in): # zip(*list_in) is", "name): ''' Returns the checkpoint with the highest iteration number with a given", "= remove_count(source_samples, s_to_remove) target_samples = remove_count(target_samples, t_to_remove) # sort by index sort_key =", "return counts def all_argmax(arr, axis=None): return np.argwhere(arr == np.amax(arr, axis=axis)) def makefolder(folder): '''", "source indices %d and source labels %d is not equal' % (len(source[0]),len(source[1]))) if", "other_py_files if file_name is None: # get experiment config file (assuming it is", "'.nii.gz') # coronal cut through the hippocampy image_cut = image[:, 38, :] #", "def get_session_memory_config(): # prevents ResourceExhaustError when a lot of memory is used config", "of O(1) selector_result = [] for ind_set in index_sets: selector_result.append([(index in ind_set) for", "tuple_of_lists_to_list_of_tuples(source) target_tuples = tuple_of_lists_to_list_of_tuples(target) print(source) print(target) print(source_tuples) print(target_tuples) source2, target2 = balance_source_target(source, target,", "importlib.machinery import SourceFileLoader import config.system as sys_config import logging import tensorflow as tf", "# the returned data has the same structure but the source and target", "distribution # make sure there are an equal number of labels and indices", "for ind, tup in enumerate(list_of_tuples): if sum(remove_counter.values()) == 0: break else: if remove_counter_copy[tup[1]]", "saved the model :return: The path to the checkpoint with the latest iteration", "labels %d is not equal' % (len(target[0]),len(target[1]))) # count the labels source_counter =", "bins=(65, 70, 75, 80, 85)): ages_ordinal = age_to_ordinal_reg_format(ages, bins) return np.sum(ages_ordinal, axis=-1) def", "the same size as label_list # returns a numpy array of labels assert", "load_nii(img_path): ''' 
Shortcut to load a nifti file ''' nimg = nib.load(img_path) return", "in remove_counter %s' % str(remove_counter)) remove_counter_copy = remove_counter.copy() remove_indices = set() for ind,", "reduced_target_count return reduced_source, reduced_target def save_image_and_cut(image, img_name, path_3d, path_2d, vmin=-1, vmax=1): # image", "ages_ord_reg.shape[0] # binned_list = [] # for nn in range(N): # if np.sum(ages_ord_reg[nn,:])", "break else: if remove_counter_copy[tup[1]] > 0: remove_counter_copy[tup[1]] -= 1 remove_indices.add(ind) # make a", "by 90 degree counterclockwise image_cut = np.rot90(image_cut) imsave(os.path.join(path_2d, img_name) + '.png', image_cut, vmin=vmin,", "indices as well, but the in operation is O(n) instead of O(1) selector_result", "sys_config import logging import tensorflow as tf from collections import Counter from matplotlib.image", "= os.path.join(experiment_path, py_file_name) # import config file # remove the .py with [:-3]", "keep_indices all_indices = set(range(len(list_of_tuples))) keep_indices = all_indices - remove_indices reduced_list = [element for", "reduced_target def save_image_and_cut(image, img_name, path_3d, path_2d, vmin=-1, vmax=1): # image is 3d numpy", "logging.info('target label count after reduction ' + str(reduced_target_count)) # check whether the label", "np.sum(ages_ord_reg[nn,:]) > 0: # binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) # else: # binned_list.append(0) return np.sum(ages_ord_reg, -1) def", "and labels are lists # the returned data has the same structure but", "nimg.get_data(), nimg.affine, nimg.header def save_nii(img_path, data, affine, header): ''' Shortcut to save a", "%d is not equal' % (len(source[0]),len(source[1]))) if len(target[0]) != len(target[1]): raise ValueError('The number", "= ages.shape[0] P = len(bins) ages_mat = np.transpose(np.tile(ages,(P,1))) bins_mat = np.tile(bins, (N,1)) return", "np.random.shuffle(source_samples) # remove tuples source_samples = 
remove_count(source_samples, s_to_remove) target_samples = remove_count(target_samples, t_to_remove) #", ">= 0 for item in remove_counter.items()]): raise ValueError('There are negative counts in remove_counter", "input fieldstrenghts hdf5 list # field_strength_list must have the same size as label_list", "True if the index was in the set and false otherwise # works", "be removed s_to_remove = source_counter - target_counter t_to_remove = target_counter - source_counter #", "def list_of_tuples_to_tuple_of_lists(list_in): # zip(*list_in) is a tuple of tuples return tuple(list(element) for element", "list_of_tuples_to_tuple_of_lists(list_in): # zip(*list_in) is a tuple of tuples return tuple(list(element) for element in", "mystruct = Bunch(a=1, b=2) # print(mystruct.a) # >>> 1 class Bunch: def __init__(self,", "file (assuming it is the first python file in log directory) py_file_list =", "# for nn in range(N): # if np.sum(ages_ord_reg[nn,:]) > 0: # binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) #", "ordinal_regression_to_bin(ages_ord_reg): # N = ages_ord_reg.shape[0] # binned_list = [] # for nn in", "all([item[1] >= 0 for item in remove_counter.items()]): raise ValueError('There are negative counts in", "== 0: break else: if remove_counter_copy[tup[1]] > 0: remove_counter_copy[tup[1]] -= 1 remove_indices.add(ind) #", "to a representation with a list of tuples [(index1, label1), ...] 
source_samples =", "# Useful functions import nibabel as nib import numpy as np import os", "set and false otherwise # works with lists or tuples of indices as", "checkpoints are saved :param name: Name under which you saved the model :return:", "valid_value): raise ValueError('unexpected value in fieldstrengths: %s' % current_field_strength) return labels def age_to_ordinal_reg_format(ages,", "count after reduction ' + str(reduced_target_count)) # check whether the label counts of", "remove tuples source_samples = remove_count(source_samples, s_to_remove) target_samples = remove_count(target_samples, t_to_remove) # sort by", "P = len(bins) ages_mat = np.transpose(np.tile(ages,(P,1))) bins_mat = np.tile(bins, (N,1)) return np.array(ages_mat>bins_mat, dtype=np.uint8)", "labels assert len(label_list) == len(field_strength_list) labels = np.empty_like(fieldstrengths, dtype=np.int16) for fs_ind, current_field_strength in", "experiment_module = SourceFileLoader(py_file_name[:-3], py_file_path).load_module() # experiment name is the same as the folder", "sets of indices and changes them to lists with True if the index", "return False def load_nii(img_path): ''' Shortcut to load a nifti file ''' nimg", "directory # except for the files in other_py_files if file_name is None: #", "if the index was in the set and false otherwise # works with", "% name)): file = file.split('/')[-1] file_base, postfix_and_number, rest = file.split('.')[0:3] it_num = int(postfix_and_number.split('-')[-1])", "module_from_path(path): module_name = os.path.splitext(os.path.split(path)[1])[0] return SourceFileLoader(module_name, path).load_module() def get_latest_checkpoint_and_step(logdir, filename): init_checkpoint_path = get_latest_model_checkpoint_path(logdir,", "the only .py file in the directory # except for the files in", "range(P)] counts = [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for cc in counts] return counts def all_argmax(arr,", "file in os.listdir(experiment_path) if 
(file.endswith('.py') and file not in other_py_files)] if len(py_file_list) !=", "representation with a tuple of lists of tuples ([index1, index2, ...], [label1, label2,", "index_sets: selector_result.append([(index in ind_set) for index in range(max(ind_set))]) return selector_result # Useful shortcut", "and label distribution # make sure there are an equal number of labels", "in other_py_files if file_name is None: # get experiment config file (assuming it", "import numpy as np import os import glob from importlib.machinery import SourceFileLoader import", "change back to a representation with a tuple of lists of tuples ([index1,", "# key is a function to give the elements in the dictionary a", "experiment_module.experiment_name)) return experiment_module, experiment_path def string_dict_in_order(dict, key_function=None, key_string='', value_string=''): # key is a", "negative counts in remove_counter %s' % str(remove_counter)) remove_counter_copy = remove_counter.copy() remove_indices = set()", "degree counterclockwise image_cut = np.rot90(image_cut) imsave(os.path.join(path_2d, img_name) + '.png', image_cut, vmin=vmin, vmax=vmax, cmap='gray')", "in log directory) py_file_list = [file for file in os.listdir(experiment_path) if (file.endswith('.py') and", "used for the order separator = '\\n' lines = [] for dict_key in", "and classes in the # module (loader.myClass()) # if the file_name of the", "0, 2] source = (source_indices1, source_labels2) target = (target_indices2, target_labels2) source_tuples = tuple_of_lists_to_list_of_tuples(source)", "[file for file in os.listdir(experiment_path) if (file.endswith('.py') and file not in other_py_files)] if", "None: # get experiment config file (assuming it is the first python file", "= False for label_ind, current_label in enumerate(label_list): if(current_field_strength == field_strength_list[label_ind]): labels[fs_ind] = current_label", "== reduced_target_count return reduced_source, reduced_target def 
save_image_and_cut(image, img_name, path_3d, path_2d, vmin=-1, vmax=1): #", "path).load_module() def get_latest_checkpoint_and_step(logdir, filename): init_checkpoint_path = get_latest_model_checkpoint_path(logdir, filename) logging.info('Checkpoint path: %s' % init_checkpoint_path)", "and target data have the same cardinality and label distribution # make sure", "get_ordinal_reg_weights(ages_ordinal_reg): ages_binned = list(ordinal_regression_to_bin(ages_ordinal_reg)) P = ages_ordinal_reg.shape[1] counts = [ages_binned.count(pp) for pp in", "file of the module must be the only .py file in the directory", "= tuple_of_lists_to_list_of_tuples(source) target_samples = tuple_of_lists_to_list_of_tuples(target) # shuffle data np.random.seed(random_seed) np.random.shuffle(source_samples) np.random.shuffle(source_samples) # remove", "return SourceFileLoader(module_name, path).load_module() def get_latest_checkpoint_and_step(logdir, filename): init_checkpoint_path = get_latest_model_checkpoint_path(logdir, filename) logging.info('Checkpoint path: %s'", "is different from the experiment name %s' % (experiment_folder_name, experiment_module.experiment_name)) return experiment_module, experiment_path", "or tuples of indices as well, but the in operation is O(n) instead", "= [] # for nn in range(N): # if np.sum(ages_ord_reg[nn,:]) > 0: #", "valid_value = True break if(not valid_value): raise ValueError('unexpected value in fieldstrengths: %s' %", ">>> 1 class Bunch: def __init__(self, **kwds): self.__dict__.update(kwds) def load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']): #", "can be used to access variables and classes in the # module (loader.myClass())", "file not found') py_file_name = py_file_list[0] else: py_file_name = file_name py_file_path = os.path.join(experiment_path,", "age_to_ordinal_reg_format(ages, bins) return np.sum(ages_ordinal, axis=-1) def ordinal_regression_to_bin(ages_ord_reg): # N = ages_ord_reg.shape[0] # 
binned_list", "now equal assert reduced_source_count == reduced_target_count return reduced_source, reduced_target def save_image_and_cut(image, img_name, path_3d,", "source_samples = remove_count(source_samples, s_to_remove) target_samples = remove_count(target_samples, t_to_remove) # sort by index sort_key", "import config file # remove the .py with [:-3] experiment_module = SourceFileLoader(py_file_name[:-3], py_file_path).load_module()", "files in log directory or experiment file not found') py_file_name = py_file_list[0] else:", "counts in remove_counter %s' % str(remove_counter)) remove_counter_copy = remove_counter.copy() remove_indices = set() for", "it execute in another. return config def tuple_of_lists_to_list_of_tuples(tuple_in): return list(zip(*tuple_in)) def list_of_tuples_to_tuple_of_lists(list_in): #", "(len(target[0]),len(target[1]))) # count the labels source_counter = Counter(source[1]) target_counter = Counter(target[1]) # only", "ages.shape[0] P = len(bins) ages_mat = np.transpose(np.tile(ages,(P,1))) bins_mat = np.tile(bins, (N,1)) return np.array(ages_mat>bins_mat,", "= set() for ind, tup in enumerate(list_of_tuples): if sum(remove_counter.values()) == 0: break else:", "load a nifti file ''' nimg = nib.load(img_path) return nimg.get_data(), nimg.affine, nimg.header def", "return list(zip(*tuple_in)) def list_of_tuples_to_tuple_of_lists(list_in): # zip(*list_in) is a tuple of tuples return tuple(list(element)", "list(ordinal_regression_to_bin(ages_ordinal_reg)) P = ages_ordinal_reg.shape[1] counts = [ages_binned.count(pp) for pp in range(P)] counts =", "label distribution # make sure there are an equal number of labels and", "os.path.join(experiment_path, py_file_name) # import config file # remove the .py with [:-3] experiment_module", "[0, 2, 3] source_labels1 = [0, 2, 0] target_indices1 = [1, 4, 5]", "# zip(*list_in) is a tuple of tuples return tuple(list(element) for element in zip(*list_in))", "= current_label valid_value = True break 
if(not valid_value): raise ValueError('unexpected value in fieldstrengths:", "makefolder(folder): ''' Helper function to make a new folder if doesn't exist :param", "(target_indices2, target_labels2) source_tuples = tuple_of_lists_to_list_of_tuples(source) target_tuples = tuple_of_lists_to_list_of_tuples(target) print(source) print(target) print(source_tuples) print(target_tuples) source2,", "len(field_strength_list) labels = np.empty_like(fieldstrengths, dtype=np.int16) for fs_ind, current_field_strength in enumerate(fieldstrengths): valid_value = False", "corresponding to the indices) where indices and labels are lists # the returned", "%d is not equal' % (len(target[0]),len(target[1]))) # count the labels source_counter = Counter(source[1])", "a function to give the elements in the dictionary a numerical value that", "glob from importlib.machinery import SourceFileLoader import config.system as sys_config import logging import tensorflow", "= '\\n' lines = [] for dict_key in sorted(dict, key=key_function, reverse=True): lines.append(key_string +", "= experiment_path.split('/')[-1] if experiment_folder_name != experiment_module.experiment_name: logging.warning('warning: the experiment folder name %s is", "sort by index sort_key = lambda t: t[0] source_samples.sort(key=sort_key) target_samples.sort(key=sort_key) # change back", "# >>> 1 class Bunch: def __init__(self, **kwds): self.__dict__.update(kwds) def load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']):", "prevents ResourceExhaustError when a lot of memory is used config = tf.ConfigProto() config.gpu_options.allow_growth", "== field_strength_list[label_ind]): labels[fs_ind] = current_label valid_value = True break if(not valid_value): raise ValueError('unexpected", "= [0, 0, 0] target_indices2 = [1, 4, 5, 6, 7] target_labels2 =", "new folder if doesn't exist :param folder: path to new folder :return: True", "file_name py_file_path = os.path.join(experiment_path, 
py_file_name) # import config file # remove the .py", "axis=-1) def ordinal_regression_to_bin(ages_ord_reg): # N = ages_ord_reg.shape[0] # binned_list = [] # for", "be the only .py file in the directory # except for the files", "def module_from_path(path): module_name = os.path.splitext(os.path.split(path)[1])[0] return SourceFileLoader(module_name, path).load_module() def get_latest_checkpoint_and_step(logdir, filename): init_checkpoint_path =", "if not os.path.exists(folder): os.makedirs(folder) return True return False def load_nii(img_path): ''' Shortcut to", "index was in the set and false otherwise # works with lists or", "removed s_to_remove = source_counter - target_counter t_to_remove = target_counter - source_counter # change", "go config.allow_soft_placement = True # If a operation is not defined in the", "80, 85)): ages_ordinal = age_to_ordinal_reg_format(ages, bins) return np.sum(ages_ordinal, axis=-1) def ordinal_regression_to_bin(ages_ord_reg): # N", "# make sure there are an equal number of labels and indices if", "sort_key = lambda t: t[0] source_samples.sort(key=sort_key) target_samples.sort(key=sort_key) # change back to a representation", "step was: %d' % last_step) return init_checkpoint_path, last_step def get_session_memory_config(): # prevents ResourceExhaustError", "+ str(reduced_target_count)) # check whether the label counts of source and target domain", "current_field_strength in enumerate(fieldstrengths): valid_value = False for label_ind, current_label in enumerate(label_list): if(current_field_strength ==", "= (target_indices2, target_labels2) source_tuples = tuple_of_lists_to_list_of_tuples(source) target_tuples = tuple_of_lists_to_list_of_tuples(target) print(source) print(target) print(source_tuples) print(target_tuples)", "filename): init_checkpoint_path = get_latest_model_checkpoint_path(logdir, filename) logging.info('Checkpoint path: %s' % init_checkpoint_path) last_step = 
int(init_checkpoint_path.split('/')[-1].split('-')[-1])", "domain are now equal assert reduced_source_count == reduced_target_count return reduced_source, reduced_target def save_image_and_cut(image,", "of the list in place # tuples (something, label) # remove_counter is a", "ages_binned = list(ordinal_regression_to_bin(ages_ordinal_reg)) P = ages_ordinal_reg.shape[1] counts = [ages_binned.count(pp) for pp in range(P)]", "# If a operation is not defined in the default device, let it", "source_counter # change to a representation with a list of tuples [(index1, label1),", "other_py_files=['standard_parameters.py']): # loads the module of the experiment and returns a loader that", "list_of_tuples_to_tuple_of_lists(source_samples) reduced_target = list_of_tuples_to_tuple_of_lists(target_samples) reduced_source_count = Counter(reduced_source[1]) reduced_target_count = Counter(reduced_target[1]) logging.info('source label count", "as tf from collections import Counter from matplotlib.image import imsave def fstr_to_label(fieldstrengths, field_strength_list,", "the label counts of source and target domain are now equal assert reduced_source_count", "is a Counter or dict of with labels as keys and how many", "ending .nii.gz create_and_save_nii(image, os.path.join(path_3d, img_name) + '.nii.gz') # coronal cut through the hippocampy", "has the same structure but the source and target data have the same", "the set and false otherwise # works with lists or tuples of indices", "img_name) + '.png', image_cut, vmin=vmin, vmax=vmax, cmap='gray') if __name__ == '__main__': source_indices1 =", "module is not given then the file of the module must be the", "given name :param folder: Folder where the checkpoints are saved :param name: Name", "(indices, labels corresponding to the indices) where indices and labels are lists #", "it_num = int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num) latest_iteration = np.max(iteration_nums) return os.path.join(folder, name + '-' 
+", "a list of tuples [(index1, label1), ...] source_samples = tuple_of_lists_to_list_of_tuples(source) target_samples = tuple_of_lists_to_list_of_tuples(target)", "all_indices - remove_indices reduced_list = [element for ind, element in enumerate(list_of_tuples) if ind", "+ str(dict[dict_key])) print_string = separator.join(lines) return print_string def module_from_path(path): module_name = os.path.splitext(os.path.split(path)[1])[0] return", "nib.save(img, img_path) def get_latest_model_checkpoint_path(folder, name): ''' Returns the checkpoint with the highest iteration", "the checkpoints are saved :param name: Name under which you saved the model", "random_seed=0) print(source2) print(target2) source_tuples2 = tuple_of_lists_to_list_of_tuples(source2) target_tuples2 = tuple_of_lists_to_list_of_tuples(target2) print(source_tuples2) print(target_tuples2) assert set(source_tuples2)", "through the hippocampy image_cut = image[:, 38, :] # rotate the image by", "tuple_of_lists_to_list_of_tuples(target) print(source) print(target) print(source_tuples) print(target_tuples) source2, target2 = balance_source_target(source, target, random_seed=0) print(source2) print(target2)", "module of the experiment and returns a loader that can be used to", "target are tuples with (indices, labels corresponding to the indices) where indices and", "already exists ''' if not os.path.exists(folder): os.makedirs(folder) return True return False def load_nii(img_path):", "= [element for ind, element in enumerate(list_of_tuples) if ind in keep_indices] return reduced_list", ":param name: Name under which you saved the model :return: The path to", "an equal number of labels and indices if len(source[0]) != len(source[1]): raise ValueError('The", "+ '.png', image_cut, vmin=vmin, vmax=vmax, cmap='gray') if __name__ == '__main__': source_indices1 = [0,", "if folder created, False if folder already exists ''' if not os.path.exists(folder): os.makedirs(folder)", "remove_count(target_samples, 
t_to_remove) # sort by index sort_key = lambda t: t[0] source_samples.sort(key=sort_key) target_samples.sort(key=sort_key)", "of with labels as keys and how many of each label should get", "making struct like contructs # Example: # mystruct = Bunch(a=1, b=2) # print(mystruct.a)", "> 0: # binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) # else: # binned_list.append(0) return np.sum(ages_ord_reg, -1) def get_ordinal_reg_weights(ages_ordinal_reg):", "the first python file in log directory) py_file_list = [file for file in", "to the indices) where indices and labels are lists # the returned data", "the .py with [:-3] experiment_module = SourceFileLoader(py_file_name[:-3], py_file_path).load_module() # experiment name is the", "7] target_labels2 = [2, 2, 0, 0, 2] source = (source_indices1, source_labels2) target", "+ str(reduced_source_count)) logging.info('target label count after reduction ' + str(reduced_target_count)) # check whether", "enumerate(list_of_tuples): if sum(remove_counter.values()) == 0: break else: if remove_counter_copy[tup[1]] > 0: remove_counter_copy[tup[1]] -=", "nib import numpy as np import os import glob from importlib.machinery import SourceFileLoader", "# <NAME> (<EMAIL>) # Useful functions import nibabel as nib import numpy as", "source and target are tuples with (indices, labels corresponding to the indices) where", "# remove tuples source_samples = remove_count(source_samples, s_to_remove) target_samples = remove_count(target_samples, t_to_remove) # sort", "= int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest step was: %d' % last_step) return init_checkpoint_path, last_step def get_session_memory_config():", "lists with True if the index was in the set and false otherwise", "remove_counter %s' % str(remove_counter)) remove_counter_copy = remove_counter.copy() remove_indices = set() for ind, tup", "folder name %s is different from the experiment name %s' % (experiment_folder_name, 
experiment_module.experiment_name))", "the experiment and returns a loader that can be used to access variables", "for fs_ind, current_field_strength in enumerate(fieldstrengths): valid_value = False for label_ind, current_label in enumerate(label_list):", "str(latest_iteration)) def index_sets_to_selectors(*index_sets): # takes in sets of indices and changes them to", "if doesn't exist :param folder: path to new folder :return: True if folder", "set() for ind, tup in enumerate(list_of_tuples): if sum(remove_counter.values()) == 0: break else: if", "config.system as sys_config import logging import tensorflow as tf from collections import Counter", "folder created, False if folder already exists ''' if not os.path.exists(folder): os.makedirs(folder) return", "in range(P)] counts = [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for cc in counts] return counts def", "lot of memory is used config = tf.ConfigProto() config.gpu_options.allow_growth = True # Do", "os.path.join(path_3d, img_name) + '.nii.gz') # coronal cut through the hippocampy image_cut = image[:,", "item in remove_counter.items()]): raise ValueError('There are negative counts in remove_counter %s' % str(remove_counter))", "list in place # tuples (something, label) # remove_counter is a Counter or", "equal' % (len(source[0]),len(source[1]))) if len(target[0]) != len(target[1]): raise ValueError('The number of target indices", "N = ages.shape[0] P = len(bins) ages_mat = np.transpose(np.tile(ages,(P,1))) bins_mat = np.tile(bins, (N,1))", "reduced_source, reduced_target def save_image_and_cut(image, img_name, path_3d, path_2d, vmin=-1, vmax=1): # image is 3d", "print(source) print(target) print(source_tuples) print(target_tuples) source2, target2 = balance_source_target(source, target, random_seed=0) print(source2) print(target2) source_tuples2", "= np.transpose(np.tile(ages,(P,1))) bins_mat = np.tile(bins, (N,1)) return np.array(ages_mat>bins_mat, dtype=np.uint8) def age_to_bins(ages, bins=(65, 70,", 
"indices %d and source labels %d is not equal' % (len(source[0]),len(source[1]))) if len(target[0])", "ValueError('unexpected py files in log directory or experiment file not found') py_file_name =", "return True return False def load_nii(img_path): ''' Shortcut to load a nifti file", "vmin=-1, vmax=1): # image is 3d numpy array # path with image name", "there are an equal number of labels and indices if len(source[0]) != len(source[1]):", "reduction ' + str(reduced_target_count)) # check whether the label counts of source and", "str(dict[dict_key])) print_string = separator.join(lines) return print_string def module_from_path(path): module_name = os.path.splitext(os.path.split(path)[1])[0] return SourceFileLoader(module_name,", "# coronal cut through the hippocampy image_cut = image[:, 38, :] # rotate", "without the ending .nii.gz create_and_save_nii(image, os.path.join(path_3d, img_name) + '.nii.gz') # coronal cut through", "the go config.allow_soft_placement = True # If a operation is not defined in", "''' Shortcut to load a nifti file ''' nimg = nib.load(img_path) return nimg.get_data(),", "in os.listdir(experiment_path) if (file.endswith('.py') and file not in other_py_files)] if len(py_file_list) != 1:", ".py with [:-3] experiment_module = SourceFileLoader(py_file_name[:-3], py_file_path).load_module() # experiment name is the same", "save_image_and_cut(image, img_name, path_3d, path_2d, vmin=-1, vmax=1): # image is 3d numpy array #", "axis=axis)) def makefolder(folder): ''' Helper function to make a new folder if doesn't", "zip(*list_in)) def remove_count(list_of_tuples, remove_counter): # remove tuples with labels specified by remove_counter from", "equal assert reduced_source_count == reduced_target_count return reduced_source, reduced_target def save_image_and_cut(image, img_name, path_3d, path_2d,", "len(source[0]) != len(source[1]): raise ValueError('The number of source indices %d and source labels", "0] target_indices2 = [1, 4, 5, 6, 7] target_labels2 
= [2, 2, 0,", "py files in log directory or experiment file not found') py_file_name = py_file_list[0]", "size as label_list # returns a numpy array of labels assert len(label_list) ==", "s_to_remove) target_samples = remove_count(target_samples, t_to_remove) # sort by index sort_key = lambda t:", "file in log directory) py_file_list = [file for file in os.listdir(experiment_path) if (file.endswith('.py')", "def makefolder(folder): ''' Helper function to make a new folder if doesn't exist", "use it on the go config.allow_soft_placement = True # If a operation is", "equal number of labels and indices if len(source[0]) != len(source[1]): raise ValueError('The number", "in sets of indices and changes them to lists with True if the", "''' Returns the checkpoint with the highest iteration number with a given name", "(something, label) # remove_counter is a Counter or dict of with labels as", "cmap='gray') if __name__ == '__main__': source_indices1 = [0, 2, 3] source_labels1 = [0,", "''' nimg = nib.Nifti1Image(data, affine=affine, header=header) nimg.to_filename(img_path) def create_and_save_nii(data, img_path): img = nib.Nifti1Image(data,", "current_label valid_value = True break if(not valid_value): raise ValueError('unexpected value in fieldstrengths: %s'", "same as the folder name experiment_folder_name = experiment_path.split('/')[-1] if experiment_folder_name != experiment_module.experiment_name: logging.warning('warning:", "np import os import glob from importlib.machinery import SourceFileLoader import config.system as sys_config", "(N,1)) return np.array(ages_mat>bins_mat, dtype=np.uint8) def age_to_bins(ages, bins=(65, 70, 75, 80, 85)): ages_ordinal =", "labels and indices if len(source[0]) != len(source[1]): raise ValueError('The number of source indices", "same size as label_list # returns a numpy array of labels assert len(label_list)", "is a function to give the elements in the dictionary a numerical value", "# experiment name is the same as the folder name 
experiment_folder_name = experiment_path.split('/')[-1]", "= nib.load(img_path) return nimg.get_data(), nimg.affine, nimg.header def save_nii(img_path, data, affine, header): ''' Shortcut", "is 3d numpy array # path with image name at the end but", "with (indices, labels corresponding to the indices) where indices and labels are lists", "import config.system as sys_config import logging import tensorflow as tf from collections import", "bins) return np.sum(ages_ordinal, axis=-1) def ordinal_regression_to_bin(ages_ord_reg): # N = ages_ord_reg.shape[0] # binned_list =", "of source indices %d and source labels %d is not equal' % (len(source[0]),len(source[1])))", "-1) def get_ordinal_reg_weights(ages_ordinal_reg): ages_binned = list(ordinal_regression_to_bin(ages_ordinal_reg)) P = ages_ordinal_reg.shape[1] counts = [ages_binned.count(pp) for", "of the experiment and returns a loader that can be used to access", "= Bunch(a=1, b=2) # print(mystruct.a) # >>> 1 class Bunch: def __init__(self, **kwds):", "__name__ == '__main__': source_indices1 = [0, 2, 3] source_labels1 = [0, 2, 0]", "but the source and target data have the same cardinality and label distribution", ":param folder: Folder where the checkpoints are saved :param name: Name under which", "= separator.join(lines) return print_string def module_from_path(path): module_name = os.path.splitext(os.path.split(path)[1])[0] return SourceFileLoader(module_name, path).load_module() def", ".nii.gz create_and_save_nii(image, os.path.join(path_3d, img_name) + '.nii.gz') # coronal cut through the hippocampy image_cut", "the file_name of the module is not given then the file of the", "index sort_key = lambda t: t[0] source_samples.sort(key=sort_key) target_samples.sort(key=sort_key) # change back to a", "the module of the experiment and returns a loader that can be used", "key_string='', value_string=''): # key is a function to give the elements in the", "hdf5 list # field_strength_list must have the same size as label_list 
# returns", "tuple of lists of tuples ([index1, index2, ...], [label1, label2, ...]) reduced_source =", "remove_counter is a Counter or dict of with labels as keys and how", "tensorflow as tf from collections import Counter from matplotlib.image import imsave def fstr_to_label(fieldstrengths,", "file_name is None: # get experiment config file (assuming it is the first", "os.path.exists(folder): os.makedirs(folder) return True return False def load_nii(img_path): ''' Shortcut to load a", "directory or experiment file not found') py_file_name = py_file_list[0] else: py_file_name = file_name", "that can be used to access variables and classes in the # module", "the model :return: The path to the checkpoint with the latest iteration '''", "str(dict_key) + ' ' + value_string + str(dict[dict_key])) print_string = separator.join(lines) return print_string", "img_path) def get_latest_model_checkpoint_path(folder, name): ''' Returns the checkpoint with the highest iteration number", "folder if doesn't exist :param folder: path to new folder :return: True if", "the order separator = '\\n' lines = [] for dict_key in sorted(dict, key=key_function,", "[] for file in glob.glob(os.path.join(folder, '%s*.meta' % name)): file = file.split('/')[-1] file_base, postfix_and_number,", "in remove_counter.items()]): raise ValueError('There are negative counts in remove_counter %s' % str(remove_counter)) remove_counter_copy", "1 remove_indices.add(ind) # make a list with only the tuples that have an", "experiment and returns a loader that can be used to access variables and", "def age_to_ordinal_reg_format(ages, bins=(65, 70, 75, 80, 85)): N = ages.shape[0] P = len(bins)", "of tuples return tuple(list(element) for element in zip(*list_in)) def remove_count(list_of_tuples, remove_counter): # remove", "not given then the file of the module must be the only .py", "value_string + str(dict[dict_key])) print_string = separator.join(lines) return print_string def module_from_path(path): 
module_name = os.path.splitext(os.path.split(path)[1])[0]", "source_counter - target_counter t_to_remove = target_counter - source_counter # change to a representation", "# input fieldstrenghts hdf5 list # field_strength_list must have the same size as", "if sum(remove_counter.values()) == 0: break else: if remove_counter_copy[tup[1]] > 0: remove_counter_copy[tup[1]] -= 1", "fieldstrengths: %s' % current_field_strength) return labels def age_to_ordinal_reg_format(ages, bins=(65, 70, 75, 80, 85)):", "0 for item in remove_counter.items()]): raise ValueError('There are negative counts in remove_counter %s'", "tup in enumerate(list_of_tuples): if sum(remove_counter.values()) == 0: break else: if remove_counter_copy[tup[1]] > 0:", "print(target_tuples) source2, target2 = balance_source_target(source, target, random_seed=0) print(source2) print(target2) source_tuples2 = tuple_of_lists_to_list_of_tuples(source2) target_tuples2", "have the same size as label_list # returns a numpy array of labels", "if(not valid_value): raise ValueError('unexpected value in fieldstrengths: %s' % current_field_strength) return labels def", "string_dict_in_order(dict, key_function=None, key_string='', value_string=''): # key is a function to give the elements", "2, 0] source_labels2 = [0, 0, 0] target_indices2 = [1, 4, 5, 6,", "target_indices2 = [1, 4, 5, 6, 7] target_labels2 = [2, 2, 0, 0,", "image_cut, vmin=vmin, vmax=vmax, cmap='gray') if __name__ == '__main__': source_indices1 = [0, 2, 3]", "tuple(list(element) for element in zip(*list_in)) def remove_count(list_of_tuples, remove_counter): # remove tuples with labels", "image_cut = np.rot90(image_cut) imsave(os.path.join(path_2d, img_name) + '.png', image_cut, vmin=vmin, vmax=vmax, cmap='gray') if __name__", "= True # Do not assign whole gpu memory, just use it on", "Counter(source[1]) target_counter = Counter(target[1]) # only nonnegative counts remain, so just what needs", "labels source_counter = Counter(source[1]) target_counter = 
Counter(target[1]) # only nonnegative counts remain, so", "...]) reduced_source = list_of_tuples_to_tuple_of_lists(source_samples) reduced_target = list_of_tuples_to_tuple_of_lists(target_samples) reduced_source_count = Counter(reduced_source[1]) reduced_target_count = Counter(reduced_target[1])", "= Counter(source[1]) target_counter = Counter(target[1]) # only nonnegative counts remain, so just what", "in the dictionary a numerical value that is used for the order separator", "from matplotlib.image import imsave def fstr_to_label(fieldstrengths, field_strength_list, label_list): # input fieldstrenghts hdf5 list", "vmax=1): # image is 3d numpy array # path with image name at", "of the module is not given then the file of the module must", "the checkpoint with the highest iteration number with a given name :param folder:", "and how many of each label should get removed # as the corresponding", "end but without the ending .nii.gz create_and_save_nii(image, os.path.join(path_3d, img_name) + '.nii.gz') # coronal", "affine, header): ''' Shortcut to save a nifty file ''' nimg = nib.Nifti1Image(data,", "int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest step was: %d' % last_step) return init_checkpoint_path, last_step def get_session_memory_config(): #", "print(target2) source_tuples2 = tuple_of_lists_to_list_of_tuples(source2) target_tuples2 = tuple_of_lists_to_list_of_tuples(target2) print(source_tuples2) print(target_tuples2) assert set(source_tuples2) <= set(source_tuples)", "after reduction ' + str(reduced_source_count)) logging.info('target label count after reduction ' + str(reduced_target_count))", "in index_sets: selector_result.append([(index in ind_set) for index in range(max(ind_set))]) return selector_result # Useful", "is not equal' % (len(source[0]),len(source[1]))) if len(target[0]) != len(target[1]): raise ValueError('The number of", "# loads the module of the experiment and returns a loader that can", "number with a given name 
:param folder: Folder where the checkpoints are saved", "return os.path.join(folder, name + '-' + str(latest_iteration)) def index_sets_to_selectors(*index_sets): # takes in sets", "= image[:, 38, :] # rotate the image by 90 degree counterclockwise image_cut", "= tuple_of_lists_to_list_of_tuples(source2) target_tuples2 = tuple_of_lists_to_list_of_tuples(target2) print(source_tuples2) print(target_tuples2) assert set(source_tuples2) <= set(source_tuples) assert set(target_tuples2)", "in enumerate(fieldstrengths): valid_value = False for label_ind, current_label in enumerate(label_list): if(current_field_strength == field_strength_list[label_ind]):", "make a new folder if doesn't exist :param folder: path to new folder", "= int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num) latest_iteration = np.max(iteration_nums) return os.path.join(folder, name + '-' + str(latest_iteration))", "%d and target labels %d is not equal' % (len(target[0]),len(target[1]))) # count the", "index2, ...], [label1, label2, ...]) reduced_source = list_of_tuples_to_tuple_of_lists(source_samples) reduced_target = list_of_tuples_to_tuple_of_lists(target_samples) reduced_source_count =", "= ages_ordinal_reg.shape[1] counts = [ages_binned.count(pp) for pp in range(P)] counts = [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts)))", "experiment_path.split('/')[-1] if experiment_folder_name != experiment_module.experiment_name: logging.warning('warning: the experiment folder name %s is different", "0: remove_counter_copy[tup[1]] -= 1 remove_indices.add(ind) # make a list with only the tuples", "all_argmax(arr, axis=None): return np.argwhere(arr == np.amax(arr, axis=axis)) def makefolder(folder): ''' Helper function to", "def ordinal_regression_to_bin(ages_ord_reg): # N = ages_ord_reg.shape[0] # binned_list = [] # for nn", "# field_strength_list must have the same size as label_list # returns a numpy", "tuples return tuple(list(element) for element in zip(*list_in)) def 
remove_count(list_of_tuples, remove_counter): # remove tuples", "from the experiment name %s' % (experiment_folder_name, experiment_module.experiment_name)) return experiment_module, experiment_path def string_dict_in_order(dict,", "name + '-' + str(latest_iteration)) def index_sets_to_selectors(*index_sets): # takes in sets of indices", "in ind_set) for index in range(max(ind_set))]) return selector_result # Useful shortcut for making", ".py file in the directory # except for the files in other_py_files if", "the dictionary a numerical value that is used for the order separator =", "of indices as well, but the in operation is O(n) instead of O(1)", "folder: Folder where the checkpoints are saved :param name: Name under which you", "nifty file ''' nimg = nib.Nifti1Image(data, affine=affine, header=header) nimg.to_filename(img_path) def create_and_save_nii(data, img_path): img", "glob.glob(os.path.join(folder, '%s*.meta' % name)): file = file.split('/')[-1] file_base, postfix_and_number, rest = file.split('.')[0:3] it_num", "latest_iteration = np.max(iteration_nums) return os.path.join(folder, name + '-' + str(latest_iteration)) def index_sets_to_selectors(*index_sets): #", "a Counter or dict of with labels as keys and how many of", "operation is O(n) instead of O(1) selector_result = [] for ind_set in index_sets:", "!= 1: raise ValueError('unexpected py files in log directory or experiment file not", "%d' % last_step) return init_checkpoint_path, last_step def get_session_memory_config(): # prevents ResourceExhaustError when a", "Name under which you saved the model :return: The path to the checkpoint", "just what needs to be removed s_to_remove = source_counter - target_counter t_to_remove =", "source and target data have the same cardinality and label distribution # make", "keys and how many of each label should get removed # as the", "<NAME> (<EMAIL>) # Useful functions import nibabel as nib import numpy as np", "ValueError('unexpected value in fieldstrengths: 
%s' % current_field_strength) return labels def age_to_ordinal_reg_format(ages, bins=(65, 70,", "create_and_save_nii(image, os.path.join(path_3d, img_name) + '.nii.gz') # coronal cut through the hippocampy image_cut =", "count the labels source_counter = Counter(source[1]) target_counter = Counter(target[1]) # only nonnegative counts", "in operation is O(n) instead of O(1) selector_result = [] for ind_set in", "labels = np.empty_like(fieldstrengths, dtype=np.int16) for fs_ind, current_field_strength in enumerate(fieldstrengths): valid_value = False for", "np.rot90(image_cut) imsave(os.path.join(path_2d, img_name) + '.png', image_cut, vmin=vmin, vmax=vmax, cmap='gray') if __name__ == '__main__':", "# if np.sum(ages_ord_reg[nn,:]) > 0: # binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) # else: # binned_list.append(0) return np.sum(ages_ord_reg,", "a operation is not defined in the default device, let it execute in", "is None: # get experiment config file (assuming it is the first python", "if experiment_folder_name != experiment_module.experiment_name: logging.warning('warning: the experiment folder name %s is different from", "= target_counter - source_counter # change to a representation with a list of", "reduced_list = [element for ind, element in enumerate(list_of_tuples) if ind in keep_indices] return", "1: raise ValueError('unexpected py files in log directory or experiment file not found')", "os import glob from importlib.machinery import SourceFileLoader import config.system as sys_config import logging", "in enumerate(list_of_tuples) if ind in keep_indices] return reduced_list def balance_source_target(source, target, random_seed=None): #", "dictionary a numerical value that is used for the order separator = '\\n'", "(len(source[0]),len(source[1]))) if len(target[0]) != len(target[1]): raise ValueError('The number of target indices %d and", "when a lot of memory is used config = tf.ConfigProto() config.gpu_options.allow_growth = True", 
"current_label in enumerate(label_list): if(current_field_strength == field_strength_list[label_ind]): labels[fs_ind] = current_label valid_value = True break", "with labels as keys and how many of each label should get removed", "class Bunch: def __init__(self, **kwds): self.__dict__.update(kwds) def load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']): # loads the", "numpy array of labels assert len(label_list) == len(field_strength_list) labels = np.empty_like(fieldstrengths, dtype=np.int16) for", "used to access variables and classes in the # module (loader.myClass()) # if", "labels %d is not equal' % (len(source[0]),len(source[1]))) if len(target[0]) != len(target[1]): raise ValueError('The", "source = (source_indices1, source_labels2) target = (target_indices2, target_labels2) source_tuples = tuple_of_lists_to_list_of_tuples(source) target_tuples =", "[label1, label2, ...]) reduced_source = list_of_tuples_to_tuple_of_lists(source_samples) reduced_target = list_of_tuples_to_tuple_of_lists(target_samples) reduced_source_count = Counter(reduced_source[1]) reduced_target_count", "the front of the list in place # tuples (something, label) # remove_counter", "if (file.endswith('.py') and file not in other_py_files)] if len(py_file_list) != 1: raise ValueError('unexpected", "' + str(reduced_source_count)) logging.info('target label count after reduction ' + str(reduced_target_count)) # check", "+ '.nii.gz') # coronal cut through the hippocampy image_cut = image[:, 38, :]", "in the default device, let it execute in another. 
return config def tuple_of_lists_to_list_of_tuples(tuple_in):", "P = ages_ordinal_reg.shape[1] counts = [ages_binned.count(pp) for pp in range(P)] counts = [np.divide(np.sqrt(cc),", "target_labels2) source_tuples = tuple_of_lists_to_list_of_tuples(source) target_tuples = tuple_of_lists_to_list_of_tuples(target) print(source) print(target) print(source_tuples) print(target_tuples) source2, target2", "remove_counter.copy() remove_indices = set() for ind, tup in enumerate(list_of_tuples): if sum(remove_counter.values()) == 0:", "# if the file_name of the module is not given then the file", "import Counter from matplotlib.image import imsave def fstr_to_label(fieldstrengths, field_strength_list, label_list): # input fieldstrenghts", "logging.info('source label count after reduction ' + str(reduced_source_count)) logging.info('target label count after reduction", "sum(remove_counter.values()) == 0: break else: if remove_counter_copy[tup[1]] > 0: remove_counter_copy[tup[1]] -= 1 remove_indices.add(ind)", "like contructs # Example: # mystruct = Bunch(a=1, b=2) # print(mystruct.a) # >>>", "experiment_folder_name = experiment_path.split('/')[-1] if experiment_folder_name != experiment_module.experiment_name: logging.warning('warning: the experiment folder name %s", "the experiment name %s' % (experiment_folder_name, experiment_module.experiment_name)) return experiment_module, experiment_path def string_dict_in_order(dict, key_function=None,", "the module must be the only .py file in the directory # except", "and target domain are now equal assert reduced_source_count == reduced_target_count return reduced_source, reduced_target", "that is used for the order separator = '\\n' lines = [] for", "lists of tuples ([index1, index2, ...], [label1, label2, ...]) reduced_source = list_of_tuples_to_tuple_of_lists(source_samples) reduced_target", "''' nimg = nib.load(img_path) return nimg.get_data(), nimg.affine, nimg.header def save_nii(img_path, data, affine, header):", "img = 
nib.Nifti1Image(data, np.eye(4)) nib.save(img, img_path) def get_latest_model_checkpoint_path(folder, name): ''' Returns the checkpoint", "to access variables and classes in the # module (loader.myClass()) # if the", "shortcut for making struct like contructs # Example: # mystruct = Bunch(a=1, b=2)", "import os import glob from importlib.machinery import SourceFileLoader import config.system as sys_config import", "= file_name py_file_path = os.path.join(experiment_path, py_file_name) # import config file # remove the", "the image by 90 degree counterclockwise image_cut = np.rot90(image_cut) imsave(os.path.join(path_2d, img_name) + '.png',", "for cc in counts] return counts def all_argmax(arr, axis=None): return np.argwhere(arr == np.amax(arr,", "should get removed # as the corresponding value # assuming only nonnegative counts", "image[:, 38, :] # rotate the image by 90 degree counterclockwise image_cut =", "% init_checkpoint_path) last_step = int(init_checkpoint_path.split('/')[-1].split('-')[-1]) logging.info('Latest step was: %d' % last_step) return init_checkpoint_path,", "cardinality and label distribution # make sure there are an equal number of", "image name at the end but without the ending .nii.gz create_and_save_nii(image, os.path.join(path_3d, img_name)", "counts if not all([item[1] >= 0 for item in remove_counter.items()]): raise ValueError('There are", "(experiment_folder_name, experiment_module.experiment_name)) return experiment_module, experiment_path def string_dict_in_order(dict, key_function=None, key_string='', value_string=''): # key is", "assign whole gpu memory, just use it on the go config.allow_soft_placement = True", "ValueError('There are negative counts in remove_counter %s' % str(remove_counter)) remove_counter_copy = remove_counter.copy() remove_indices", "in keep_indices all_indices = set(range(len(list_of_tuples))) keep_indices = all_indices - remove_indices reduced_list = [element", "function to make a new folder if doesn't exist 
:param folder: path to", "+ '-' + str(latest_iteration)) def index_sets_to_selectors(*index_sets): # takes in sets of indices and", "= [] for file in glob.glob(os.path.join(folder, '%s*.meta' % name)): file = file.split('/')[-1] file_base,", "the end but without the ending .nii.gz create_and_save_nii(image, os.path.join(path_3d, img_name) + '.nii.gz') #", "only nonnegative counts if not all([item[1] >= 0 for item in remove_counter.items()]): raise", "to a representation with a tuple of lists of tuples ([index1, index2, ...],", "if remove_counter_copy[tup[1]] > 0: remove_counter_copy[tup[1]] -= 1 remove_indices.add(ind) # make a list with", "nimg = nib.Nifti1Image(data, affine=affine, header=header) nimg.to_filename(img_path) def create_and_save_nii(data, img_path): img = nib.Nifti1Image(data, np.eye(4))", "the returned data has the same structure but the source and target data", "counts] return counts def all_argmax(arr, axis=None): return np.argwhere(arr == np.amax(arr, axis=axis)) def makefolder(folder):", "return labels def age_to_ordinal_reg_format(ages, bins=(65, 70, 75, 80, 85)): N = ages.shape[0] P", "pp in range(P)] counts = [np.divide(np.sqrt(cc), np.sum(np.sqrt(counts))) for cc in counts] return counts", "load_log_exp_config(experiment_path, file_name=None, other_py_files=['standard_parameters.py']): # loads the module of the experiment and returns a", "= True break if(not valid_value): raise ValueError('unexpected value in fieldstrengths: %s' % current_field_strength)", "= tf.ConfigProto() config.gpu_options.allow_growth = True # Do not assign whole gpu memory, just", "if(current_field_strength == field_strength_list[label_ind]): labels[fs_ind] = current_label valid_value = True break if(not valid_value): raise", "file.split('/')[-1] file_base, postfix_and_number, rest = file.split('.')[0:3] it_num = int(postfix_and_number.split('-')[-1]) iteration_nums.append(it_num) latest_iteration = np.max(iteration_nums)", "Shortcut to save a nifty file ''' nimg = 
nib.Nifti1Image(data, affine=affine, header=header) nimg.to_filename(img_path)", "was in the set and false otherwise # works with lists or tuples", "latest iteration ''' iteration_nums = [] for file in glob.glob(os.path.join(folder, '%s*.meta' % name)):", "O(1) selector_result = [] for ind_set in index_sets: selector_result.append([(index in ind_set) for index", "return selector_result # Useful shortcut for making struct like contructs # Example: #", "return np.argwhere(arr == np.amax(arr, axis=axis)) def makefolder(folder): ''' Helper function to make a", "np.sum(ages_ordinal, axis=-1) def ordinal_regression_to_bin(ages_ord_reg): # N = ages_ord_reg.shape[0] # binned_list = [] #", "%d and source labels %d is not equal' % (len(source[0]),len(source[1]))) if len(target[0]) !=", "the file of the module must be the only .py file in the", "raise ValueError('unexpected py files in log directory or experiment file not found') py_file_name", "import logging import tensorflow as tf from collections import Counter from matplotlib.image import", "not assign whole gpu memory, just use it on the go config.allow_soft_placement =", "element in zip(*list_in)) def remove_count(list_of_tuples, remove_counter): # remove tuples with labels specified by", "np.transpose(np.tile(ages,(P,1))) bins_mat = np.tile(bins, (N,1)) return np.array(ages_mat>bins_mat, dtype=np.uint8) def age_to_bins(ages, bins=(65, 70, 75,", "in enumerate(label_list): if(current_field_strength == field_strength_list[label_ind]): labels[fs_ind] = current_label valid_value = True break if(not", "of tuples [(index1, label1), ...] 
source_samples = tuple_of_lists_to_list_of_tuples(source) target_samples = tuple_of_lists_to_list_of_tuples(target) # shuffle", "= set(range(len(list_of_tuples))) keep_indices = all_indices - remove_indices reduced_list = [element for ind, element", "labels are lists # the returned data has the same structure but the", "name :param folder: Folder where the checkpoints are saved :param name: Name under", "with image name at the end but without the ending .nii.gz create_and_save_nii(image, os.path.join(path_3d,", "Counter(target[1]) # only nonnegative counts remain, so just what needs to be removed", "= list(ordinal_regression_to_bin(ages_ordinal_reg)) P = ages_ordinal_reg.shape[1] counts = [ages_binned.count(pp) for pp in range(P)] counts", "def remove_count(list_of_tuples, remove_counter): # remove tuples with labels specified by remove_counter from the", "fs_ind, current_field_strength in enumerate(fieldstrengths): valid_value = False for label_ind, current_label in enumerate(label_list): if(current_field_strength", "Useful functions import nibabel as nib import numpy as np import os import", "nimg.to_filename(img_path) def create_and_save_nii(data, img_path): img = nib.Nifti1Image(data, np.eye(4)) nib.save(img, img_path) def get_latest_model_checkpoint_path(folder, name):", "import glob from importlib.machinery import SourceFileLoader import config.system as sys_config import logging import", "% (experiment_folder_name, experiment_module.experiment_name)) return experiment_module, experiment_path def string_dict_in_order(dict, key_function=None, key_string='', value_string=''): # key", "t_to_remove) # sort by index sort_key = lambda t: t[0] source_samples.sort(key=sort_key) target_samples.sort(key=sort_key) #", "to give the elements in the dictionary a numerical value that is used", "only the tuples that have an index in keep_indices all_indices = set(range(len(list_of_tuples))) keep_indices", "break if(not valid_value): raise ValueError('unexpected value in 
fieldstrengths: %s' % current_field_strength) return labels", "target labels %d is not equal' % (len(target[0]),len(target[1]))) # count the labels source_counter", "# N = ages_ord_reg.shape[0] # binned_list = [] # for nn in range(N):", "but without the ending .nii.gz create_and_save_nii(image, os.path.join(path_3d, img_name) + '.nii.gz') # coronal cut", "get experiment config file (assuming it is the first python file in log", "<gh_stars>1-10 # Authors: # <NAME> (<EMAIL>) # <NAME> (<EMAIL>) # <NAME> (<EMAIL>) #", "not equal' % (len(source[0]),len(source[1]))) if len(target[0]) != len(target[1]): raise ValueError('The number of target", "% str(remove_counter)) remove_counter_copy = remove_counter.copy() remove_indices = set() for ind, tup in enumerate(list_of_tuples):", "= balance_source_target(source, target, random_seed=0) print(source2) print(target2) source_tuples2 = tuple_of_lists_to_list_of_tuples(source2) target_tuples2 = tuple_of_lists_to_list_of_tuples(target2) print(source_tuples2)", "80, 85)): N = ages.shape[0] P = len(bins) ages_mat = np.transpose(np.tile(ages,(P,1))) bins_mat =", "img_name) + '.nii.gz') # coronal cut through the hippocampy image_cut = image[:, 38,", "for nn in range(N): # if np.sum(ages_ord_reg[nn,:]) > 0: # binned_list.append(all_argmax(ages_ord_reg[nn,:])[-1][0]+1) # else:" ]
[ "\"\"\" Run the migrations. \"\"\" with self.schema.create(\"thirdstorages\") as table: table.increments(\"id\") table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\")", "class ThirdStorage(Migration): def up(self): \"\"\" Run the migrations. \"\"\" with self.schema.create(\"thirdstorages\") as table:", "Migration class ThirdStorage(Migration): def up(self): \"\"\" Run the migrations. \"\"\" with self.schema.create(\"thirdstorages\") as", "with self.schema.create(\"thirdstorages\") as table: table.increments(\"id\") table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\") table.integer(\"thirdstorage_price\") table.string(\"thirdstorage_img\") table.timestamps() def", "\"\"\"ThirdStorage Migration.\"\"\" from masoniteorm.migrations import Migration class ThirdStorage(Migration): def up(self): \"\"\" Run the", "import Migration class ThirdStorage(Migration): def up(self): \"\"\" Run the migrations. \"\"\" with self.schema.create(\"thirdstorages\")", "Migration.\"\"\" from masoniteorm.migrations import Migration class ThirdStorage(Migration): def up(self): \"\"\" Run the migrations.", "migrations. \"\"\" with self.schema.create(\"thirdstorages\") as table: table.increments(\"id\") table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\") table.integer(\"thirdstorage_price\") table.string(\"thirdstorage_img\")", "table.increments(\"id\") table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\") table.integer(\"thirdstorage_price\") table.string(\"thirdstorage_img\") table.timestamps() def down(self): \"\"\" Revert the", "up(self): \"\"\" Run the migrations. 
\"\"\" with self.schema.create(\"thirdstorages\") as table: table.increments(\"id\") table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\")", "the migrations. \"\"\" with self.schema.create(\"thirdstorages\") as table: table.increments(\"id\") table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\") table.integer(\"thirdstorage_price\")", "table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\") table.integer(\"thirdstorage_price\") table.string(\"thirdstorage_img\") table.timestamps() def down(self): \"\"\" Revert the migrations. \"\"\"", "as table: table.increments(\"id\") table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\") table.integer(\"thirdstorage_price\") table.string(\"thirdstorage_img\") table.timestamps() def down(self): \"\"\"", "\"\"\" with self.schema.create(\"thirdstorages\") as table: table.increments(\"id\") table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\") table.integer(\"thirdstorage_price\") table.string(\"thirdstorage_img\") table.timestamps()", "table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\") table.integer(\"thirdstorage_price\") table.string(\"thirdstorage_img\") table.timestamps() def down(self): \"\"\" Revert the migrations. 
\"\"\" self.schema.drop(\"thirdstorages\")", "table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\") table.integer(\"thirdstorage_price\") table.string(\"thirdstorage_img\") table.timestamps() def down(self): \"\"\" Revert the migrations.", "self.schema.create(\"thirdstorages\") as table: table.increments(\"id\") table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\") table.integer(\"thirdstorage_price\") table.string(\"thirdstorage_img\") table.timestamps() def down(self):", "from masoniteorm.migrations import Migration class ThirdStorage(Migration): def up(self): \"\"\" Run the migrations. \"\"\"", "Run the migrations. \"\"\" with self.schema.create(\"thirdstorages\") as table: table.increments(\"id\") table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\")", "def up(self): \"\"\" Run the migrations. \"\"\" with self.schema.create(\"thirdstorages\") as table: table.increments(\"id\") table.string(\"thirdstorage_name\")", "masoniteorm.migrations import Migration class ThirdStorage(Migration): def up(self): \"\"\" Run the migrations. \"\"\" with", "table: table.increments(\"id\") table.string(\"thirdstorage_name\") table.string(\"thirdstorage_brand\") table.string(\"thirdstorage_type\") table.string(\"thirdstorage_size\") table.integer(\"thirdstorage_price\") table.string(\"thirdstorage_img\") table.timestamps() def down(self): \"\"\" Revert", "<filename>databases/migrations/2021_12_09_065718_ThirdStorage.py \"\"\"ThirdStorage Migration.\"\"\" from masoniteorm.migrations import Migration class ThirdStorage(Migration): def up(self): \"\"\" Run", "ThirdStorage(Migration): def up(self): \"\"\" Run the migrations. \"\"\" with self.schema.create(\"thirdstorages\") as table: table.increments(\"id\")" ]
[]
[ "FacebookDataSetEntry: def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance, label): self.followers_number = followers_number", "comments_number, share_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.likes_number = likes_number self.comments_number =", "self.share_number = share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance class TwitterDataSetEntry: def __init__(self,", "likes_number, comments_number, share_number, grammar_index, subject_relevance): self.followers_number = followers_number self.likes_number = likes_number self.comments_number =", "label class FacebookDataSetEntryUnlabeled: def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance): self.followers_number =", "self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label = label class FacebookDataSetEntryUnlabeled: def __init__(self,", "grammar_index, subject_relevance, label): self.followers_number = followers_number self.likes_number = likes_number self.comments_number = comments_number self.share_number", "self.comments_number = comments_number self.share_number = share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance class", "quote_tweets self.likes_number = likes_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label = label", "= tweets_number self.retweets = retweets self.quote_tweets = quote_tweets self.likes_number = likes_number self.grammar_index =", "def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance): self.followers_number = followers_number self.likes_number =", "self.grammar_index = grammar_index self.subject_relevance = subject_relevance class TwitterDataSetEntry: def __init__(self, followers_number, verified, 
tweets_number,", "followers_number self.verified = verified self.tweets_number = tweets_number self.retweets = retweets self.quote_tweets = quote_tweets", "followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.likes_number = likes_number", "retweets, quote_tweets, likes_number, grammar_index, subject_relevance): self.followers_number = followers_number self.verified = verified self.tweets_number =", "= comments_number self.share_number = share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance class TwitterDataSetEntry:", "self.followers_number = followers_number self.verified = verified self.tweets_number = tweets_number self.retweets = retweets self.quote_tweets", "share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance class TwitterDataSetEntry: def __init__(self, followers_number, verified,", "= grammar_index self.subject_relevance = subject_relevance self.label = label class TwitterDataSetEntryUnlabeled: def __init__(self, followers_number,", "grammar_index, subject_relevance): self.followers_number = followers_number self.likes_number = likes_number self.comments_number = comments_number self.share_number =", "self.followers_number = followers_number self.likes_number = likes_number self.comments_number = comments_number self.share_number = share_number self.grammar_index", "self.comments_number = comments_number self.share_number = share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label", "likes_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label = label class TwitterDataSetEntryUnlabeled: def", "verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.verified =", "self.label = label class 
FacebookDataSetEntryUnlabeled: def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance):", "likes_number self.comments_number = comments_number self.share_number = share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance", "grammar_index self.subject_relevance = subject_relevance self.label = label class TwitterDataSetEntryUnlabeled: def __init__(self, followers_number, verified,", "tweets_number self.retweets = retweets self.quote_tweets = quote_tweets self.likes_number = likes_number self.grammar_index = grammar_index", "tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance): self.followers_number = followers_number self.verified = verified self.tweets_number", "def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance): self.followers_number = followers_number", "grammar_index, subject_relevance): self.followers_number = followers_number self.verified = verified self.tweets_number = tweets_number self.retweets =", "= retweets self.quote_tweets = quote_tweets self.likes_number = likes_number self.grammar_index = grammar_index self.subject_relevance =", "verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance): self.followers_number = followers_number self.verified = verified", "= followers_number self.verified = verified self.tweets_number = tweets_number self.retweets = retweets self.quote_tweets =", "= likes_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label = label class TwitterDataSetEntryUnlabeled:", "= grammar_index self.subject_relevance = subject_relevance class TwitterDataSetEntry: def __init__(self, followers_number, verified, tweets_number, retweets,", "subject_relevance, label): self.followers_number = followers_number self.verified = verified self.tweets_number = 
tweets_number self.retweets =", "FacebookDataSetEntryUnlabeled: def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance): self.followers_number = followers_number self.likes_number", "= likes_number self.comments_number = comments_number self.share_number = share_number self.grammar_index = grammar_index self.subject_relevance =", "subject_relevance class TwitterDataSetEntry: def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance,", "followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance): self.followers_number = followers_number self.verified =", "self.quote_tweets = quote_tweets self.likes_number = likes_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label", "grammar_index, subject_relevance, label): self.followers_number = followers_number self.verified = verified self.tweets_number = tweets_number self.retweets", "def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.likes_number", "grammar_index self.subject_relevance = subject_relevance self.label = label class FacebookDataSetEntryUnlabeled: def __init__(self, followers_number, likes_number,", "= subject_relevance class TwitterDataSetEntry: def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index,", "subject_relevance, label): self.followers_number = followers_number self.likes_number = likes_number self.comments_number = comments_number self.share_number =", "share_number, grammar_index, subject_relevance): self.followers_number = followers_number self.likes_number = likes_number self.comments_number = comments_number self.share_number", "= share_number self.grammar_index = grammar_index 
self.subject_relevance = subject_relevance class TwitterDataSetEntry: def __init__(self, followers_number,", "self.retweets = retweets self.quote_tweets = quote_tweets self.likes_number = likes_number self.grammar_index = grammar_index self.subject_relevance", "= subject_relevance self.label = label class TwitterDataSetEntryUnlabeled: def __init__(self, followers_number, verified, tweets_number, retweets,", "followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance): self.followers_number = followers_number self.likes_number = likes_number self.comments_number", "= share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label = label class FacebookDataSetEntryUnlabeled:", "= verified self.tweets_number = tweets_number self.retweets = retweets self.quote_tweets = quote_tweets self.likes_number =", "class FacebookDataSetEntryUnlabeled: def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance): self.followers_number = followers_number", "comments_number self.share_number = share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label = label", "quote_tweets, likes_number, grammar_index, subject_relevance): self.followers_number = followers_number self.verified = verified self.tweets_number = tweets_number", "def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance, label): self.followers_number =", "self.share_number = share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label = label class", "= subject_relevance self.label = label class FacebookDataSetEntryUnlabeled: def __init__(self, followers_number, likes_number, comments_number, share_number,", "subject_relevance self.label = label class TwitterDataSetEntryUnlabeled: def __init__(self, followers_number, verified, 
tweets_number, retweets, quote_tweets,", "__init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance): self.followers_number = followers_number self.verified", "__init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance, label): self.followers_number = followers_number", "followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.verified", "__init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.likes_number =", "self.tweets_number = tweets_number self.retweets = retweets self.quote_tweets = quote_tweets self.likes_number = likes_number self.grammar_index", "self.subject_relevance = subject_relevance class TwitterDataSetEntry: def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number,", "self.verified = verified self.tweets_number = tweets_number self.retweets = retweets self.quote_tweets = quote_tweets self.likes_number", "share_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.likes_number = likes_number self.comments_number = comments_number", "self.subject_relevance = subject_relevance self.label = label class TwitterDataSetEntryUnlabeled: def __init__(self, followers_number, verified, tweets_number,", "self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label = label class TwitterDataSetEntryUnlabeled: def __init__(self,", "comments_number, share_number, grammar_index, subject_relevance): self.followers_number = followers_number self.likes_number = likes_number self.comments_number = comments_number", "= grammar_index self.subject_relevance = subject_relevance self.label = label class 
FacebookDataSetEntryUnlabeled: def __init__(self, followers_number,", "comments_number self.share_number = share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance class TwitterDataSetEntry: def", "= comments_number self.share_number = share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label =", "<reponame>CDU55/FakeNews class FacebookDataSetEntry: def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance, label): self.followers_number", "label): self.followers_number = followers_number self.likes_number = likes_number self.comments_number = comments_number self.share_number = share_number", "label): self.followers_number = followers_number self.verified = verified self.tweets_number = tweets_number self.retweets = retweets", "share_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label = label class FacebookDataSetEntryUnlabeled: def", "tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.verified = verified", "retweets self.quote_tweets = quote_tweets self.likes_number = likes_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance", "self.likes_number = likes_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label = label class", "likes_number, grammar_index, subject_relevance): self.followers_number = followers_number self.verified = verified self.tweets_number = tweets_number self.retweets", "followers_number self.likes_number = likes_number self.comments_number = comments_number self.share_number = share_number self.grammar_index = grammar_index", "TwitterDataSetEntry: def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance, label): self.followers_number", 
"= followers_number self.likes_number = likes_number self.comments_number = comments_number self.share_number = share_number self.grammar_index =", "= label class FacebookDataSetEntryUnlabeled: def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance): self.followers_number", "class TwitterDataSetEntryUnlabeled: def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance): self.followers_number", "subject_relevance): self.followers_number = followers_number self.verified = verified self.tweets_number = tweets_number self.retweets = retweets", "self.likes_number = likes_number self.comments_number = comments_number self.share_number = share_number self.grammar_index = grammar_index self.subject_relevance", "class TwitterDataSetEntry: def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance, label):", "= quote_tweets self.likes_number = likes_number self.grammar_index = grammar_index self.subject_relevance = subject_relevance self.label =", "subject_relevance): self.followers_number = followers_number self.likes_number = likes_number self.comments_number = comments_number self.share_number = share_number", "subject_relevance self.label = label class FacebookDataSetEntryUnlabeled: def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index,", "label class TwitterDataSetEntryUnlabeled: def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance):", "__init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance): self.followers_number = followers_number self.likes_number = likes_number", "self.subject_relevance = subject_relevance self.label = label class FacebookDataSetEntryUnlabeled: def __init__(self, 
followers_number, likes_number, comments_number,", "verified self.tweets_number = tweets_number self.retweets = retweets self.quote_tweets = quote_tweets self.likes_number = likes_number", "class FacebookDataSetEntry: def __init__(self, followers_number, likes_number, comments_number, share_number, grammar_index, subject_relevance, label): self.followers_number =", "grammar_index self.subject_relevance = subject_relevance class TwitterDataSetEntry: def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets,", "likes_number, comments_number, share_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.likes_number = likes_number self.comments_number", "TwitterDataSetEntryUnlabeled: def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index, subject_relevance): self.followers_number =", "= label class TwitterDataSetEntryUnlabeled: def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number, grammar_index,", "likes_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.verified = verified self.tweets_number = tweets_number", "retweets, quote_tweets, likes_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.verified = verified self.tweets_number", "quote_tweets, likes_number, grammar_index, subject_relevance, label): self.followers_number = followers_number self.verified = verified self.tweets_number =", "self.label = label class TwitterDataSetEntryUnlabeled: def __init__(self, followers_number, verified, tweets_number, retweets, quote_tweets, likes_number," ]
[ "rf.npvForEpoch(fyear); print \"mean: \", stats.nanmean(npv-npvRF) print \"median: \", stats.nanmedian(npv-npvRF) print \"Aligning epoch \",fyear", "print 'pout:',stats['pout']; print 'nout:',stats['nout']; print 'npts:',stats['npts']; print print ' RMS:',stats['RMS']/1e-3, '[mm]' print 'wRMS:',stats['wRMS']/1e-3,'[mm]';", "ts.npvForEpoch(fyear); npvRF = rf.npvForEpoch(fyear); print \"mean: \", stats.nanmean(npv-npvRF) print \"median: \", stats.nanmedian(npv-npvRF) print", "print print 'pout:',stats['pout']; print 'nout:',stats['nout']; print 'npts:',stats['npts']; print print ' RMS:',stats['RMS']/1e-3, '[mm]' print", "\", stats.nanmean(npv-npvRF) print \"median: \", stats.nanmedian(npv-npvRF) print \"Aligning epoch \",fyear T,npvT,stats = pyStk.helmert(npv,", "pyStk; import numpy as np; import math from scipy import stats; #print rf.refData.shape", "#npv = rf.npvForEpoch(2003.50414524) #print npv.shape ts = pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear =", "= pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear = ts.epochs[4000]; npv = ts.npvForEpoch(fyear); npvRF =", "ts.epochs[4000]; npv = ts.npvForEpoch(fyear); npvRF = rf.npvForEpoch(fyear); print \"mean: \", stats.nanmean(npv-npvRF) print \"median:", "print ' RMS:',stats['RMS']/1e-3, '[mm]' print 'wRMS:',stats['wRMS']/1e-3,'[mm]'; print print 'max resid:',stats['dvMax']/1e-3,'[mm]' print 'max resid", "print \"Aligning epoch \",fyear T,npvT,stats = pyStk.helmert(npv, npvRF); print print 'iter:',stats['iter']; print print", "= pyStk.helmert(npv, npvRF); print print 'iter:',stats['iter']; print print 'pout:',stats['pout']; print 'nout:',stats['nout']; print 'npts:',stats['npts'];", "'pout:',stats['pout']; print 'nout:',stats['nout']; print 'npts:',stats['npts']; print print ' RMS:',stats['RMS']/1e-3, '[mm]' print 'wRMS:',stats['wRMS']/1e-3,'[mm]'; print", "from scipy 
import stats; #print rf.refData.shape #print len(rf.refStnList) # #print rf.npv.shape; #print rf.nvv.shape;", "\"mean: \", stats.nanmean(npv-npvRF) print \"median: \", stats.nanmedian(npv-npvRF) print \"Aligning epoch \",fyear T,npvT,stats =", "pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear = ts.epochs[4000]; npv = ts.npvForEpoch(fyear); npvRF = rf.npvForEpoch(fyear);", "#print rf.refData.shape #print len(rf.refStnList) # #print rf.npv.shape; #print rf.nvv.shape; #print rf.refEpoch.shape; # #npv", "#print rf.refEpoch.shape; # #npv = rf.npvForEpoch(2003.50414524) #print npv.shape ts = pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf =", "pyStk.helmert(npv, npvRF); print print 'iter:',stats['iter']; print print 'pout:',stats['pout']; print 'nout:',stats['nout']; print 'npts:',stats['npts']; print", "'nout:',stats['nout']; print 'npts:',stats['npts']; print print ' RMS:',stats['RMS']/1e-3, '[mm]' print 'wRMS:',stats['wRMS']/1e-3,'[mm]'; print print 'max", "# #print rf.npv.shape; #print rf.nvv.shape; #print rf.refEpoch.shape; # #npv = rf.npvForEpoch(2003.50414524) #print npv.shape", "#print rf.npv.shape; #print rf.nvv.shape; #print rf.refEpoch.shape; # #npv = rf.npvForEpoch(2003.50414524) #print npv.shape ts", "pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear = ts.epochs[4000]; npv = ts.npvForEpoch(fyear); npvRF = rf.npvForEpoch(fyear); print \"mean: \",", "= pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear = ts.epochs[4000]; npv = ts.npvForEpoch(fyear); npvRF = rf.npvForEpoch(fyear); print \"mean:", "ts = pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear = ts.epochs[4000]; npv = ts.npvForEpoch(fyear); npvRF", "epoch \",fyear T,npvT,stats = pyStk.helmert(npv, npvRF); print print 'iter:',stats['iter']; print print 'pout:',stats['pout']; print", "print print ' 
RMS:',stats['RMS']/1e-3, '[mm]' print 'wRMS:',stats['wRMS']/1e-3,'[mm]'; print print 'max resid:',stats['dvMax']/1e-3,'[mm]' print 'max", "rf.refData.shape #print len(rf.refStnList) # #print rf.npv.shape; #print rf.nvv.shape; #print rf.refEpoch.shape; # #npv =", "\"median: \", stats.nanmedian(npv-npvRF) print \"Aligning epoch \",fyear T,npvT,stats = pyStk.helmert(npv, npvRF); print print", "# #npv = rf.npvForEpoch(2003.50414524) #print npv.shape ts = pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear", "#print rf.nvv.shape; #print rf.refEpoch.shape; # #npv = rf.npvForEpoch(2003.50414524) #print npv.shape ts = pyStk.pyTS().initFromMatFile('../data/ts.mat');", "' RMS:',stats['RMS']/1e-3, '[mm]' print 'wRMS:',stats['wRMS']/1e-3,'[mm]'; print print 'max resid:',stats['dvMax']/1e-3,'[mm]' print 'max resid indx:',stats['dvMaxIndx'][0]", "#print npv.shape ts = pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear = ts.epochs[4000]; npv =", "\",fyear T,npvT,stats = pyStk.helmert(npv, npvRF); print print 'iter:',stats['iter']; print print 'pout:',stats['pout']; print 'nout:',stats['nout'];", "= ts.npvForEpoch(fyear); npvRF = rf.npvForEpoch(fyear); print \"mean: \", stats.nanmean(npv-npvRF) print \"median: \", stats.nanmedian(npv-npvRF)", "pyRF; import pyStk; import numpy as np; import math from scipy import stats;", "rf.refEpoch.shape; # #npv = rf.npvForEpoch(2003.50414524) #print npv.shape ts = pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list));", "math from scipy import stats; #print rf.refData.shape #print len(rf.refStnList) # #print rf.npv.shape; #print", "scipy import stats; #print rf.refData.shape #print len(rf.refStnList) # #print rf.npv.shape; #print rf.nvv.shape; #print", "\"Aligning epoch \",fyear T,npvT,stats = pyStk.helmert(npv, npvRF); print print 
'iter:',stats['iter']; print print 'pout:',stats['pout'];", "stats.nanmedian(npv-npvRF) print \"Aligning epoch \",fyear T,npvT,stats = pyStk.helmert(npv, npvRF); print print 'iter:',stats['iter']; print", "rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear = ts.epochs[4000]; npv = ts.npvForEpoch(fyear); npvRF = rf.npvForEpoch(fyear); print", "'wRMS:',stats['wRMS']/1e-3,'[mm]'; print print 'max resid:',stats['dvMax']/1e-3,'[mm]' print 'max resid indx:',stats['dvMaxIndx'][0] print 'max resid stn:',ts.stn_list[stats['dvMaxIndx']/3]", "RMS:',stats['RMS']/1e-3, '[mm]' print 'wRMS:',stats['wRMS']/1e-3,'[mm]'; print print 'max resid:',stats['dvMax']/1e-3,'[mm]' print 'max resid indx:',stats['dvMaxIndx'][0] print", "np; import math from scipy import stats; #print rf.refData.shape #print len(rf.refStnList) # #print", "print print 'iter:',stats['iter']; print print 'pout:',stats['pout']; print 'nout:',stats['nout']; print 'npts:',stats['npts']; print print '", "fyear = ts.epochs[4000]; npv = ts.npvForEpoch(fyear); npvRF = rf.npvForEpoch(fyear); print \"mean: \", stats.nanmean(npv-npvRF)", "import stats; #print rf.refData.shape #print len(rf.refStnList) # #print rf.npv.shape; #print rf.nvv.shape; #print rf.refEpoch.shape;", "len(rf.refStnList) # #print rf.npv.shape; #print rf.nvv.shape; #print rf.refEpoch.shape; # #npv = rf.npvForEpoch(2003.50414524) #print", "T,npvT,stats = pyStk.helmert(npv, npvRF); print print 'iter:',stats['iter']; print print 'pout:',stats['pout']; print 'nout:',stats['nout']; print", "import numpy as np; import math from scipy import stats; #print rf.refData.shape #print", "as np; import math from scipy import stats; #print rf.refData.shape #print len(rf.refStnList) #", "print 'iter:',stats['iter']; print print 'pout:',stats['pout']; print 'nout:',stats['nout']; print 'npts:',stats['npts']; print print ' RMS:',stats['RMS']/1e-3,", "npv = ts.npvForEpoch(fyear); npvRF = rf.npvForEpoch(fyear); print \"mean: \", 
stats.nanmean(npv-npvRF) print \"median: \",", "rf.npv.shape; #print rf.nvv.shape; #print rf.refEpoch.shape; # #npv = rf.npvForEpoch(2003.50414524) #print npv.shape ts =", "print 'nout:',stats['nout']; print 'npts:',stats['npts']; print print ' RMS:',stats['RMS']/1e-3, '[mm]' print 'wRMS:',stats['wRMS']/1e-3,'[mm]'; print print", "npvRF = rf.npvForEpoch(fyear); print \"mean: \", stats.nanmean(npv-npvRF) print \"median: \", stats.nanmedian(npv-npvRF) print \"Aligning", "rf.nvv.shape; #print rf.refEpoch.shape; # #npv = rf.npvForEpoch(2003.50414524) #print npv.shape ts = pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf", "rf.npvForEpoch(2003.50414524) #print npv.shape ts = pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear = ts.epochs[4000]; npv", "print \"median: \", stats.nanmedian(npv-npvRF) print \"Aligning epoch \",fyear T,npvT,stats = pyStk.helmert(npv, npvRF); print", "stats; #print rf.refData.shape #print len(rf.refStnList) # #print rf.npv.shape; #print rf.nvv.shape; #print rf.refEpoch.shape; #", "import math from scipy import stats; #print rf.refData.shape #print len(rf.refStnList) # #print rf.npv.shape;", "print 'npts:',stats['npts']; print print ' RMS:',stats['RMS']/1e-3, '[mm]' print 'wRMS:',stats['wRMS']/1e-3,'[mm]'; print print 'max resid:',stats['dvMax']/1e-3,'[mm]'", "numpy as np; import math from scipy import stats; #print rf.refData.shape #print len(rf.refStnList)", "= rf.npvForEpoch(fyear); print \"mean: \", stats.nanmean(npv-npvRF) print \"median: \", stats.nanmedian(npv-npvRF) print \"Aligning epoch", "npvRF); print print 'iter:',stats['iter']; print print 'pout:',stats['pout']; print 'nout:',stats['nout']; print 'npts:',stats['npts']; print print", "print \"mean: \", stats.nanmean(npv-npvRF) print \"median: \", stats.nanmedian(npv-npvRF) print \"Aligning epoch \",fyear T,npvT,stats", "import pyStk; import numpy as np; import math from scipy import stats; #print", "import 
pyRF; import pyStk; import numpy as np; import math from scipy import", "npv.shape ts = pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear = ts.epochs[4000]; npv = ts.npvForEpoch(fyear);", "\", stats.nanmedian(npv-npvRF) print \"Aligning epoch \",fyear T,npvT,stats = pyStk.helmert(npv, npvRF); print print 'iter:',stats['iter'];", "#print len(rf.refStnList) # #print rf.npv.shape; #print rf.nvv.shape; #print rf.refEpoch.shape; # #npv = rf.npvForEpoch(2003.50414524)", "stats.nanmean(npv-npvRF) print \"median: \", stats.nanmedian(npv-npvRF) print \"Aligning epoch \",fyear T,npvT,stats = pyStk.helmert(npv, npvRF);", "'[mm]' print 'wRMS:',stats['wRMS']/1e-3,'[mm]'; print print 'max resid:',stats['dvMax']/1e-3,'[mm]' print 'max resid indx:',stats['dvMaxIndx'][0] print 'max", "= ts.epochs[4000]; npv = ts.npvForEpoch(fyear); npvRF = rf.npvForEpoch(fyear); print \"mean: \", stats.nanmean(npv-npvRF) print", "= rf.npvForEpoch(2003.50414524) #print npv.shape ts = pyStk.pyTS().initFromMatFile('../data/ts.mat'); rf = pyRF.pyRF('itrf08').initForStnList(map(str.lower,ts.stn_list)); fyear = ts.epochs[4000];", "'iter:',stats['iter']; print print 'pout:',stats['pout']; print 'nout:',stats['nout']; print 'npts:',stats['npts']; print print ' RMS:',stats['RMS']/1e-3, '[mm]'", "'npts:',stats['npts']; print print ' RMS:',stats['RMS']/1e-3, '[mm]' print 'wRMS:',stats['wRMS']/1e-3,'[mm]'; print print 'max resid:',stats['dvMax']/1e-3,'[mm]' print", "print 'wRMS:',stats['wRMS']/1e-3,'[mm]'; print print 'max resid:',stats['dvMax']/1e-3,'[mm]' print 'max resid indx:',stats['dvMaxIndx'][0] print 'max resid" ]
[ "schema_editor): Modulo = apps.get_model('modulos', 'Modulo') for modulo in Modulo.objects.all(): modulo.slug = slugify(modulo.titulo) modulo.save()", "Generated by Django 3.1.3 on 2020-11-25 11:09 from django.db import migrations from django.utils.text", "= slugify(modulo.titulo) modulo.save() class Migration(migrations.Migration): dependencies = [ ('modulos', '0003_modulo_slug'), ] operations =", "class Migration(migrations.Migration): dependencies = [ ('modulos', '0003_modulo_slug'), ] operations = [ migrations.RunPython(popular_slug) ]", "in Modulo.objects.all(): modulo.slug = slugify(modulo.titulo) modulo.save() class Migration(migrations.Migration): dependencies = [ ('modulos', '0003_modulo_slug'),", "Modulo.objects.all(): modulo.slug = slugify(modulo.titulo) modulo.save() class Migration(migrations.Migration): dependencies = [ ('modulos', '0003_modulo_slug'), ]", "django.db import migrations from django.utils.text import slugify def popular_slug(apps, schema_editor): Modulo = apps.get_model('modulos',", "apps.get_model('modulos', 'Modulo') for modulo in Modulo.objects.all(): modulo.slug = slugify(modulo.titulo) modulo.save() class Migration(migrations.Migration): dependencies", "3.1.3 on 2020-11-25 11:09 from django.db import migrations from django.utils.text import slugify def", "django.utils.text import slugify def popular_slug(apps, schema_editor): Modulo = apps.get_model('modulos', 'Modulo') for modulo in", "2020-11-25 11:09 from django.db import migrations from django.utils.text import slugify def popular_slug(apps, schema_editor):", "Modulo = apps.get_model('modulos', 'Modulo') for modulo in Modulo.objects.all(): modulo.slug = slugify(modulo.titulo) modulo.save() class", "modulo.save() class Migration(migrations.Migration): dependencies = [ ('modulos', '0003_modulo_slug'), ] operations = [ migrations.RunPython(popular_slug)", "'Modulo') for modulo in Modulo.objects.all(): modulo.slug = slugify(modulo.titulo) modulo.save() class 
Migration(migrations.Migration): dependencies =", "for modulo in Modulo.objects.all(): modulo.slug = slugify(modulo.titulo) modulo.save() class Migration(migrations.Migration): dependencies = [", "by Django 3.1.3 on 2020-11-25 11:09 from django.db import migrations from django.utils.text import", "migrations from django.utils.text import slugify def popular_slug(apps, schema_editor): Modulo = apps.get_model('modulos', 'Modulo') for", "on 2020-11-25 11:09 from django.db import migrations from django.utils.text import slugify def popular_slug(apps,", "slugify def popular_slug(apps, schema_editor): Modulo = apps.get_model('modulos', 'Modulo') for modulo in Modulo.objects.all(): modulo.slug", "Django 3.1.3 on 2020-11-25 11:09 from django.db import migrations from django.utils.text import slugify", "11:09 from django.db import migrations from django.utils.text import slugify def popular_slug(apps, schema_editor): Modulo", "# Generated by Django 3.1.3 on 2020-11-25 11:09 from django.db import migrations from", "import migrations from django.utils.text import slugify def popular_slug(apps, schema_editor): Modulo = apps.get_model('modulos', 'Modulo')", "= apps.get_model('modulos', 'Modulo') for modulo in Modulo.objects.all(): modulo.slug = slugify(modulo.titulo) modulo.save() class Migration(migrations.Migration):", "popular_slug(apps, schema_editor): Modulo = apps.get_model('modulos', 'Modulo') for modulo in Modulo.objects.all(): modulo.slug = slugify(modulo.titulo)", "def popular_slug(apps, schema_editor): Modulo = apps.get_model('modulos', 'Modulo') for modulo in Modulo.objects.all(): modulo.slug =", "modulo.slug = slugify(modulo.titulo) modulo.save() class Migration(migrations.Migration): dependencies = [ ('modulos', '0003_modulo_slug'), ] operations", "import slugify def popular_slug(apps, schema_editor): Modulo = apps.get_model('modulos', 'Modulo') for modulo in Modulo.objects.all():", "from django.utils.text import slugify def popular_slug(apps, schema_editor): 
Modulo = apps.get_model('modulos', 'Modulo') for modulo", "from django.db import migrations from django.utils.text import slugify def popular_slug(apps, schema_editor): Modulo =", "modulo in Modulo.objects.all(): modulo.slug = slugify(modulo.titulo) modulo.save() class Migration(migrations.Migration): dependencies = [ ('modulos',", "slugify(modulo.titulo) modulo.save() class Migration(migrations.Migration): dependencies = [ ('modulos', '0003_modulo_slug'), ] operations = [" ]
[ "import path from .views import index, search, todo_list_view urlpatterns = [ path('', index,", "path from .views import index, search, todo_list_view urlpatterns = [ path('', index, name='index'),", "from .views import index, search, todo_list_view urlpatterns = [ path('', index, name='index'), path('list',", "import index, search, todo_list_view urlpatterns = [ path('', index, name='index'), path('list', todo_list_view, name='list'),", "todo_list_view urlpatterns = [ path('', index, name='index'), path('list', todo_list_view, name='list'), path('search/', search, name='search')", "index, search, todo_list_view urlpatterns = [ path('', index, name='index'), path('list', todo_list_view, name='list'), path('search/',", "<gh_stars>0 from django.urls import path from .views import index, search, todo_list_view urlpatterns =", "from django.urls import path from .views import index, search, todo_list_view urlpatterns = [", "django.urls import path from .views import index, search, todo_list_view urlpatterns = [ path('',", ".views import index, search, todo_list_view urlpatterns = [ path('', index, name='index'), path('list', todo_list_view,", "urlpatterns = [ path('', index, name='index'), path('list', todo_list_view, name='list'), path('search/', search, name='search') ]", "search, todo_list_view urlpatterns = [ path('', index, name='index'), path('list', todo_list_view, name='list'), path('search/', search," ]
[ "input_type=decoders.InputType(args.input_type), inspect_content=False, ) # Parse the input vt_client = None file_paths = []", "io import sys import tau_clients import vt from tau_clients import decoders from tau_clients", "e: raise ValueError(str(e)) from e def main(): \"\"\"Submit all samples or hashes by", "parser.add_argument( \"-c\", \"--config-file\", dest=\"config_file\", default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file, help=\"read config from here\", ) parser.add_argument( \"-b\",", "configparser.ConfigParser() conf.read(args.config_file) # Load the analysis client analysis_client = nsx_defender.AnalysisClient.from_conf(conf, \"analysis\") # Decode", "'{str(ae)}' when submitting file {file_path}\") for file_hash in file_hashes: try: file_data = download_from_vt(vt_client,", "import configparser import io import sys import tau_clients import vt from tau_clients import", "bypass the cache\", ) decoders.InputTypeDecoder.add_arguments_to_parser( parser=parser, choices=[ decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH, decoders.InputType.FILE, ], ) args", "try: for submission in analysis_client.yield_completed_tasks( submissions=submissions, start_timestamp=submission_start_ts, ): task_uuid = submission.get(\"task_uuid\") if not", "{file_hash}\") if vt_client: vt_client.close() print(f\"All files have been submitted ({len(submissions)} submissions)\") # Wait", "if vt_client: vt_client.close() print(f\"All files have been submitted ({len(submissions)} submissions)\") # Wait for", "submissions = [] task_to_source = {} for file_path in file_paths: with open(file_path, \"rb\")", "-> bytes: \"\"\" Download file from VT. 
:param vt.Client client: the VT client", "file {file_hash}\") if vt_client: vt_client.close() print(f\"All files have been submitted ({len(submissions)} submissions)\") #", "downloaded data :raises ValueError: in case of any error \"\"\" try: buffer =", "data :raises ValueError: in case of any error \"\"\" try: buffer = io.BytesIO()", "error \"\"\" try: buffer = io.BytesIO() client.download_file(file_hash, buffer) buffer.seek(0, 0) return buffer.read() except", "configparser import io import sys import tau_clients import vt from tau_clients import decoders", "Decode input type file_inputs, input_type = decoders.InputTypeDecoder().decode( arguments=args.input_bits, input_type=decoders.InputType(args.input_type), inspect_content=False, ) # Parse", "import decoders from tau_clients import exceptions from tau_clients import nsx_defender def download_from_vt(client: vt.Client,", "configparser.Error: print(\"VT credentials not found. Hash submissions are disabled\") return 1 file_hashes.extend(file_inputs) elif", "completion try: for submission in analysis_client.yield_completed_tasks( submissions=submissions, start_timestamp=submission_start_ts, ): task_uuid = submission.get(\"task_uuid\") if", "elif input_type is decoders.InputType.FILE: for file_input in file_inputs: file_paths.extend(tau_clients.get_file_paths(file_input)) else: raise ValueError(\"Unknown input", "samples\") # Submit submission_start_ts = analysis_client.get_api_utc_timestamp() submissions = [] task_to_source = {} for", "from here\", ) parser.add_argument( \"-b\", \"--bypass-cache\", dest=\"bypass_cache\", action=\"store_true\", default=False, help=\"whether to bypass the", "conf = configparser.ConfigParser() conf.read(args.config_file) # Load the analysis client analysis_client = nsx_defender.AnalysisClient.from_conf(conf, \"analysis\")", "= analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_path except 
exceptions.ApiError as ae: print(f\"Error '{str(ae)}'", "# SPDX-License-Identifier: BSD-2 import argparse import configparser import io import sys import tau_clients", "f: try: ret = analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_path except exceptions.ApiError as", "# Submit submission_start_ts = analysis_client.get_api_utc_timestamp() submissions = [] task_to_source = {} for file_path", "submission in analysis_client.yield_completed_tasks( submissions=submissions, start_timestamp=submission_start_ts, ): task_uuid = submission.get(\"task_uuid\") if not task_uuid: print(f\"File", "analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_hash except ValueError as ve: print(f\"Error '{str(ve)}' when", "import nsx_defender def download_from_vt(client: vt.Client, file_hash: str) -> bytes: \"\"\" Download file from", "else: raise ValueError(\"Unknown input type\") print(f\"Decoded input into {len(file_hashes)} file hashes and {len(file_paths)}", "buffer.seek(0, 0) return buffer.read() except (IOError, vt.APIError) as e: raise ValueError(str(e)) from e", "nsx_defender def download_from_vt(client: vt.Client, file_hash: str) -> bytes: \"\"\" Download file from VT.", "in analysis_client.yield_completed_tasks( submissions=submissions, start_timestamp=submission_start_ts, ): task_uuid = submission.get(\"task_uuid\") if not task_uuid: print(f\"File '{task_to_source[task_uuid]}'", "exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file {file_path}\") for file_hash in file_hashes:", "\"--config-file\", dest=\"config_file\", default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file, help=\"read config from here\", ) parser.add_argument( \"-b\", \"--bypass-cache\", dest=\"bypass_cache\",", "KeyboardInterrupt: print(\"Waiting for results interrupted by user\") print(\"Done\") 
return 0 if __name__ ==", "client.download_file(file_hash, buffer) buffer.seek(0, 0) return buffer.read() except (IOError, vt.APIError) as e: raise ValueError(str(e))", "file_paths: with open(file_path, \"rb\") as f: try: ret = analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]]", "except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file {file_hash}\") if vt_client: vt_client.close()", "file hashes and {len(file_paths)} samples\") # Submit submission_start_ts = analysis_client.get_api_utc_timestamp() submissions = []", "the downloaded data :raises ValueError: in case of any error \"\"\" try: buffer", ":param str file_hash: the file hash :rtype: bytes :return: the downloaded data :raises", "print(f\"File '{task_to_source[task_uuid]}' finished analysis: {task_link}\") except KeyboardInterrupt: print(\"Waiting for results interrupted by user\")", "print(f\"Error '{str(ae)}' when submitting file {file_hash}\") if vt_client: vt_client.close() print(f\"All files have been", "Copyright 2021 VMware, Inc. 
# SPDX-License-Identifier: BSD-2 import argparse import configparser import io", "task_uuid = submission.get(\"task_uuid\") if not task_uuid: print(f\"File '{task_to_source[task_uuid]}' was not submitted correctly\") else:", "'{task_to_source[task_uuid]}' was not submitted correctly\") else: task_link = tau_clients.get_task_link(task_uuid, prefer_load_balancer=True) print(f\"File '{task_to_source[task_uuid]}' finished", "from tau_clients import exceptions from tau_clients import nsx_defender def download_from_vt(client: vt.Client, file_hash: str)", "tau_clients import decoders from tau_clients import exceptions from tau_clients import nsx_defender def download_from_vt(client:", "in case of any error \"\"\" try: buffer = io.BytesIO() client.download_file(file_hash, buffer) buffer.seek(0,", "input_type is decoders.InputType.FILE_HASH: try: vt_client = vt.Client(apikey=conf.get(\"vt\", \"apikey\")) except configparser.Error: print(\"VT credentials not", "submitting file {file_path}\") for file_hash in file_hashes: try: file_data = download_from_vt(vt_client, file_hash) ret", "parser=parser, choices=[ decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH, decoders.InputType.FILE, ], ) args = parser.parse_args() conf = configparser.ConfigParser()", "from e def main(): \"\"\"Submit all samples or hashes by downloading from VT", "for file_hash in file_hashes: try: file_data = download_from_vt(vt_client, file_hash) ret = analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache)", "and {len(file_paths)} samples\") # Submit submission_start_ts = analysis_client.get_api_utc_timestamp() submissions = [] task_to_source =", "client: the VT client :param str file_hash: the file hash :rtype: bytes :return:", "when submitting file {file_hash}\") if vt_client: vt_client.close() print(f\"All files have been submitted ({len(submissions)}", "import argparse import configparser import io import sys import tau_clients import vt from", "conf.read(args.config_file) # Load 
the analysis client analysis_client = nsx_defender.AnalysisClient.from_conf(conf, \"analysis\") # Decode input", "exceptions from tau_clients import nsx_defender def download_from_vt(client: vt.Client, file_hash: str) -> bytes: \"\"\"", "bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_hash except ValueError as ve: print(f\"Error '{str(ve)}' when downloading", "\"-b\", \"--bypass-cache\", dest=\"bypass_cache\", action=\"store_true\", default=False, help=\"whether to bypass the cache\", ) decoders.InputTypeDecoder.add_arguments_to_parser( parser=parser,", "= [] task_to_source = {} for file_path in file_paths: with open(file_path, \"rb\") as", "0) return buffer.read() except (IOError, vt.APIError) as e: raise ValueError(str(e)) from e def", "import exceptions from tau_clients import nsx_defender def download_from_vt(client: vt.Client, file_hash: str) -> bytes:", "'{task_to_source[task_uuid]}' finished analysis: {task_link}\") except KeyboardInterrupt: print(\"Waiting for results interrupted by user\") print(\"Done\")", "the VT client :param str file_hash: the file hash :rtype: bytes :return: the", "# Parse the input vt_client = None file_paths = [] file_hashes = []", "submissions=submissions, start_timestamp=submission_start_ts, ): task_uuid = submission.get(\"task_uuid\") if not task_uuid: print(f\"File '{task_to_source[task_uuid]}' was not", "type file_inputs, input_type = decoders.InputTypeDecoder().decode( arguments=args.input_bits, input_type=decoders.InputType(args.input_type), inspect_content=False, ) # Parse the input", "else: task_link = tau_clients.get_task_link(task_uuid, prefer_load_balancer=True) print(f\"File '{task_to_source[task_uuid]}' finished analysis: {task_link}\") except KeyboardInterrupt: print(\"Waiting", "ae: print(f\"Error '{str(ae)}' when submitting file {file_path}\") for file_hash in file_hashes: try: file_data", "None file_paths = [] file_hashes = [] if input_type is 
decoders.InputType.FILE_HASH: try: vt_client", "\"--bypass-cache\", dest=\"bypass_cache\", action=\"store_true\", default=False, help=\"whether to bypass the cache\", ) decoders.InputTypeDecoder.add_arguments_to_parser( parser=parser, choices=[", "analysis_client.yield_completed_tasks( submissions=submissions, start_timestamp=submission_start_ts, ): task_uuid = submission.get(\"task_uuid\") if not task_uuid: print(f\"File '{task_to_source[task_uuid]}' was", "'{str(ve)}' when downloading file {file_hash}\") except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting", "try: ret = analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_path except exceptions.ApiError as ae:", "start_timestamp=submission_start_ts, ): task_uuid = submission.get(\"task_uuid\") if not task_uuid: print(f\"File '{task_to_source[task_uuid]}' was not submitted", "found. Hash submissions are disabled\") return 1 file_hashes.extend(file_inputs) elif input_type is decoders.InputType.FILE: for", "vt_client: vt_client.close() print(f\"All files have been submitted ({len(submissions)} submissions)\") # Wait for completion", "arguments=args.input_bits, input_type=decoders.InputType(args.input_type), inspect_content=False, ) # Parse the input vt_client = None file_paths =", "{file_path}\") for file_hash in file_hashes: try: file_data = download_from_vt(vt_client, file_hash) ret = analysis_client.submit_file(file_data,", "file_hash in file_hashes: try: file_data = download_from_vt(vt_client, file_hash) ret = analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache) submissions.append(ret)", "print(f\"All files have been submitted ({len(submissions)} submissions)\") # Wait for completion try: for", "input type file_inputs, input_type = decoders.InputTypeDecoder().decode( arguments=args.input_bits, input_type=decoders.InputType(args.input_type), inspect_content=False, ) # Parse the", "\"\"\" 
Download file from VT. :param vt.Client client: the VT client :param str", "{file_hash}\") except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file {file_hash}\") if vt_client:", "vt.APIError) as e: raise ValueError(str(e)) from e def main(): \"\"\"Submit all samples or", "bytes :return: the downloaded data :raises ValueError: in case of any error \"\"\"", "is decoders.InputType.FILE_HASH: try: vt_client = vt.Client(apikey=conf.get(\"vt\", \"apikey\")) except configparser.Error: print(\"VT credentials not found.", "file_hashes = [] if input_type is decoders.InputType.FILE_HASH: try: vt_client = vt.Client(apikey=conf.get(\"vt\", \"apikey\")) except", "submission_start_ts = analysis_client.get_api_utc_timestamp() submissions = [] task_to_source = {} for file_path in file_paths:", ") decoders.InputTypeDecoder.add_arguments_to_parser( parser=parser, choices=[ decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH, decoders.InputType.FILE, ], ) args = parser.parse_args() conf", "file_hashes.extend(file_inputs) elif input_type is decoders.InputType.FILE: for file_input in file_inputs: file_paths.extend(tau_clients.get_file_paths(file_input)) else: raise ValueError(\"Unknown", "default=False, help=\"whether to bypass the cache\", ) decoders.InputTypeDecoder.add_arguments_to_parser( parser=parser, choices=[ decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH, decoders.InputType.FILE,", ") args = parser.parse_args() conf = configparser.ConfigParser() conf.read(args.config_file) # Load the analysis client", "args = parser.parse_args() conf = configparser.ConfigParser() conf.read(args.config_file) # Load the analysis client analysis_client", "to bypass the cache\", ) decoders.InputTypeDecoder.add_arguments_to_parser( parser=parser, choices=[ decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH, decoders.InputType.FILE, ], )", "file {file_hash}\") except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file 
{file_hash}\") if", "VT first.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"-c\", \"--config-file\", dest=\"config_file\", default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file, help=\"read config", "task_uuid: print(f\"File '{task_to_source[task_uuid]}' was not submitted correctly\") else: task_link = tau_clients.get_task_link(task_uuid, prefer_load_balancer=True) print(f\"File", "[] if input_type is decoders.InputType.FILE_HASH: try: vt_client = vt.Client(apikey=conf.get(\"vt\", \"apikey\")) except configparser.Error: print(\"VT", "file_hash: the file hash :rtype: bytes :return: the downloaded data :raises ValueError: in", "vt_client = vt.Client(apikey=conf.get(\"vt\", \"apikey\")) except configparser.Error: print(\"VT credentials not found. Hash submissions are", "of any error \"\"\" try: buffer = io.BytesIO() client.download_file(file_hash, buffer) buffer.seek(0, 0) return", "try: buffer = io.BytesIO() client.download_file(file_hash, buffer) buffer.seek(0, 0) return buffer.read() except (IOError, vt.APIError)", "hashes and {len(file_paths)} samples\") # Submit submission_start_ts = analysis_client.get_api_utc_timestamp() submissions = [] task_to_source", "\"apikey\")) except configparser.Error: print(\"VT credentials not found. Hash submissions are disabled\") return 1", "ValueError(\"Unknown input type\") print(f\"Decoded input into {len(file_hashes)} file hashes and {len(file_paths)} samples\") #", "# Copyright 2021 VMware, Inc. # SPDX-License-Identifier: BSD-2 import argparse import configparser import", "ValueError(str(e)) from e def main(): \"\"\"Submit all samples or hashes by downloading from", "[] task_to_source = {} for file_path in file_paths: with open(file_path, \"rb\") as f:", "= vt.Client(apikey=conf.get(\"vt\", \"apikey\")) except configparser.Error: print(\"VT credentials not found. 
Hash submissions are disabled\")", "= parser.parse_args() conf = configparser.ConfigParser() conf.read(args.config_file) # Load the analysis client analysis_client =", "buffer) buffer.seek(0, 0) return buffer.read() except (IOError, vt.APIError) as e: raise ValueError(str(e)) from", "ae: print(f\"Error '{str(ae)}' when submitting file {file_hash}\") if vt_client: vt_client.close() print(f\"All files have", "or hashes by downloading from VT first.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"-c\", \"--config-file\",", "decoders.InputType.FILE, ], ) args = parser.parse_args() conf = configparser.ConfigParser() conf.read(args.config_file) # Load the", "tau_clients import vt from tau_clients import decoders from tau_clients import exceptions from tau_clients", "str file_hash: the file hash :rtype: bytes :return: the downloaded data :raises ValueError:", "file_path except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file {file_path}\") for file_hash", "python # Copyright 2021 VMware, Inc. 
# SPDX-License-Identifier: BSD-2 import argparse import configparser", "analysis_client.get_api_utc_timestamp() submissions = [] task_to_source = {} for file_path in file_paths: with open(file_path,", ":return: the downloaded data :raises ValueError: in case of any error \"\"\" try:", "file_paths.extend(tau_clients.get_file_paths(file_input)) else: raise ValueError(\"Unknown input type\") print(f\"Decoded input into {len(file_hashes)} file hashes and", "been submitted ({len(submissions)} submissions)\") # Wait for completion try: for submission in analysis_client.yield_completed_tasks(", "(IOError, vt.APIError) as e: raise ValueError(str(e)) from e def main(): \"\"\"Submit all samples", "{task_link}\") except KeyboardInterrupt: print(\"Waiting for results interrupted by user\") print(\"Done\") return 0 if", "samples or hashes by downloading from VT first.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"-c\",", "hash :rtype: bytes :return: the downloaded data :raises ValueError: in case of any", "all samples or hashes by downloading from VT first.\"\"\" parser = argparse.ArgumentParser() parser.add_argument(", "credentials not found. 
Hash submissions are disabled\") return 1 file_hashes.extend(file_inputs) elif input_type is", "return 1 file_hashes.extend(file_inputs) elif input_type is decoders.InputType.FILE: for file_input in file_inputs: file_paths.extend(tau_clients.get_file_paths(file_input)) else:", "download_from_vt(vt_client, file_hash) ret = analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_hash except ValueError as", "into {len(file_hashes)} file hashes and {len(file_paths)} samples\") # Submit submission_start_ts = analysis_client.get_api_utc_timestamp() submissions", "import vt from tau_clients import decoders from tau_clients import exceptions from tau_clients import", "parser = argparse.ArgumentParser() parser.add_argument( \"-c\", \"--config-file\", dest=\"config_file\", default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file, help=\"read config from here\",", "submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_hash except ValueError as ve: print(f\"Error '{str(ve)}' when downloading file", "help=\"read config from here\", ) parser.add_argument( \"-b\", \"--bypass-cache\", dest=\"bypass_cache\", action=\"store_true\", default=False, help=\"whether to", "print(f\"Error '{str(ve)}' when downloading file {file_hash}\") except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when", "{len(file_paths)} samples\") # Submit submission_start_ts = analysis_client.get_api_utc_timestamp() submissions = [] task_to_source = {}", "input into {len(file_hashes)} file hashes and {len(file_paths)} samples\") # Submit submission_start_ts = analysis_client.get_api_utc_timestamp()", "'{str(ae)}' when submitting file {file_hash}\") if vt_client: vt_client.close() print(f\"All files have been submitted", "except KeyboardInterrupt: print(\"Waiting for results interrupted by user\") print(\"Done\") return 0 if __name__", ":rtype: bytes :return: the downloaded data :raises 
ValueError: in case of any error", "the cache\", ) decoders.InputTypeDecoder.add_arguments_to_parser( parser=parser, choices=[ decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH, decoders.InputType.FILE, ], ) args =", "[] file_hashes = [] if input_type is decoders.InputType.FILE_HASH: try: vt_client = vt.Client(apikey=conf.get(\"vt\", \"apikey\"))", "\"analysis\") # Decode input type file_inputs, input_type = decoders.InputTypeDecoder().decode( arguments=args.input_bits, input_type=decoders.InputType(args.input_type), inspect_content=False, )", "= argparse.ArgumentParser() parser.add_argument( \"-c\", \"--config-file\", dest=\"config_file\", default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file, help=\"read config from here\", )", "as ae: print(f\"Error '{str(ae)}' when submitting file {file_hash}\") if vt_client: vt_client.close() print(f\"All files", "downloading from VT first.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"-c\", \"--config-file\", dest=\"config_file\", default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file,", "files have been submitted ({len(submissions)} submissions)\") # Wait for completion try: for submission", "the file hash :rtype: bytes :return: the downloaded data :raises ValueError: in case", "disabled\") return 1 file_hashes.extend(file_inputs) elif input_type is decoders.InputType.FILE: for file_input in file_inputs: file_paths.extend(tau_clients.get_file_paths(file_input))", "exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file {file_hash}\") if vt_client: vt_client.close() print(f\"All", "tau_clients import exceptions from tau_clients import nsx_defender def download_from_vt(client: vt.Client, file_hash: str) ->", "= None file_paths = [] file_hashes = [] if input_type is decoders.InputType.FILE_HASH: try:", "1 file_hashes.extend(file_inputs) elif input_type is decoders.InputType.FILE: for file_input in file_inputs: 
file_paths.extend(tau_clients.get_file_paths(file_input)) else: raise", "with open(file_path, \"rb\") as f: try: ret = analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] =", "print(\"VT credentials not found. Hash submissions are disabled\") return 1 file_hashes.extend(file_inputs) elif input_type", "], ) args = parser.parse_args() conf = configparser.ConfigParser() conf.read(args.config_file) # Load the analysis", "{} for file_path in file_paths: with open(file_path, \"rb\") as f: try: ret =", "({len(submissions)} submissions)\") # Wait for completion try: for submission in analysis_client.yield_completed_tasks( submissions=submissions, start_timestamp=submission_start_ts,", "buffer = io.BytesIO() client.download_file(file_hash, buffer) buffer.seek(0, 0) return buffer.read() except (IOError, vt.APIError) as", "print(f\"Error '{str(ae)}' when submitting file {file_path}\") for file_hash in file_hashes: try: file_data =", "as f: try: ret = analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_path except exceptions.ApiError", "str) -> bytes: \"\"\" Download file from VT. 
:param vt.Client client: the VT", "from VT first.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"-c\", \"--config-file\", dest=\"config_file\", default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file, help=\"read", "for file_input in file_inputs: file_paths.extend(tau_clients.get_file_paths(file_input)) else: raise ValueError(\"Unknown input type\") print(f\"Decoded input into", "= download_from_vt(vt_client, file_hash) ret = analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_hash except ValueError", "ValueError as ve: print(f\"Error '{str(ve)}' when downloading file {file_hash}\") except exceptions.ApiError as ae:", "#!/usr/bin/env python # Copyright 2021 VMware, Inc. # SPDX-License-Identifier: BSD-2 import argparse import", "file_hash: str) -> bytes: \"\"\" Download file from VT. :param vt.Client client: the", "Submit submission_start_ts = analysis_client.get_api_utc_timestamp() submissions = [] task_to_source = {} for file_path in", "is decoders.InputType.FILE: for file_input in file_inputs: file_paths.extend(tau_clients.get_file_paths(file_input)) else: raise ValueError(\"Unknown input type\") print(f\"Decoded", "print(\"Waiting for results interrupted by user\") print(\"Done\") return 0 if __name__ == \"__main__\":", "type=tau_clients.is_valid_config_file, help=\"read config from here\", ) parser.add_argument( \"-b\", \"--bypass-cache\", dest=\"bypass_cache\", action=\"store_true\", default=False, help=\"whether", "input type\") print(f\"Decoded input into {len(file_hashes)} file hashes and {len(file_paths)} samples\") # Submit", "= io.BytesIO() client.download_file(file_hash, buffer) buffer.seek(0, 0) return buffer.read() except (IOError, vt.APIError) as e:", "print(f\"Decoded input into {len(file_hashes)} file hashes and {len(file_paths)} samples\") # Submit submission_start_ts =", "client analysis_client = 
nsx_defender.AnalysisClient.from_conf(conf, \"analysis\") # Decode input type file_inputs, input_type = decoders.InputTypeDecoder().decode(", "in file_inputs: file_paths.extend(tau_clients.get_file_paths(file_input)) else: raise ValueError(\"Unknown input type\") print(f\"Decoded input into {len(file_hashes)} file", "ValueError: in case of any error \"\"\" try: buffer = io.BytesIO() client.download_file(file_hash, buffer)", "Wait for completion try: for submission in analysis_client.yield_completed_tasks( submissions=submissions, start_timestamp=submission_start_ts, ): task_uuid =", "for results interrupted by user\") print(\"Done\") return 0 if __name__ == \"__main__\": sys.exit(main())", "as e: raise ValueError(str(e)) from e def main(): \"\"\"Submit all samples or hashes", "client :param str file_hash: the file hash :rtype: bytes :return: the downloaded data", "when downloading file {file_hash}\") except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file", "vt_client = None file_paths = [] file_hashes = [] if input_type is decoders.InputType.FILE_HASH:", "downloading file {file_hash}\") except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file {file_hash}\")", "raise ValueError(str(e)) from e def main(): \"\"\"Submit all samples or hashes by downloading", "prefer_load_balancer=True) print(f\"File '{task_to_source[task_uuid]}' finished analysis: {task_link}\") except KeyboardInterrupt: print(\"Waiting for results interrupted by", "analysis client analysis_client = nsx_defender.AnalysisClient.from_conf(conf, \"analysis\") # Decode input type file_inputs, input_type =", "from tau_clients import decoders from tau_clients import exceptions from tau_clients import nsx_defender def", "for submission in analysis_client.yield_completed_tasks( submissions=submissions, start_timestamp=submission_start_ts, ): task_uuid = submission.get(\"task_uuid\") if not task_uuid:", "decoders.InputTypeDecoder.add_arguments_to_parser( 
parser=parser, choices=[ decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH, decoders.InputType.FILE, ], ) args = parser.parse_args() conf =", "import io import sys import tau_clients import vt from tau_clients import decoders from", "= configparser.ConfigParser() conf.read(args.config_file) # Load the analysis client analysis_client = nsx_defender.AnalysisClient.from_conf(conf, \"analysis\") #", "file_path in file_paths: with open(file_path, \"rb\") as f: try: ret = analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache)", "submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_path except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file", "def download_from_vt(client: vt.Client, file_hash: str) -> bytes: \"\"\" Download file from VT. :param", "bytes: \"\"\" Download file from VT. :param vt.Client client: the VT client :param", "vt from tau_clients import decoders from tau_clients import exceptions from tau_clients import nsx_defender", "tau_clients.get_task_link(task_uuid, prefer_load_balancer=True) print(f\"File '{task_to_source[task_uuid]}' finished analysis: {task_link}\") except KeyboardInterrupt: print(\"Waiting for results interrupted", "action=\"store_true\", default=False, help=\"whether to bypass the cache\", ) decoders.InputTypeDecoder.add_arguments_to_parser( parser=parser, choices=[ decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH,", "task_link = tau_clients.get_task_link(task_uuid, prefer_load_balancer=True) print(f\"File '{task_to_source[task_uuid]}' finished analysis: {task_link}\") except KeyboardInterrupt: print(\"Waiting for", "e def main(): \"\"\"Submit all samples or hashes by downloading from VT first.\"\"\"", "in file_paths: with open(file_path, \"rb\") as f: try: ret = analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache) submissions.append(ret)", "file_input in file_inputs: file_paths.extend(tau_clients.get_file_paths(file_input)) else: raise 
ValueError(\"Unknown input type\") print(f\"Decoded input into {len(file_hashes)}", "here\", ) parser.add_argument( \"-b\", \"--bypass-cache\", dest=\"bypass_cache\", action=\"store_true\", default=False, help=\"whether to bypass the cache\",", "Hash submissions are disabled\") return 1 file_hashes.extend(file_inputs) elif input_type is decoders.InputType.FILE: for file_input", "case of any error \"\"\" try: buffer = io.BytesIO() client.download_file(file_hash, buffer) buffer.seek(0, 0)", "file {file_path}\") for file_hash in file_hashes: try: file_data = download_from_vt(vt_client, file_hash) ret =", "2021 VMware, Inc. # SPDX-License-Identifier: BSD-2 import argparse import configparser import io import", "except configparser.Error: print(\"VT credentials not found. Hash submissions are disabled\") return 1 file_hashes.extend(file_inputs)", "argparse import configparser import io import sys import tau_clients import vt from tau_clients", "decoders.InputType.FILE_HASH, decoders.InputType.FILE, ], ) args = parser.parse_args() conf = configparser.ConfigParser() conf.read(args.config_file) # Load", "import sys import tau_clients import vt from tau_clients import decoders from tau_clients import", "\"\"\" try: buffer = io.BytesIO() client.download_file(file_hash, buffer) buffer.seek(0, 0) return buffer.read() except (IOError,", "submissions are disabled\") return 1 file_hashes.extend(file_inputs) elif input_type is decoders.InputType.FILE: for file_input in", "= [] if input_type is decoders.InputType.FILE_HASH: try: vt_client = vt.Client(apikey=conf.get(\"vt\", \"apikey\")) except configparser.Error:", "ret = analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_hash except ValueError as ve: print(f\"Error", "download_from_vt(client: vt.Client, file_hash: str) -> bytes: \"\"\" Download file from VT. 
:param vt.Client", "the analysis client analysis_client = nsx_defender.AnalysisClient.from_conf(conf, \"analysis\") # Decode input type file_inputs, input_type", "for file_path in file_paths: with open(file_path, \"rb\") as f: try: ret = analysis_client.submit_file(f.read(),", "file_data = download_from_vt(vt_client, file_hash) ret = analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_hash except", "task_to_source = {} for file_path in file_paths: with open(file_path, \"rb\") as f: try:", "# Load the analysis client analysis_client = nsx_defender.AnalysisClient.from_conf(conf, \"analysis\") # Decode input type", "analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_path except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when", ") parser.add_argument( \"-b\", \"--bypass-cache\", dest=\"bypass_cache\", action=\"store_true\", default=False, help=\"whether to bypass the cache\", )", "analysis_client = nsx_defender.AnalysisClient.from_conf(conf, \"analysis\") # Decode input type file_inputs, input_type = decoders.InputTypeDecoder().decode( arguments=args.input_bits,", "for completion try: for submission in analysis_client.yield_completed_tasks( submissions=submissions, start_timestamp=submission_start_ts, ): task_uuid = submission.get(\"task_uuid\")", "decoders.InputType.FILE: for file_input in file_inputs: file_paths.extend(tau_clients.get_file_paths(file_input)) else: raise ValueError(\"Unknown input type\") print(f\"Decoded input", "vt.Client(apikey=conf.get(\"vt\", \"apikey\")) except configparser.Error: print(\"VT credentials not found. 
Hash submissions are disabled\") return", "Load the analysis client analysis_client = nsx_defender.AnalysisClient.from_conf(conf, \"analysis\") # Decode input type file_inputs,", "main(): \"\"\"Submit all samples or hashes by downloading from VT first.\"\"\" parser =", "= file_path except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file {file_path}\") for", "correctly\") else: task_link = tau_clients.get_task_link(task_uuid, prefer_load_balancer=True) print(f\"File '{task_to_source[task_uuid]}' finished analysis: {task_link}\") except KeyboardInterrupt:", "dest=\"config_file\", default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file, help=\"read config from here\", ) parser.add_argument( \"-b\", \"--bypass-cache\", dest=\"bypass_cache\", action=\"store_true\",", "decoders.InputTypeDecoder().decode( arguments=args.input_bits, input_type=decoders.InputType(args.input_type), inspect_content=False, ) # Parse the input vt_client = None file_paths", "submitted correctly\") else: task_link = tau_clients.get_task_link(task_uuid, prefer_load_balancer=True) print(f\"File '{task_to_source[task_uuid]}' finished analysis: {task_link}\") except", "first.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"-c\", \"--config-file\", dest=\"config_file\", default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file, help=\"read config from", "= {} for file_path in file_paths: with open(file_path, \"rb\") as f: try: ret", "vt.Client, file_hash: str) -> bytes: \"\"\" Download file from VT. :param vt.Client client:", "not found. 
Hash submissions are disabled\") return 1 file_hashes.extend(file_inputs) elif input_type is decoders.InputType.FILE:", "choices=[ decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH, decoders.InputType.FILE, ], ) args = parser.parse_args() conf = configparser.ConfigParser() conf.read(args.config_file)", "input_type = decoders.InputTypeDecoder().decode( arguments=args.input_bits, input_type=decoders.InputType(args.input_type), inspect_content=False, ) # Parse the input vt_client =", "hashes by downloading from VT first.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"-c\", \"--config-file\", dest=\"config_file\",", "file_inputs, input_type = decoders.InputTypeDecoder().decode( arguments=args.input_bits, input_type=decoders.InputType(args.input_type), inspect_content=False, ) # Parse the input vt_client", "input vt_client = None file_paths = [] file_hashes = [] if input_type is", "was not submitted correctly\") else: task_link = tau_clients.get_task_link(task_uuid, prefer_load_balancer=True) print(f\"File '{task_to_source[task_uuid]}' finished analysis:", "except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file {file_path}\") for file_hash in", "import tau_clients import vt from tau_clients import decoders from tau_clients import exceptions from", "VMware, Inc. 
# SPDX-License-Identifier: BSD-2 import argparse import configparser import io import sys", "if input_type is decoders.InputType.FILE_HASH: try: vt_client = vt.Client(apikey=conf.get(\"vt\", \"apikey\")) except configparser.Error: print(\"VT credentials", "parser.add_argument( \"-b\", \"--bypass-cache\", dest=\"bypass_cache\", action=\"store_true\", default=False, help=\"whether to bypass the cache\", ) decoders.InputTypeDecoder.add_arguments_to_parser(", "in file_hashes: try: file_data = download_from_vt(vt_client, file_hash) ret = analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]]", "submission.get(\"task_uuid\") if not task_uuid: print(f\"File '{task_to_source[task_uuid]}' was not submitted correctly\") else: task_link =", "vt_client.close() print(f\"All files have been submitted ({len(submissions)} submissions)\") # Wait for completion try:", "if not task_uuid: print(f\"File '{task_to_source[task_uuid]}' was not submitted correctly\") else: task_link = tau_clients.get_task_link(task_uuid,", "task_to_source[ret[\"task_uuid\"]] = file_path except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting file {file_path}\")", "return buffer.read() except (IOError, vt.APIError) as e: raise ValueError(str(e)) from e def main():", "= decoders.InputTypeDecoder().decode( arguments=args.input_bits, input_type=decoders.InputType(args.input_type), inspect_content=False, ) # Parse the input vt_client = None", "as ae: print(f\"Error '{str(ae)}' when submitting file {file_path}\") for file_hash in file_hashes: try:", "file_inputs: file_paths.extend(tau_clients.get_file_paths(file_input)) else: raise ValueError(\"Unknown input type\") print(f\"Decoded input into {len(file_hashes)} file hashes", "when submitting file {file_path}\") for file_hash in file_hashes: try: file_data = download_from_vt(vt_client, file_hash)", "= submission.get(\"task_uuid\") if not task_uuid: print(f\"File 
'{task_to_source[task_uuid]}' was not submitted correctly\") else: task_link", "ret = analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_path except exceptions.ApiError as ae: print(f\"Error", "input_type is decoders.InputType.FILE: for file_input in file_inputs: file_paths.extend(tau_clients.get_file_paths(file_input)) else: raise ValueError(\"Unknown input type\")", "Parse the input vt_client = None file_paths = [] file_hashes = [] if", "from VT. :param vt.Client client: the VT client :param str file_hash: the file", "tau_clients import nsx_defender def download_from_vt(client: vt.Client, file_hash: str) -> bytes: \"\"\" Download file", "file hash :rtype: bytes :return: the downloaded data :raises ValueError: in case of", "by downloading from VT first.\"\"\" parser = argparse.ArgumentParser() parser.add_argument( \"-c\", \"--config-file\", dest=\"config_file\", default=\"./data/tau_clients.ini\",", "\"\"\"Submit all samples or hashes by downloading from VT first.\"\"\" parser = argparse.ArgumentParser()", "help=\"whether to bypass the cache\", ) decoders.InputTypeDecoder.add_arguments_to_parser( parser=parser, choices=[ decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH, decoders.InputType.FILE, ],", "print(f\"File '{task_to_source[task_uuid]}' was not submitted correctly\") else: task_link = tau_clients.get_task_link(task_uuid, prefer_load_balancer=True) print(f\"File '{task_to_source[task_uuid]}'", ":param vt.Client client: the VT client :param str file_hash: the file hash :rtype:", "file from VT. 
:param vt.Client client: the VT client :param str file_hash: the", "are disabled\") return 1 file_hashes.extend(file_inputs) elif input_type is decoders.InputType.FILE: for file_input in file_inputs:", "): task_uuid = submission.get(\"task_uuid\") if not task_uuid: print(f\"File '{task_to_source[task_uuid]}' was not submitted correctly\")", "file_hashes: try: file_data = download_from_vt(vt_client, file_hash) ret = analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] =", "SPDX-License-Identifier: BSD-2 import argparse import configparser import io import sys import tau_clients import", "submitted ({len(submissions)} submissions)\") # Wait for completion try: for submission in analysis_client.yield_completed_tasks( submissions=submissions,", "any error \"\"\" try: buffer = io.BytesIO() client.download_file(file_hash, buffer) buffer.seek(0, 0) return buffer.read()", "default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file, help=\"read config from here\", ) parser.add_argument( \"-b\", \"--bypass-cache\", dest=\"bypass_cache\", action=\"store_true\", default=False,", "submitting file {file_hash}\") if vt_client: vt_client.close() print(f\"All files have been submitted ({len(submissions)} submissions)\")", "# Wait for completion try: for submission in analysis_client.yield_completed_tasks( submissions=submissions, start_timestamp=submission_start_ts, ): task_uuid", "parser.parse_args() conf = configparser.ConfigParser() conf.read(args.config_file) # Load the analysis client analysis_client = nsx_defender.AnalysisClient.from_conf(conf,", "# Decode input type file_inputs, input_type = decoders.InputTypeDecoder().decode( arguments=args.input_bits, input_type=decoders.InputType(args.input_type), inspect_content=False, ) #", "from tau_clients import nsx_defender def download_from_vt(client: vt.Client, file_hash: str) -> bytes: \"\"\" Download", "task_to_source[ret[\"task_uuid\"]] 
= file_hash except ValueError as ve: print(f\"Error '{str(ve)}' when downloading file {file_hash}\")", "try: file_data = download_from_vt(vt_client, file_hash) ret = analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_hash", "ve: print(f\"Error '{str(ve)}' when downloading file {file_hash}\") except exceptions.ApiError as ae: print(f\"Error '{str(ae)}'", "Inc. # SPDX-License-Identifier: BSD-2 import argparse import configparser import io import sys import", "have been submitted ({len(submissions)} submissions)\") # Wait for completion try: for submission in", "vt.Client client: the VT client :param str file_hash: the file hash :rtype: bytes", "VT. :param vt.Client client: the VT client :param str file_hash: the file hash", "buffer.read() except (IOError, vt.APIError) as e: raise ValueError(str(e)) from e def main(): \"\"\"Submit", "try: vt_client = vt.Client(apikey=conf.get(\"vt\", \"apikey\")) except configparser.Error: print(\"VT credentials not found. 
Hash submissions", "file_paths = [] file_hashes = [] if input_type is decoders.InputType.FILE_HASH: try: vt_client =", "VT client :param str file_hash: the file hash :rtype: bytes :return: the downloaded", "raise ValueError(\"Unknown input type\") print(f\"Decoded input into {len(file_hashes)} file hashes and {len(file_paths)} samples\")", "the input vt_client = None file_paths = [] file_hashes = [] if input_type", "= [] file_hashes = [] if input_type is decoders.InputType.FILE_HASH: try: vt_client = vt.Client(apikey=conf.get(\"vt\",", "BSD-2 import argparse import configparser import io import sys import tau_clients import vt", "decoders from tau_clients import exceptions from tau_clients import nsx_defender def download_from_vt(client: vt.Client, file_hash:", "\"-c\", \"--config-file\", dest=\"config_file\", default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file, help=\"read config from here\", ) parser.add_argument( \"-b\", \"--bypass-cache\",", "= file_hash except ValueError as ve: print(f\"Error '{str(ve)}' when downloading file {file_hash}\") except", ") # Parse the input vt_client = None file_paths = [] file_hashes =", "{len(file_hashes)} file hashes and {len(file_paths)} samples\") # Submit submission_start_ts = analysis_client.get_api_utc_timestamp() submissions =", "bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_path except exceptions.ApiError as ae: print(f\"Error '{str(ae)}' when submitting", "open(file_path, \"rb\") as f: try: ret = analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_path", "not submitted correctly\") else: task_link = tau_clients.get_task_link(task_uuid, prefer_load_balancer=True) print(f\"File '{task_to_source[task_uuid]}' finished analysis: {task_link}\")", "= nsx_defender.AnalysisClient.from_conf(conf, \"analysis\") # Decode input type file_inputs, input_type = 
decoders.InputTypeDecoder().decode( arguments=args.input_bits, input_type=decoders.InputType(args.input_type),", "submissions)\") # Wait for completion try: for submission in analysis_client.yield_completed_tasks( submissions=submissions, start_timestamp=submission_start_ts, ):", "cache\", ) decoders.InputTypeDecoder.add_arguments_to_parser( parser=parser, choices=[ decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH, decoders.InputType.FILE, ], ) args = parser.parse_args()", "= analysis_client.get_api_utc_timestamp() submissions = [] task_to_source = {} for file_path in file_paths: with", "= tau_clients.get_task_link(task_uuid, prefer_load_balancer=True) print(f\"File '{task_to_source[task_uuid]}' finished analysis: {task_link}\") except KeyboardInterrupt: print(\"Waiting for results", "sys import tau_clients import vt from tau_clients import decoders from tau_clients import exceptions", "def main(): \"\"\"Submit all samples or hashes by downloading from VT first.\"\"\" parser", "as ve: print(f\"Error '{str(ve)}' when downloading file {file_hash}\") except exceptions.ApiError as ae: print(f\"Error", "except ValueError as ve: print(f\"Error '{str(ve)}' when downloading file {file_hash}\") except exceptions.ApiError as", "nsx_defender.AnalysisClient.from_conf(conf, \"analysis\") # Decode input type file_inputs, input_type = decoders.InputTypeDecoder().decode( arguments=args.input_bits, input_type=decoders.InputType(args.input_type), inspect_content=False,", "\"rb\") as f: try: ret = analysis_client.submit_file(f.read(), bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_path except", "io.BytesIO() client.download_file(file_hash, buffer) buffer.seek(0, 0) return buffer.read() except (IOError, vt.APIError) as e: raise", "= analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_hash except ValueError as ve: print(f\"Error '{str(ve)}'", 
"decoders.InputType.FILE_HASH: try: vt_client = vt.Client(apikey=conf.get(\"vt\", \"apikey\")) except configparser.Error: print(\"VT credentials not found. Hash", "finished analysis: {task_link}\") except KeyboardInterrupt: print(\"Waiting for results interrupted by user\") print(\"Done\") return", ":raises ValueError: in case of any error \"\"\" try: buffer = io.BytesIO() client.download_file(file_hash,", "type\") print(f\"Decoded input into {len(file_hashes)} file hashes and {len(file_paths)} samples\") # Submit submission_start_ts", "except (IOError, vt.APIError) as e: raise ValueError(str(e)) from e def main(): \"\"\"Submit all", "Download file from VT. :param vt.Client client: the VT client :param str file_hash:", "not task_uuid: print(f\"File '{task_to_source[task_uuid]}' was not submitted correctly\") else: task_link = tau_clients.get_task_link(task_uuid, prefer_load_balancer=True)", "file_hash) ret = analysis_client.submit_file(file_data, bypass_cache=args.bypass_cache) submissions.append(ret) task_to_source[ret[\"task_uuid\"]] = file_hash except ValueError as ve:", "argparse.ArgumentParser() parser.add_argument( \"-c\", \"--config-file\", dest=\"config_file\", default=\"./data/tau_clients.ini\", type=tau_clients.is_valid_config_file, help=\"read config from here\", ) parser.add_argument(", "decoders.InputType.DIRECTORY, decoders.InputType.FILE_HASH, decoders.InputType.FILE, ], ) args = parser.parse_args() conf = configparser.ConfigParser() conf.read(args.config_file) #", "file_hash except ValueError as ve: print(f\"Error '{str(ve)}' when downloading file {file_hash}\") except exceptions.ApiError", "config from here\", ) parser.add_argument( \"-b\", \"--bypass-cache\", dest=\"bypass_cache\", action=\"store_true\", default=False, help=\"whether to bypass", "dest=\"bypass_cache\", action=\"store_true\", default=False, help=\"whether to bypass the cache\", ) decoders.InputTypeDecoder.add_arguments_to_parser( parser=parser, choices=[ 
decoders.InputType.DIRECTORY,", "inspect_content=False, ) # Parse the input vt_client = None file_paths = [] file_hashes", "analysis: {task_link}\") except KeyboardInterrupt: print(\"Waiting for results interrupted by user\") print(\"Done\") return 0" ]
[ "[] allPositions = rf.getAllOptions(allPositions) frequentTickers = rf.getFrequentTickers(allPositions) rf.r.options.write_spinner() rf.r.options.spinning_cursor() optionNames, entryPrices, calls, puts", "rf email, password = rf.getCredentials() rf.loginToRH(email, password) allPositions = [] allPositions = rf.getAllOptions(allPositions)", "entryPrices, calls, puts = rf.getOptionTrades(allPositions) writer, excelPath= rf.writeOptionInfo(frequentTickers, optionNames, entryPrices, calls, puts) rf.closeAndSave(writer)", "calls, puts = rf.getOptionTrades(allPositions) writer, excelPath= rf.writeOptionInfo(frequentTickers, optionNames, entryPrices, calls, puts) rf.closeAndSave(writer) print(\"Options", "frequentTickers = rf.getFrequentTickers(allPositions) rf.r.options.write_spinner() rf.r.options.spinning_cursor() optionNames, entryPrices, calls, puts = rf.getOptionTrades(allPositions) writer, excelPath=", "writer, excelPath= rf.writeOptionInfo(frequentTickers, optionNames, entryPrices, calls, puts) rf.closeAndSave(writer) print(\"Options successfully exported to:\", excelPath)", "= [] allPositions = rf.getAllOptions(allPositions) frequentTickers = rf.getFrequentTickers(allPositions) rf.r.options.write_spinner() rf.r.options.spinning_cursor() optionNames, entryPrices, calls,", "rf.getFrequentTickers(allPositions) rf.r.options.write_spinner() rf.r.options.spinning_cursor() optionNames, entryPrices, calls, puts = rf.getOptionTrades(allPositions) writer, excelPath= rf.writeOptionInfo(frequentTickers, optionNames,", "rf.r.options.spinning_cursor() optionNames, entryPrices, calls, puts = rf.getOptionTrades(allPositions) writer, excelPath= rf.writeOptionInfo(frequentTickers, optionNames, entryPrices, calls,", "as rf email, password = rf.getCredentials() rf.loginToRH(email, password) allPositions = [] allPositions =", "puts = rf.getOptionTrades(allPositions) writer, excelPath= rf.writeOptionInfo(frequentTickers, optionNames, entryPrices, calls, puts) rf.closeAndSave(writer) 
print(\"Options successfully", "rf.getOptionTrades(allPositions) writer, excelPath= rf.writeOptionInfo(frequentTickers, optionNames, entryPrices, calls, puts) rf.closeAndSave(writer) print(\"Options successfully exported to:\",", "password = rf.getCredentials() rf.loginToRH(email, password) allPositions = [] allPositions = rf.getAllOptions(allPositions) frequentTickers =", "rf.getAllOptions(allPositions) frequentTickers = rf.getFrequentTickers(allPositions) rf.r.options.write_spinner() rf.r.options.spinning_cursor() optionNames, entryPrices, calls, puts = rf.getOptionTrades(allPositions) writer,", "= rf.getFrequentTickers(allPositions) rf.r.options.write_spinner() rf.r.options.spinning_cursor() optionNames, entryPrices, calls, puts = rf.getOptionTrades(allPositions) writer, excelPath= rf.writeOptionInfo(frequentTickers,", "RobinhoodFunctions as rf email, password = rf.getCredentials() rf.loginToRH(email, password) allPositions = [] allPositions", "= rf.getCredentials() rf.loginToRH(email, password) allPositions = [] allPositions = rf.getAllOptions(allPositions) frequentTickers = rf.getFrequentTickers(allPositions)", "rf.loginToRH(email, password) allPositions = [] allPositions = rf.getAllOptions(allPositions) frequentTickers = rf.getFrequentTickers(allPositions) rf.r.options.write_spinner() rf.r.options.spinning_cursor()", "= rf.getAllOptions(allPositions) frequentTickers = rf.getFrequentTickers(allPositions) rf.r.options.write_spinner() rf.r.options.spinning_cursor() optionNames, entryPrices, calls, puts = rf.getOptionTrades(allPositions)", "allPositions = [] allPositions = rf.getAllOptions(allPositions) frequentTickers = rf.getFrequentTickers(allPositions) rf.r.options.write_spinner() rf.r.options.spinning_cursor() optionNames, entryPrices,", "email, password = rf.getCredentials() rf.loginToRH(email, password) allPositions = [] allPositions = rf.getAllOptions(allPositions) frequentTickers", "allPositions = rf.getAllOptions(allPositions) frequentTickers = 
rf.getFrequentTickers(allPositions) rf.r.options.write_spinner() rf.r.options.spinning_cursor() optionNames, entryPrices, calls, puts =", "rf.r.options.write_spinner() rf.r.options.spinning_cursor() optionNames, entryPrices, calls, puts = rf.getOptionTrades(allPositions) writer, excelPath= rf.writeOptionInfo(frequentTickers, optionNames, entryPrices,", "password) allPositions = [] allPositions = rf.getAllOptions(allPositions) frequentTickers = rf.getFrequentTickers(allPositions) rf.r.options.write_spinner() rf.r.options.spinning_cursor() optionNames,", "import RobinhoodFunctions as rf email, password = rf.getCredentials() rf.loginToRH(email, password) allPositions = []", "= rf.getOptionTrades(allPositions) writer, excelPath= rf.writeOptionInfo(frequentTickers, optionNames, entryPrices, calls, puts) rf.closeAndSave(writer) print(\"Options successfully exported", "optionNames, entryPrices, calls, puts = rf.getOptionTrades(allPositions) writer, excelPath= rf.writeOptionInfo(frequentTickers, optionNames, entryPrices, calls, puts)", "rf.getCredentials() rf.loginToRH(email, password) allPositions = [] allPositions = rf.getAllOptions(allPositions) frequentTickers = rf.getFrequentTickers(allPositions) rf.r.options.write_spinner()" ]
[ "class ModelC(ModelA): b = models.ForeignKey(ModelB, related_name=\"c\", on_delete=models.CASCADE) @grainy_model( namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\" ) class ModelD(ModelA):", "related_name=\"z\", on_delete=models.CASCADE) class APIKey(models.Model): key = models.CharField(max_length=255) class APIKeyPermission(Permission): api_key = models.ForeignKey( APIKey,", "PermissionManager from django_grainy.handlers import GrainyMixin # Create your models here. \"\"\" These are", "= models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\") class ModelB(ModelA): pass @grainy_model( namespace=ModelB.Grainy.namespace(), namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", ) class ModelC(ModelA): b", "class ModelA(ModelBase): name = models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\") class ModelB(ModelA): pass @grainy_model( namespace=ModelB.Grainy.namespace(), namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", )", "x = models.ForeignKey(ModelX, related_name=\"y\", on_delete=models.CASCADE) @grainy_model(namespace=\"z\", parent=\"y\") class ModelZ(ModelA): y = models.ForeignKey(ModelY, related_name=\"z\",", "import grainy_model from django_grainy.models import Permission, PermissionManager from django_grainy.handlers import GrainyMixin # Create", "related_name=\"c\", on_delete=models.CASCADE) @grainy_model( namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\" ) class ModelD(ModelA): pass @grainy_model(namespace=\"x\") class ModelX(ModelA): pass", "django_grainy.models import Permission, PermissionManager from django_grainy.handlers import GrainyMixin # Create your models here.", "tests. 
There is no need to ever install the \"django_grainy_test\" app in your", "namespace=ModelB.Grainy.namespace(), namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", ) class ModelC(ModelA): b = models.ForeignKey(ModelB, related_name=\"c\", on_delete=models.CASCADE) @grainy_model( namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\"", "the models used during the django_grainy unit tests. There is no need to", "your models here. \"\"\" These are the models used during the django_grainy unit", "models here. \"\"\" These are the models used during the django_grainy unit tests.", "@grainy_model(namespace=\"custom\", parent=\"x\") class ModelY(ModelA): x = models.ForeignKey(ModelX, related_name=\"y\", on_delete=models.CASCADE) @grainy_model(namespace=\"z\", parent=\"y\") class ModelZ(ModelA):", "class Meta: abstract = True @grainy_model() class ModelA(ModelBase): name = models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\") class", "import models from django_grainy.decorators import grainy_model from django_grainy.models import Permission, PermissionManager from django_grainy.handlers", "models from django_grainy.decorators import grainy_model from django_grainy.models import Permission, PermissionManager from django_grainy.handlers import", "@grainy_model(namespace=\"something.arbitrary\") class ModelB(ModelA): pass @grainy_model( namespace=ModelB.Grainy.namespace(), namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", ) class ModelC(ModelA): b = models.ForeignKey(ModelB,", "@grainy_model(namespace=\"x\") class ModelX(ModelA): pass @grainy_model(namespace=\"custom\", parent=\"x\") class ModelY(ModelA): x = models.ForeignKey(ModelX, related_name=\"y\", on_delete=models.CASCADE)", "name = models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\") class ModelB(ModelA): pass @grainy_model( namespace=ModelB.Grainy.namespace(), 
namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", ) class ModelC(ModelA):", "parent=\"y\") class ModelZ(ModelA): y = models.ForeignKey(ModelY, related_name=\"z\", on_delete=models.CASCADE) class APIKey(models.Model): key = models.CharField(max_length=255)", "here. \"\"\" These are the models used during the django_grainy unit tests. There", "models used during the django_grainy unit tests. There is no need to ever", "Meta: abstract = True @grainy_model() class ModelA(ModelBase): name = models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\") class ModelB(ModelA):", "abstract = True @grainy_model() class ModelA(ModelBase): name = models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\") class ModelB(ModelA): pass", "your project \"\"\" class ModelBase(GrainyMixin, models.Model): class Meta: abstract = True @grainy_model() class", ") class ModelC(ModelA): b = models.ForeignKey(ModelB, related_name=\"c\", on_delete=models.CASCADE) @grainy_model( namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\" ) class", "models.CharField(max_length=255) class APIKeyPermission(Permission): api_key = models.ForeignKey( APIKey, related_name=\"grainy_permissions\", on_delete=models.CASCADE ) objects = PermissionManager()", "GrainyMixin # Create your models here. \"\"\" These are the models used during", "These are the models used during the django_grainy unit tests. 
There is no", "key = models.CharField(max_length=255) class APIKeyPermission(Permission): api_key = models.ForeignKey( APIKey, related_name=\"grainy_permissions\", on_delete=models.CASCADE ) objects", "\"django_grainy_test\" app in your project \"\"\" class ModelBase(GrainyMixin, models.Model): class Meta: abstract =", "ModelB(ModelA): pass @grainy_model( namespace=ModelB.Grainy.namespace(), namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", ) class ModelC(ModelA): b = models.ForeignKey(ModelB, related_name=\"c\", on_delete=models.CASCADE)", "on_delete=models.CASCADE) @grainy_model(namespace=\"z\", parent=\"y\") class ModelZ(ModelA): y = models.ForeignKey(ModelY, related_name=\"z\", on_delete=models.CASCADE) class APIKey(models.Model): key", "class ModelX(ModelA): pass @grainy_model(namespace=\"custom\", parent=\"x\") class ModelY(ModelA): x = models.ForeignKey(ModelX, related_name=\"y\", on_delete=models.CASCADE) @grainy_model(namespace=\"z\",", "ModelD(ModelA): pass @grainy_model(namespace=\"x\") class ModelX(ModelA): pass @grainy_model(namespace=\"custom\", parent=\"x\") class ModelY(ModelA): x = models.ForeignKey(ModelX,", "related_name=\"y\", on_delete=models.CASCADE) @grainy_model(namespace=\"z\", parent=\"y\") class ModelZ(ModelA): y = models.ForeignKey(ModelY, related_name=\"z\", on_delete=models.CASCADE) class APIKey(models.Model):", "to ever install the \"django_grainy_test\" app in your project \"\"\" class ModelBase(GrainyMixin, models.Model):", "ModelX(ModelA): pass @grainy_model(namespace=\"custom\", parent=\"x\") class ModelY(ModelA): x = models.ForeignKey(ModelX, related_name=\"y\", on_delete=models.CASCADE) @grainy_model(namespace=\"z\", parent=\"y\")", "class ModelB(ModelA): pass @grainy_model( namespace=ModelB.Grainy.namespace(), namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", ) class ModelC(ModelA): b = models.ForeignKey(ModelB, related_name=\"c\",", "ever install the \"django_grainy_test\" app in your 
project \"\"\" class ModelBase(GrainyMixin, models.Model): class", "django.db import models from django_grainy.decorators import grainy_model from django_grainy.models import Permission, PermissionManager from", "import GrainyMixin # Create your models here. \"\"\" These are the models used", "on_delete=models.CASCADE) @grainy_model( namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\" ) class ModelD(ModelA): pass @grainy_model(namespace=\"x\") class ModelX(ModelA): pass @grainy_model(namespace=\"custom\",", "pass @grainy_model(namespace=\"x\") class ModelX(ModelA): pass @grainy_model(namespace=\"custom\", parent=\"x\") class ModelY(ModelA): x = models.ForeignKey(ModelX, related_name=\"y\",", "ModelC(ModelA): b = models.ForeignKey(ModelB, related_name=\"c\", on_delete=models.CASCADE) @grainy_model( namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\" ) class ModelD(ModelA): pass", "# Create your models here. \"\"\" These are the models used during the", "django_grainy.decorators import grainy_model from django_grainy.models import Permission, PermissionManager from django_grainy.handlers import GrainyMixin #", "y = models.ForeignKey(ModelY, related_name=\"z\", on_delete=models.CASCADE) class APIKey(models.Model): key = models.CharField(max_length=255) class APIKeyPermission(Permission): api_key", "= True @grainy_model() class ModelA(ModelBase): name = models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\") class ModelB(ModelA): pass @grainy_model(", "Create your models here. 
\"\"\" These are the models used during the django_grainy", "= models.ForeignKey(ModelB, related_name=\"c\", on_delete=models.CASCADE) @grainy_model( namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\" ) class ModelD(ModelA): pass @grainy_model(namespace=\"x\") class", "need to ever install the \"django_grainy_test\" app in your project \"\"\" class ModelBase(GrainyMixin,", "\"\"\" class ModelBase(GrainyMixin, models.Model): class Meta: abstract = True @grainy_model() class ModelA(ModelBase): name", "namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", ) class ModelC(ModelA): b = models.ForeignKey(ModelB, related_name=\"c\", on_delete=models.CASCADE) @grainy_model( namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\" )", "ModelBase(GrainyMixin, models.Model): class Meta: abstract = True @grainy_model() class ModelA(ModelBase): name = models.CharField(max_length=255)", "namespace_instance=\"{namespace}.{other_value}\" ) class ModelD(ModelA): pass @grainy_model(namespace=\"x\") class ModelX(ModelA): pass @grainy_model(namespace=\"custom\", parent=\"x\") class ModelY(ModelA):", "no need to ever install the \"django_grainy_test\" app in your project \"\"\" class", "class ModelD(ModelA): pass @grainy_model(namespace=\"x\") class ModelX(ModelA): pass @grainy_model(namespace=\"custom\", parent=\"x\") class ModelY(ModelA): x =", "models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\") class ModelB(ModelA): pass @grainy_model( namespace=ModelB.Grainy.namespace(), namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", ) class ModelC(ModelA): b =", "project \"\"\" class ModelBase(GrainyMixin, models.Model): class Meta: abstract = True @grainy_model() class ModelA(ModelBase):", "True @grainy_model() class ModelA(ModelBase): name = models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\") class ModelB(ModelA): pass @grainy_model( 
namespace=ModelB.Grainy.namespace(),", "used during the django_grainy unit tests. There is no need to ever install", "\"\"\" These are the models used during the django_grainy unit tests. There is", "the \"django_grainy_test\" app in your project \"\"\" class ModelBase(GrainyMixin, models.Model): class Meta: abstract", "parent=\"x\") class ModelY(ModelA): x = models.ForeignKey(ModelX, related_name=\"y\", on_delete=models.CASCADE) @grainy_model(namespace=\"z\", parent=\"y\") class ModelZ(ModelA): y", "from django_grainy.models import Permission, PermissionManager from django_grainy.handlers import GrainyMixin # Create your models", "is no need to ever install the \"django_grainy_test\" app in your project \"\"\"", "@grainy_model( namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\" ) class ModelD(ModelA): pass @grainy_model(namespace=\"x\") class ModelX(ModelA): pass @grainy_model(namespace=\"custom\", parent=\"x\")", "models.ForeignKey(ModelY, related_name=\"z\", on_delete=models.CASCADE) class APIKey(models.Model): key = models.CharField(max_length=255) class APIKeyPermission(Permission): api_key = models.ForeignKey(", "models.Model): class Meta: abstract = True @grainy_model() class ModelA(ModelBase): name = models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\")", "APIKey(models.Model): key = models.CharField(max_length=255) class APIKeyPermission(Permission): api_key = models.ForeignKey( APIKey, related_name=\"grainy_permissions\", on_delete=models.CASCADE )", "in your project \"\"\" class ModelBase(GrainyMixin, models.Model): class Meta: abstract = True @grainy_model()", "during the django_grainy unit tests. There is no need to ever install the", "@grainy_model(namespace=\"z\", parent=\"y\") class ModelZ(ModelA): y = models.ForeignKey(ModelY, related_name=\"z\", on_delete=models.CASCADE) class APIKey(models.Model): key =", "from django_grainy.handlers import GrainyMixin # Create your models here. 
\"\"\" These are the", "django_grainy.handlers import GrainyMixin # Create your models here. \"\"\" These are the models", "grainy_model from django_grainy.models import Permission, PermissionManager from django_grainy.handlers import GrainyMixin # Create your", "class ModelY(ModelA): x = models.ForeignKey(ModelX, related_name=\"y\", on_delete=models.CASCADE) @grainy_model(namespace=\"z\", parent=\"y\") class ModelZ(ModelA): y =", "the django_grainy unit tests. There is no need to ever install the \"django_grainy_test\"", "import Permission, PermissionManager from django_grainy.handlers import GrainyMixin # Create your models here. \"\"\"", "Permission, PermissionManager from django_grainy.handlers import GrainyMixin # Create your models here. \"\"\" These", "= models.ForeignKey(ModelX, related_name=\"y\", on_delete=models.CASCADE) @grainy_model(namespace=\"z\", parent=\"y\") class ModelZ(ModelA): y = models.ForeignKey(ModelY, related_name=\"z\", on_delete=models.CASCADE)", "= models.CharField(max_length=255) class APIKeyPermission(Permission): api_key = models.ForeignKey( APIKey, related_name=\"grainy_permissions\", on_delete=models.CASCADE ) objects =", "django_grainy unit tests. 
There is no need to ever install the \"django_grainy_test\" app", "pass @grainy_model( namespace=ModelB.Grainy.namespace(), namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", ) class ModelC(ModelA): b = models.ForeignKey(ModelB, related_name=\"c\", on_delete=models.CASCADE) @grainy_model(", "app in your project \"\"\" class ModelBase(GrainyMixin, models.Model): class Meta: abstract = True", "b = models.ForeignKey(ModelB, related_name=\"c\", on_delete=models.CASCADE) @grainy_model( namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\" ) class ModelD(ModelA): pass @grainy_model(namespace=\"x\")", "@grainy_model() class ModelA(ModelBase): name = models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\") class ModelB(ModelA): pass @grainy_model( namespace=ModelB.Grainy.namespace(), namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\",", "ModelY(ModelA): x = models.ForeignKey(ModelX, related_name=\"y\", on_delete=models.CASCADE) @grainy_model(namespace=\"z\", parent=\"y\") class ModelZ(ModelA): y = models.ForeignKey(ModelY,", "models.ForeignKey(ModelX, related_name=\"y\", on_delete=models.CASCADE) @grainy_model(namespace=\"z\", parent=\"y\") class ModelZ(ModelA): y = models.ForeignKey(ModelY, related_name=\"z\", on_delete=models.CASCADE) class", "class ModelBase(GrainyMixin, models.Model): class Meta: abstract = True @grainy_model() class ModelA(ModelBase): name =", "install the \"django_grainy_test\" app in your project \"\"\" class ModelBase(GrainyMixin, models.Model): class Meta:", "are the models used during the django_grainy unit tests. 
There is no need", "class APIKey(models.Model): key = models.CharField(max_length=255) class APIKeyPermission(Permission): api_key = models.ForeignKey( APIKey, related_name=\"grainy_permissions\", on_delete=models.CASCADE", "There is no need to ever install the \"django_grainy_test\" app in your project", "@grainy_model( namespace=ModelB.Grainy.namespace(), namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", ) class ModelC(ModelA): b = models.ForeignKey(ModelB, related_name=\"c\", on_delete=models.CASCADE) @grainy_model( namespace=\"dynamic.{value}\",", "from django.db import models from django_grainy.decorators import grainy_model from django_grainy.models import Permission, PermissionManager", "on_delete=models.CASCADE) class APIKey(models.Model): key = models.CharField(max_length=255) class APIKeyPermission(Permission): api_key = models.ForeignKey( APIKey, related_name=\"grainy_permissions\",", "= models.ForeignKey(ModelY, related_name=\"z\", on_delete=models.CASCADE) class APIKey(models.Model): key = models.CharField(max_length=255) class APIKeyPermission(Permission): api_key =", "pass @grainy_model(namespace=\"custom\", parent=\"x\") class ModelY(ModelA): x = models.ForeignKey(ModelX, related_name=\"y\", on_delete=models.CASCADE) @grainy_model(namespace=\"z\", parent=\"y\") class", "namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\" ) class ModelD(ModelA): pass @grainy_model(namespace=\"x\") class ModelX(ModelA): pass @grainy_model(namespace=\"custom\", parent=\"x\") class", "unit tests. 
There is no need to ever install the \"django_grainy_test\" app in", ") class ModelD(ModelA): pass @grainy_model(namespace=\"x\") class ModelX(ModelA): pass @grainy_model(namespace=\"custom\", parent=\"x\") class ModelY(ModelA): x", "class ModelZ(ModelA): y = models.ForeignKey(ModelY, related_name=\"z\", on_delete=models.CASCADE) class APIKey(models.Model): key = models.CharField(max_length=255) class", "models.ForeignKey(ModelB, related_name=\"c\", on_delete=models.CASCADE) @grainy_model( namespace=\"dynamic.{value}\", namespace_instance=\"{namespace}.{other_value}\" ) class ModelD(ModelA): pass @grainy_model(namespace=\"x\") class ModelX(ModelA):", "ModelZ(ModelA): y = models.ForeignKey(ModelY, related_name=\"z\", on_delete=models.CASCADE) class APIKey(models.Model): key = models.CharField(max_length=255) class APIKeyPermission(Permission):", "from django_grainy.decorators import grainy_model from django_grainy.models import Permission, PermissionManager from django_grainy.handlers import GrainyMixin", "ModelA(ModelBase): name = models.CharField(max_length=255) @grainy_model(namespace=\"something.arbitrary\") class ModelB(ModelA): pass @grainy_model( namespace=ModelB.Grainy.namespace(), namespace_instance=\"{namespace}.{instance.b.id}.c.{instance.id}\", ) class" ]
[ "DispatcherHandlerStop, Dispatcher from telegram import Update, User, Message, ParseMode from telegram.error import BadRequest", "_ _ __ _ __ _ _ __ __ _ | | _", "CommandHandler, MessageHandler, Filters, Handler from telegram.ext.dispatcher import run_async, DispatcherHandlerStop, Dispatcher from telegram import", "_| | ___ \\ | | \\n\") print(\"\\ `--.| |__ _ _ __", "x['locationAddr']['city'] + \", \" + x['locationAddr']['regionCd'] + \" (\" + x['locationAddr']['countryCd'] + \")\",", "(_) | |_ \\n\") print(\"\\____/|_| |_|_| .__/| .__/|_|_| |_|\\__, \\___/_| |_|_| \\___/\\____/ \\___/", "'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 'Description: '+jsonData['currentStatus']['eventDescription'], 'Location: '+jsonData['currentStatus']['eventLocation']['city']+\", \"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - '+jsonData['currentStatus']['eventLocation']['postalOrZipCode'] ] except", "updater.start_polling() updater.idle() if __name__ == \"__main__\": print(\" _____ _ _ _ _____ __", "for Canada Post canadapost_handler = CommandHandler(\"canadapost\", canadapost) dispatcher.add_handler(canadapost_handler) updater.start_polling() updater.idle() if __name__ ==", "update.message!=None: trackingID = (update.message.text).split()[1] response = requests.get(\"https://parceltracking.pb.com/ptsapi/track-packages/\"+trackingID) jsonData = json.loads(response.text) try: currentStatusData =", "#Tracking Function for Pitney Bowes @run_async def pitneyb(update: Update, context: CallbackContext): if update.message!=None:", "| | | || (_) | |_/ / (_) | |_ \\n\") print(\"\\____/|_|", "json.loads(response.text) try: currentStatusData = [ 'Status: '+jsonData['currentStatus']['packageStatus'], 'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 'Description: '+jsonData['currentStatus']['eventDescription'],", "___ | |_/ / ___ | |_ \\n\") print(\" `--. 
\\ '_ \\|", "\\___/ \\__|\\n\") print(\" | | | | __/ | \\n\") print(\" |_| |_|", "status = jsonData['status'] history = [] for x in jsonData['events']: history.append([ 'Date: '+", "'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 'Description: '+jsonData['currentStatus']['eventDescription'], 'Location: '+jsonData['currentStatus']['eventLocation']['city']+\", \"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - ' ] currentStatusData", "E-Kart Logistics @run_async def ekart(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1]", "parse_mode=ParseMode.MARKDOWN) #Bot Start Message /start @run_async def start(update: Update, context: CallbackContext): context.bot.sendChatAction(update.effective_chat.id, \"typing\")", "= requests_html.HTMLSession() response = session.get(\"https://ekartlogistics.com/track/\"+str(trackingID)+\"/\") for selector in response.html.xpath('//div[@id=\"no-more-tables\"][1]/table/tbody'): data.append(selector.text) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status:", "] except KeyError: currentStatusData = [ 'Status: '+jsonData['currentStatus']['packageStatus'], 'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 'Description:", "history.append([ 'Status: '+x['packageStatus'], 'Last Updated: '+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'], 'Location: '+x['eventLocation']['city']+\", \"+x['eventLocation']['countyOrRegion']+' -", "@run_async def canadapost(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] response =", "Function for Pitney Bowes @run_async def pitneyb(update: Update, context: CallbackContext): if update.message!=None: trackingID", "in jsonData['scanHistory']['scanDetails']: try: history.append([ 'Status: '+x['packageStatus'], 'Last 
Updated: '+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'], 'Location:", "you latest tracking info on your package.\\n\\nUse the following commands to access your", "\", \" + x['locationAddr']['regionCd'] + \" (\" + x['locationAddr']['countryCd'] + \")\", 'Description: '+", "history = [] for x in jsonData['scanHistory']['scanDetails']: try: history.append([ 'Status: '+x['packageStatus'], 'Last Updated:", "(update.message.text).split()[1] response = requests.get(\"https://parceltracking.pb.com/ptsapi/track-packages/\"+trackingID) jsonData = json.loads(response.text) try: currentStatusData = [ 'Status: '+jsonData['currentStatus']['packageStatus'],", "for Canada Post @run_async def canadapost(update: Update, context: CallbackContext): if update.message!=None: trackingID =", "ekart(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] data = [] session", "jsonData['events']: history.append([ 'Date: '+ x['datetime']['date'] + x['datetime']['time'] + x['datetime']['zoneOffset'], 'Location: '+ x['locationAddr']['city'] +", "'+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'], ]) historyData = [] for i in range(len(history)): historyData.append(\"\\n\".join(history[i]))", "Pitney Bowes @run_async def pitneyb(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1]", "\" + x['locationAddr']['regionCd'] + \" (\" + x['locationAddr']['countryCd'] + \")\", 'Description: '+ x['descEn']", "Status: *\\n\\n`Latest Status:\\n\"+currentStatusData+\"`\\n\\n*Tracking Info:*\\n\\n`\"+historyData+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Tracking Function for Canada Post @run_async def", "your package tracking info.\") def main(): start_handler = CommandHandler(\"start\", start) dispatcher.add_handler(start_handler) #Command handler", "pitneyb) 
dispatcher.add_handler(pitneyb_handler) #Command handler for Canada Post canadapost_handler = CommandHandler(\"canadapost\", canadapost) dispatcher.add_handler(canadapost_handler) updater.start_polling()", "requests.get(\"https://www.canadapost.ca/trackweb/rs/track/json/package/\"+trackingID+\"/detail\") jsonData = json.loads(response.text) status = jsonData['status'] history = [] for x in", "\"typing\") cmd_msg = context.bot.send_message(chat_id=update.effective_chat.id, text=\"Hey there! I'm Shipping Info Bot!\\nI can provide you", "= (update.message.text).split()[1] response = requests.get(\"https://www.canadapost.ca/trackweb/rs/track/json/package/\"+trackingID+\"/detail\") jsonData = json.loads(response.text) status = jsonData['status'] history =", "Updated: '+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'], 'Location: '+x['eventLocation']['city']+\", \"+x['eventLocation']['countyOrRegion']+' - '+x['eventLocation']['postalOrZipCode'] ]) except KeyError:", "history.append([ 'Status: '+x['packageStatus'], 'Last Updated: '+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'], ]) historyData = []", "| '_ \\| '_ \\| | '_ \\ / _` || || '_", "= json.loads(response.text) try: currentStatusData = [ 'Status: '+jsonData['currentStatus']['packageStatus'], 'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 'Description:", "|_ \\n\") print(\" `--. 
\\ '_ \\| | '_ \\| '_ \\| |", "_| / _| | ___ \\ | | \\n\") print(\"\\ `--.| |__ _", "Update, User, Message, ParseMode from telegram.error import BadRequest import requests_html import requests import", "'+ x['datetime']['date'] + x['datetime']['time'] + x['datetime']['zoneOffset'], 'Location: '+ x['locationAddr']['city'] + \", \" +", "Bowes @run_async def pitneyb(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] response", "def canadapost(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] response = requests.get(\"https://www.canadapost.ca/trackweb/rs/track/json/package/\"+trackingID+\"/detail\")", "/start @run_async def start(update: Update, context: CallbackContext): context.bot.sendChatAction(update.effective_chat.id, \"typing\") cmd_msg = context.bot.send_message(chat_id=update.effective_chat.id, text=\"Hey", "= CommandHandler(\"pitneyb\", pitneyb) dispatcher.add_handler(pitneyb_handler) #Command handler for Canada Post canadapost_handler = CommandHandler(\"canadapost\", canadapost)", "| |_ \\n\") print(\" `--. 
\\ '_ \\| | '_ \\| '_ \\|", "import logging #Enter API-KEY here updater = Updater(\"API-KEY\", use_context=True) dispatcher = updater.dispatcher logging.basicConfig(filename=\"shipping.log\",", "pitneyb_handler = CommandHandler(\"pitneyb\", pitneyb) dispatcher.add_handler(pitneyb_handler) #Command handler for Canada Post canadapost_handler = CommandHandler(\"canadapost\",", "for selector in response.html.xpath('//div[@id=\"no-more-tables\"][1]/table/tbody'): data.append(selector.text) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status: \"+data[0]+\"`\\n\\n*Tracking Info:*\\n\\n`\"+data[1]+\"`\", reply_to_message_id=update.message.message_id,", "jsonData = json.loads(response.text) try: currentStatusData = [ 'Status: '+jsonData['currentStatus']['packageStatus'], 'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'],", "historyData = \"\\n\\n\".join(historyData) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status:\\n\"+currentStatusData+\"`\\n\\n*Tracking Info:*\\n\\n`\"+historyData+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Bot Start", "Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] data = [] session =", "CommandHandler(\"pitneyb\", pitneyb) dispatcher.add_handler(pitneyb_handler) #Command handler for Canada Post canadapost_handler = CommandHandler(\"canadapost\", canadapost) dispatcher.add_handler(canadapost_handler)", "json import logging #Enter API-KEY here updater = Updater(\"API-KEY\", use_context=True) dispatcher = updater.dispatcher", "' ] currentStatusData = \"\\n\".join(currentStatusData) history = [] for x in jsonData['scanHistory']['scanDetails']: try:", "(update.message.text).split()[1] response = requests.get(\"https://www.canadapost.ca/trackweb/rs/track/json/package/\"+trackingID+\"/detail\") jsonData 
= json.loads(response.text) status = jsonData['status'] history = []", "handler for E-Kart Logistics ekart_handler = CommandHandler(\"ekart\", ekart) dispatcher.add_handler(ekart_handler) #Command handler for Pitney", "Message /start @run_async def start(update: Update, context: CallbackContext): context.bot.sendChatAction(update.effective_chat.id, \"typing\") cmd_msg = context.bot.send_message(chat_id=update.effective_chat.id,", "commands to access your package tracking info.\") def main(): start_handler = CommandHandler(\"start\", start)", "__ __ _ | | _ __ | |_ ___ | |_/ /", "(_) |_ _| / _| | ___ \\ | | \\n\") print(\"\\ `--.|", "| |_) | |_) | | | | | (_| || || |", "_ __ __ _ | | _ __ | |_ ___ | |_/", "|_ ___ | |_/ / ___ | |_ \\n\") print(\" `--. \\ '_", "Canada Post @run_async def canadapost(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1]", "context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] response = requests.get(\"https://parceltracking.pb.com/ptsapi/track-packages/\"+trackingID) jsonData = json.loads(response.text)", "= jsonData['status'] history = [] for x in jsonData['events']: history.append([ 'Date: '+ x['datetime']['date']", "E-Kart Logistics ekart_handler = CommandHandler(\"ekart\", ekart) dispatcher.add_handler(ekart_handler) #Command handler for Pitney Bowes pitneyb_handler", "history.append([ 'Date: '+ x['datetime']['date'] + x['datetime']['time'] + x['datetime']['zoneOffset'], 'Location: '+ x['locationAddr']['city'] + \",", "canadapost_handler = CommandHandler(\"canadapost\", canadapost) dispatcher.add_handler(canadapost_handler) updater.start_polling() updater.idle() if __name__ == \"__main__\": print(\" _____", "import requests_html import requests import json import logging #Enter API-KEY here updater =", "%(name)s - %(levelname)s - %(message)s', level=logging.INFO) #Tracking Function for E-Kart Logistics @run_async def", "x in 
jsonData['events']: history.append([ 'Date: '+ x['datetime']['date'] + x['datetime']['time'] + x['datetime']['zoneOffset'], 'Location: '+", "dispatcher = updater.dispatcher logging.basicConfig(filename=\"shipping.log\", format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO) #Tracking", "logging.basicConfig(filename=\"shipping.log\", format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO) #Tracking Function for E-Kart", "\\| __|\\n\") print(\"/\\__/ / | | | | |_) | |_) | |", "| \\n\") print(\"\\ `--.| |__ _ _ __ _ __ _ _ __", "#Enter API-KEY here updater = Updater(\"API-KEY\", use_context=True) dispatcher = updater.dispatcher logging.basicConfig(filename=\"shipping.log\", format='%(asctime)s -", "\\n\") print(\"/ ___| | (_) (_) |_ _| / _| | ___ \\", "CommandHandler(\"ekart\", ekart) dispatcher.add_handler(ekart_handler) #Command handler for Pitney Bowes pitneyb_handler = CommandHandler(\"pitneyb\", pitneyb) dispatcher.add_handler(pitneyb_handler)", "x['datetime']['date'] + x['datetime']['time'] + x['datetime']['zoneOffset'], 'Location: '+ x['locationAddr']['city'] + \", \" + x['locationAddr']['regionCd']", "*\\n\\n`Latest Status:\\n\"+currentStatusData+\"`\\n\\n*Tracking Info:*\\n\\n`\"+historyData+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Tracking Function for Canada Post @run_async def canadapost(update:", "@run_async def start(update: Update, context: CallbackContext): context.bot.sendChatAction(update.effective_chat.id, \"typing\") cmd_msg = context.bot.send_message(chat_id=update.effective_chat.id, text=\"Hey there!", "x['datetime']['time'] + x['datetime']['zoneOffset'], 'Location: '+ x['locationAddr']['city'] + \", \" + x['locationAddr']['regionCd'] + \"", "Info Bot!\\nI can provide you latest tracking info on your package.\\n\\nUse the following", "Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 'Description: 
'+jsonData['currentStatus']['eventDescription'], 'Location: '+jsonData['currentStatus']['eventLocation']['city']+\", \"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - '+jsonData['currentStatus']['eventLocation']['postalOrZipCode'] ] except KeyError:", "range(len(history)): historyData.append(\"\\n\".join(history[i])) historyData = \"\\n\\n\".join(historyData) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status:\\n\"+currentStatusData+\"`\\n\\n*Tracking Info:*\\n\\n`\"+historyData+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN)", "_____ __ ______ _ \\n\") print(\"/ ___| | (_) (_) |_ _| /", "from telegram.ext import Updater, CallbackContext, CommandHandler, MessageHandler, Filters, Handler from telegram.ext.dispatcher import run_async,", "\\ '_ \\| | '_ \\| '_ \\| | '_ \\ / _`", "KeyError: history.append([ 'Status: '+x['packageStatus'], 'Last Updated: '+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'], ]) historyData =", "|_/ / (_) | |_ \\n\") print(\"\\____/|_| |_|_| .__/| .__/|_|_| |_|\\__, \\___/_| |_|_|", "Status: *\\n\\n`Latest Status: \"+data[0]+\"`\\n\\n*Tracking Info:*\\n\\n`\"+data[1]+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Tracking Function for Pitney Bowes @run_async", "/ _| | ___ \\ | | \\n\") print(\"\\ `--.| |__ _ _", "= requests.get(\"https://parceltracking.pb.com/ptsapi/track-packages/\"+trackingID) jsonData = json.loads(response.text) try: currentStatusData = [ 'Status: '+jsonData['currentStatus']['packageStatus'], 'Last Updated:", "\"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - ' ] currentStatusData = \"\\n\".join(currentStatusData) history = [] for x in", "|_) | | | | | (_| || || | | | ||", "[ 'Status: '+jsonData['currentStatus']['packageStatus'], 'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 
'Description: '+jsonData['currentStatus']['eventDescription'], 'Location: '+jsonData['currentStatus']['eventLocation']['city']+\", \"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' -", "= CommandHandler(\"ekart\", ekart) dispatcher.add_handler(ekart_handler) #Command handler for Pitney Bowes pitneyb_handler = CommandHandler(\"pitneyb\", pitneyb)", "i in range(len(history)): historyData.append(\"\\n\".join(history[i])) historyData = \"\\n\\n\".join(historyData) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status:\\n\"+currentStatusData+\"`\\n\\n*Tracking Info:*\\n\\n`\"+historyData+\"`\",", "/ _` || || '_ \\| _/ _ \\| ___ \\/ _ \\|", "there! I'm Shipping Info Bot!\\nI can provide you latest tracking info on your", "'+jsonData['currentStatus']['packageStatus'], 'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 'Description: '+jsonData['currentStatus']['eventDescription'], 'Location: '+jsonData['currentStatus']['eventLocation']['city']+\", \"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - ' ]", "\\__|\\n\") print(\" | | | | __/ | \\n\") print(\" |_| |_| |___/", "\\/ _ \\| __|\\n\") print(\"/\\__/ / | | | | |_) | |_)", "dispatcher.add_handler(ekart_handler) #Command handler for Pitney Bowes pitneyb_handler = CommandHandler(\"pitneyb\", pitneyb) dispatcher.add_handler(pitneyb_handler) #Command handler", "\\| ___ \\/ _ \\| __|\\n\") print(\"/\\__/ / | | | | |_)", "'Status: '+jsonData['currentStatus']['packageStatus'], 'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 'Description: '+jsonData['currentStatus']['eventDescription'], 'Location: '+jsonData['currentStatus']['eventLocation']['city']+\", \"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - '+jsonData['currentStatus']['eventLocation']['postalOrZipCode']", "updater.dispatcher 
logging.basicConfig(filename=\"shipping.log\", format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO) #Tracking Function for", "| | | | __/ | \\n\") print(\" |_| |_| |___/ \") main()", "Post canadapost_handler = CommandHandler(\"canadapost\", canadapost) dispatcher.add_handler(canadapost_handler) updater.start_polling() updater.idle() if __name__ == \"__main__\": print(\"", "- ' ] currentStatusData = \"\\n\".join(currentStatusData) history = [] for x in jsonData['scanHistory']['scanDetails']:", "(update.message.text).split()[1] data = [] session = requests_html.HTMLSession() response = session.get(\"https://ekartlogistics.com/track/\"+str(trackingID)+\"/\") for selector in", "except KeyError: history.append([ 'Status: '+x['packageStatus'], 'Last Updated: '+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'], ]) historyData", "\"\\n\".join(currentStatusData) history = [] for x in jsonData['scanHistory']['scanDetails']: try: history.append([ 'Status: '+x['packageStatus'], 'Last", "update.message!=None: trackingID = (update.message.text).split()[1] data = [] session = requests_html.HTMLSession() response = session.get(\"https://ekartlogistics.com/track/\"+str(trackingID)+\"/\")", "= [] for x in jsonData['scanHistory']['scanDetails']: try: history.append([ 'Status: '+x['packageStatus'], 'Last Updated: '+x['eventDate']+'", "requests.get(\"https://parceltracking.pb.com/ptsapi/track-packages/\"+trackingID) jsonData = json.loads(response.text) try: currentStatusData = [ 'Status: '+jsonData['currentStatus']['packageStatus'], 'Last Updated: '+jsonData['currentStatus']['eventDate']+'", "Canada Post canadapost_handler = CommandHandler(\"canadapost\", canadapost) dispatcher.add_handler(canadapost_handler) updater.start_polling() updater.idle() if __name__ == \"__main__\":", "text=\"*Shipping Status: *\\n\\n`Latest Status: \"+data[0]+\"`\\n\\n*Tracking Info:*\\n\\n`\"+data[1]+\"`\", reply_to_message_id=update.message.message_id, 
parse_mode=ParseMode.MARKDOWN) #Tracking Function for Pitney Bowes", "Logistics @run_async def ekart(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] data", "dispatcher.add_handler(start_handler) #Command handler for E-Kart Logistics ekart_handler = CommandHandler(\"ekart\", ekart) dispatcher.add_handler(ekart_handler) #Command handler", "trackingID = (update.message.text).split()[1] response = requests.get(\"https://www.canadapost.ca/trackweb/rs/track/json/package/\"+trackingID+\"/detail\") jsonData = json.loads(response.text) status = jsonData['status'] history", "handler for Canada Post canadapost_handler = CommandHandler(\"canadapost\", canadapost) dispatcher.add_handler(canadapost_handler) updater.start_polling() updater.idle() if __name__", "___ \\ | | \\n\") print(\"\\ `--.| |__ _ _ __ _ __", "level=logging.INFO) #Tracking Function for E-Kart Logistics @run_async def ekart(update: Update, context: CallbackContext): if", "def ekart(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] data = []", "def pitneyb(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] response = requests.get(\"https://parceltracking.pb.com/ptsapi/track-packages/\"+trackingID)", "currentStatusData = [ 'Status: '+jsonData['currentStatus']['packageStatus'], 'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 'Description: '+jsonData['currentStatus']['eventDescription'], 'Location: '+jsonData['currentStatus']['eventLocation']['city']+\",", "_ \\| __|\\n\") print(\"/\\__/ / | | | | |_) | |_) |", "'Status: '+x['packageStatus'], 'Last Updated: '+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'], ]) historyData = [] for", "print(\"/\\__/ / | | | | |_) | |_) | | | |", "from telegram.error import BadRequest import requests_html import 
requests import json import logging #Enter", "- %(levelname)s - %(message)s', level=logging.INFO) #Tracking Function for E-Kart Logistics @run_async def ekart(update:", "session.get(\"https://ekartlogistics.com/track/\"+str(trackingID)+\"/\") for selector in response.html.xpath('//div[@id=\"no-more-tables\"][1]/table/tbody'): data.append(selector.text) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status: \"+data[0]+\"`\\n\\n*Tracking Info:*\\n\\n`\"+data[1]+\"`\",", "start(update: Update, context: CallbackContext): context.bot.sendChatAction(update.effective_chat.id, \"typing\") cmd_msg = context.bot.send_message(chat_id=update.effective_chat.id, text=\"Hey there! I'm Shipping", "__ _ _ __ __ _ | | _ __ | |_ ___", "Handler from telegram.ext.dispatcher import run_async, DispatcherHandlerStop, Dispatcher from telegram import Update, User, Message,", "history = [] for x in jsonData['events']: history.append([ 'Date: '+ x['datetime']['date'] + x['datetime']['time']", "'+x['eventTime'], 'Description: '+x['eventDescription'], 'Location: '+x['eventLocation']['city']+\", \"+x['eventLocation']['countyOrRegion']+' - '+x['eventLocation']['postalOrZipCode'] ]) except KeyError: history.append([ 'Status:", "I'm Shipping Info Bot!\\nI can provide you latest tracking info on your package.\\n\\nUse", "Info:*\\n\\n`\"+data[1]+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Tracking Function for Pitney Bowes @run_async def pitneyb(update: Update, context:", "Dispatcher from telegram import Update, User, Message, ParseMode from telegram.error import BadRequest import", "data.append(selector.text) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status: \"+data[0]+\"`\\n\\n*Tracking Info:*\\n\\n`\"+data[1]+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Tracking Function for", "[] for x in 
jsonData['scanHistory']['scanDetails']: try: history.append([ 'Status: '+x['packageStatus'], 'Last Updated: '+x['eventDate']+' '+x['eventTime'],", "'+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 'Description: '+jsonData['currentStatus']['eventDescription'], 'Location: '+jsonData['currentStatus']['eventLocation']['city']+\", \"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - '+jsonData['currentStatus']['eventLocation']['postalOrZipCode'] ] except KeyError: currentStatusData", "_ \\n\") print(\"/ ___| | (_) (_) |_ _| / _| | ___", "+ \")\", 'Description: '+ x['descEn'] ]) currentStatusData = history[0] currentStatusData = \"\\n\".join(currentStatusData) del", "\" (\" + x['locationAddr']['countryCd'] + \")\", 'Description: '+ x['descEn'] ]) currentStatusData = history[0]", "|| | | | || (_) | |_/ / (_) | |_ \\n\")", "= \"\\n\\n\".join(historyData) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status:\\n\"+currentStatusData+\"`\\n\\n*Tracking Info:*\\n\\n`\"+historyData+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Tracking Function for", "run_async, DispatcherHandlerStop, Dispatcher from telegram import Update, User, Message, ParseMode from telegram.error import", "'Description: '+jsonData['currentStatus']['eventDescription'], 'Location: '+jsonData['currentStatus']['eventLocation']['city']+\", \"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - ' ] currentStatusData = \"\\n\".join(currentStatusData) history =", "#Command handler for E-Kart Logistics ekart_handler = CommandHandler(\"ekart\", ekart) dispatcher.add_handler(ekart_handler) #Command handler for", "jsonData['status'] history = [] for x in jsonData['events']: history.append([ 'Date: '+ x['datetime']['date'] +", "___ \\/ _ \\| __|\\n\") print(\"/\\__/ / | | | | |_) |", "response = 
requests.get(\"https://parceltracking.pb.com/ptsapi/track-packages/\"+trackingID) jsonData = json.loads(response.text) try: currentStatusData = [ 'Status: '+jsonData['currentStatus']['packageStatus'], 'Last", "x['datetime']['zoneOffset'], 'Location: '+ x['locationAddr']['city'] + \", \" + x['locationAddr']['regionCd'] + \" (\" +", "\"+x['eventLocation']['countyOrRegion']+' - '+x['eventLocation']['postalOrZipCode'] ]) except KeyError: history.append([ 'Status: '+x['packageStatus'], 'Last Updated: '+x['eventDate']+' '+x['eventTime'],", "telegram.error import BadRequest import requests_html import requests import json import logging #Enter API-KEY", "import run_async, DispatcherHandlerStop, Dispatcher from telegram import Update, User, Message, ParseMode from telegram.error", "try: history.append([ 'Status: '+x['packageStatus'], 'Last Updated: '+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'], 'Location: '+x['eventLocation']['city']+\", \"+x['eventLocation']['countyOrRegion']+'", "_ _____ __ ______ _ \\n\") print(\"/ ___| | (_) (_) |_ _|", "== \"__main__\": print(\" _____ _ _ _ _____ __ ______ _ \\n\") print(\"/", "response.html.xpath('//div[@id=\"no-more-tables\"][1]/table/tbody'): data.append(selector.text) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status: \"+data[0]+\"`\\n\\n*Tracking Info:*\\n\\n`\"+data[1]+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Tracking Function", "| (_) (_) |_ _| / _| | ___ \\ | | \\n\")", "print(\"\\ `--.| |__ _ _ __ _ __ _ _ __ __ _", "Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] response = requests.get(\"https://www.canadapost.ca/trackweb/rs/track/json/package/\"+trackingID+\"/detail\") jsonData =", "for i in range(len(history)): historyData.append(\"\\n\".join(history[i])) historyData = \"\\n\\n\".join(historyData) 
context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status:\\n\"+currentStatusData+\"`\\n\\n*Tracking", "= context.bot.send_message(chat_id=update.effective_chat.id, text=\"Hey there! I'm Shipping Info Bot!\\nI can provide you latest tracking", "access your package tracking info.\") def main(): start_handler = CommandHandler(\"start\", start) dispatcher.add_handler(start_handler) #Command", "]) historyData = [] for i in range(len(history)): historyData.append(\"\\n\".join(history[i])) historyData = \"\\n\\n\".join(historyData) context.bot.send_message(chat_id=update.effective_chat.id,", "'+x['packageStatus'], 'Last Updated: '+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'], ]) historyData = [] for i", "(\" + x['locationAddr']['countryCd'] + \")\", 'Description: '+ x['descEn'] ]) currentStatusData = history[0] currentStatusData", "`--. \\ '_ \\| | '_ \\| '_ \\| | '_ \\ /", "/ | | | | |_) | |_) | | | | |", "CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] data = [] session = requests_html.HTMLSession() response", "'+ x['locationAddr']['city'] + \", \" + x['locationAddr']['regionCd'] + \" (\" + x['locationAddr']['countryCd'] +", "if update.message!=None: trackingID = (update.message.text).split()[1] data = [] session = requests_html.HTMLSession() response =", "| | (_| || || | | | || (_) | |_/ /", "currentStatusData = history[0] currentStatusData = \"\\n\".join(currentStatusData) del history[0] historyData = [] for i", "telegram import Update, User, Message, ParseMode from telegram.error import BadRequest import requests_html import", "_ _ __ __ _ | | _ __ | |_ ___ |", "'Date: '+ x['datetime']['date'] + x['datetime']['time'] + x['datetime']['zoneOffset'], 'Location: '+ x['locationAddr']['city'] + \", \"", "'Location: '+ x['locationAddr']['city'] + \", \" + x['locationAddr']['regionCd'] + \" (\" + x['locationAddr']['countryCd']", 
#Tracking Function for E-Kart Logistics
@run_async
def ekart(update: Update, context: CallbackContext):
    """Handle /ekart <trackingID>: scrape E-Kart Logistics and reply.

    Renders the first scraped table as the latest status and the second as
    the tracking history, both as Markdown code spans. Replies with a usage
    or not-found message instead of crashing on bad input.
    """
    if update.message != None:
        parts = (update.message.text).split()
        # Guard: "/ekart" with no tracking ID used to raise IndexError.
        if len(parts) < 2:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Usage: /ekart <trackingID>",
                                     reply_to_message_id=update.message.message_id)
            return
        trackingID = parts[1]
        data = []
        session = requests_html.HTMLSession()
        try:
            response = session.get("https://ekartlogistics.com/track/"+str(trackingID)+"/")
            for selector in response.html.xpath('//div[@id="no-more-tables"][1]/table/tbody'):
                data.append(selector.text)
        finally:
            # The session was never closed before; don't leak the connection pool.
            session.close()
        # Guard: an unknown ID yields no tables, so data[0]/data[1] would IndexError.
        if len(data) < 2:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="No tracking info found for `"+str(trackingID)+"`.",
                                     reply_to_message_id=update.message.message_id,
                                     parse_mode=ParseMode.MARKDOWN)
            return
        context.bot.send_message(chat_id=update.effective_chat.id, text="*Shipping Status: *\n\n`Latest Status: "+data[0]+"`\n\n*Tracking Info:*\n\n`"+data[1]+"`", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN)
#Tracking Function for Pitney Bowes
@run_async
def pitneyb(update: Update, context: CallbackContext):
    """Handle /pitneyb <trackingID>: query the Pitney Bowes tracking API.

    Replies in Markdown with the current status followed by the full scan
    history. The KeyError branches keep the original fallback formatting for
    responses that lack optional location fields.
    """
    if update.message != None:
        parts = (update.message.text).split()
        # Guard: "/pitneyb" with no tracking ID used to raise IndexError.
        if len(parts) < 2:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Usage: /pitneyb <trackingID>",
                                     reply_to_message_id=update.message.message_id)
            return
        trackingID = parts[1]
        response = requests.get("https://parceltracking.pb.com/ptsapi/track-packages/"+trackingID)
        jsonData = json.loads(response.text)
        try:
            currentStatusData = [
                'Status: '+jsonData['currentStatus']['packageStatus'],
                'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'],
                'Description: '+jsonData['currentStatus']['eventDescription'],
                'Location: '+jsonData['currentStatus']['eventLocation']['city']+", "+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - '+jsonData['currentStatus']['eventLocation']['postalOrZipCode']
            ]
        except KeyError:
            # Some responses omit the postal/zip code; drop just that suffix.
            currentStatusData = [
                'Status: '+jsonData['currentStatus']['packageStatus'],
                'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'],
                'Description: '+jsonData['currentStatus']['eventDescription'],
                'Location: '+jsonData['currentStatus']['eventLocation']['city']+", "+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - '
            ]
        currentStatusData = "\n".join(currentStatusData)
        history = []
        for x in jsonData['scanHistory']['scanDetails']:
            try:
                history.append([
                    'Status: '+x['packageStatus'],
                    'Last Updated: '+x['eventDate']+' '+x['eventTime'],
                    'Description: '+x['eventDescription'],
                    'Location: '+x['eventLocation']['city']+", "+x['eventLocation']['countyOrRegion']+' - '+x['eventLocation']['postalOrZipCode']
                ])
            except KeyError:
                # Scan entries without location data get the short form.
                history.append([
                    'Status: '+x['packageStatus'],
                    'Last Updated: '+x['eventDate']+' '+x['eventTime'],
                    'Description: '+x['eventDescription'],
                ])
        # Join each entry's lines; separate entries with a blank line
        # (replaces the manual range(len(history)) append loop).
        historyData = "\n\n".join("\n".join(entry) for entry in history)
        context.bot.send_message(chat_id=update.effective_chat.id, text="*Shipping Status: *\n\n`Latest Status:\n"+currentStatusData+"`\n\n*Tracking Info:*\n\n`"+historyData+"`", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN)
telegram.ext.dispatcher import run_async, DispatcherHandlerStop,", "json.loads(response.text) status = jsonData['status'] history = [] for x in jsonData['events']: history.append([ 'Date:", "= \"\\n\".join(currentStatusData) history = [] for x in jsonData['scanHistory']['scanDetails']: try: history.append([ 'Status: '+x['packageStatus'],", "the following commands to access your package tracking info.\") def main(): start_handler =", "dispatcher.add_handler(pitneyb_handler) #Command handler for Canada Post canadapost_handler = CommandHandler(\"canadapost\", canadapost) dispatcher.add_handler(canadapost_handler) updater.start_polling() updater.idle()", "|| (_) | |_/ / (_) | |_ \\n\") print(\"\\____/|_| |_|_| .__/| .__/|_|_|", "for E-Kart Logistics ekart_handler = CommandHandler(\"ekart\", ekart) dispatcher.add_handler(ekart_handler) #Command handler for Pitney Bowes", "#Tracking Function for E-Kart Logistics @run_async def ekart(update: Update, context: CallbackContext): if update.message!=None:", "MessageHandler, Filters, Handler from telegram.ext.dispatcher import run_async, DispatcherHandlerStop, Dispatcher from telegram import Update,", "Updated: '+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'], ]) historyData = [] for i in range(len(history)):", "CallbackContext, CommandHandler, MessageHandler, Filters, Handler from telegram.ext.dispatcher import run_async, DispatcherHandlerStop, Dispatcher from telegram", "= (update.message.text).split()[1] response = requests.get(\"https://parceltracking.pb.com/ptsapi/track-packages/\"+trackingID) jsonData = json.loads(response.text) try: currentStatusData = [ 'Status:", "parse_mode=ParseMode.MARKDOWN) #Tracking Function for Canada Post @run_async def canadapost(update: Update, context: CallbackContext): if", "Filters, Handler from telegram.ext.dispatcher import run_async, DispatcherHandlerStop, Dispatcher from telegram import Update, User,", "del history[0] historyData = [] for i in 
#Enter API-KEY here
# Module-level bot wiring shared by main() and every handler below.
updater = Updater("API-KEY", use_context=True)
dispatcher = updater.dispatcher
# Log to a file; INFO level also captures python-telegram-bot's own messages.
logging.basicConfig(filename="shipping.log", format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
#Tracking Function for Canada Post
@run_async
def canadapost(update: Update, context: CallbackContext):
    """Handle /canadapost <trackingID>: query the Canada Post JSON API.

    The first event in the response becomes the "Latest Status" section; the
    remaining events become the history section, blank-line separated.
    """
    if update.message != None:
        parts = (update.message.text).split()
        # Guard: "/canadapost" with no tracking ID used to raise IndexError.
        if len(parts) < 2:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="Usage: /canadapost <trackingID>",
                                     reply_to_message_id=update.message.message_id)
            return
        trackingID = parts[1]
        response = requests.get("https://www.canadapost.ca/trackweb/rs/track/json/package/"+trackingID+"/detail")
        jsonData = json.loads(response.text)
        # NOTE: the original also read jsonData['status'] into an unused
        # local; dropped as dead code.
        history = []
        for x in jsonData['events']:
            history.append([
                'Date: '+ x['datetime']['date'] + x['datetime']['time'] + x['datetime']['zoneOffset'],
                'Location: '+ x['locationAddr']['city'] + ", " + x['locationAddr']['regionCd'] + " (" + x['locationAddr']['countryCd'] + ")",
                'Description: '+ x['descEn']
            ])
        # Guard: no events means history[0] would IndexError.
        if not history:
            context.bot.send_message(chat_id=update.effective_chat.id,
                                     text="No tracking info found for `"+trackingID+"`.",
                                     reply_to_message_id=update.message.message_id,
                                     parse_mode=ParseMode.MARKDOWN)
            return
        # Newest event is the current status; the rest (history[1:]) replaces
        # the original del history[0] + range(len(history)) append loop.
        currentStatusData = "\n".join(history[0])
        historyData = "\n\n".join("\n".join(entry) for entry in history[1:])
        context.bot.send_message(chat_id=update.effective_chat.id, text="*Shipping Status: *\n\n`Latest Status:\n"+currentStatusData+"`\n\n*Tracking Info:*\n\n`"+historyData+"`", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN)
(update.message.text).split()[1] response = requests.get(\"https://parceltracking.pb.com/ptsapi/track-packages/\"+trackingID) jsonData = json.loads(response.text) try: currentStatusData", "'Description: '+jsonData['currentStatus']['eventDescription'], 'Location: '+jsonData['currentStatus']['eventLocation']['city']+\", \"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - '+jsonData['currentStatus']['eventLocation']['postalOrZipCode'] ] except KeyError: currentStatusData = [", "= requests.get(\"https://www.canadapost.ca/trackweb/rs/track/json/package/\"+trackingID+\"/detail\") jsonData = json.loads(response.text) status = jsonData['status'] history = [] for x", "#Command handler for Canada Post canadapost_handler = CommandHandler(\"canadapost\", canadapost) dispatcher.add_handler(canadapost_handler) updater.start_polling() updater.idle() if", "CallbackContext): context.bot.sendChatAction(update.effective_chat.id, \"typing\") cmd_msg = context.bot.send_message(chat_id=update.effective_chat.id, text=\"Hey there! 
def main():
    """Register all command handlers on the dispatcher and run the bot."""
    # (command, callback) pairs, registered in the original order.
    handlers = (
        ("start", start),            # greeting / usage message
        ("ekart", ekart),            # E-Kart Logistics tracking
        ("pitneyb", pitneyb),        # Pitney Bowes tracking
        ("canadapost", canadapost),  # Canada Post tracking
    )
    for command, callback in handlers:
        dispatcher.add_handler(CommandHandler(command, callback))
    updater.start_polling()
    updater.idle()
Info:*\\n\\n`\"+historyData+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Bot Start Message /start", "| | | | (_| || || | | | || (_) |", "@run_async def pitneyb(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] response =", "[] session = requests_html.HTMLSession() response = session.get(\"https://ekartlogistics.com/track/\"+str(trackingID)+\"/\") for selector in response.html.xpath('//div[@id=\"no-more-tables\"][1]/table/tbody'): data.append(selector.text) context.bot.send_message(chat_id=update.effective_chat.id,", "- '+x['eventLocation']['postalOrZipCode'] ]) except KeyError: history.append([ 'Status: '+x['packageStatus'], 'Last Updated: '+x['eventDate']+' '+x['eventTime'], 'Description:", "|_ \\n\") print(\"\\____/|_| |_|_| .__/| .__/|_|_| |_|\\__, \\___/_| |_|_| \\___/\\____/ \\___/ \\__|\\n\") print(\"", "__ _ __ _ _ __ __ _ | | _ __ |", "___| | (_) (_) |_ _| / _| | ___ \\ | |", "'Description: '+x['eventDescription'], 'Location: '+x['eventLocation']['city']+\", \"+x['eventLocation']['countyOrRegion']+' - '+x['eventLocation']['postalOrZipCode'] ]) except KeyError: history.append([ 'Status: '+x['packageStatus'],", "data = [] session = requests_html.HTMLSession() response = session.get(\"https://ekartlogistics.com/track/\"+str(trackingID)+\"/\") for selector in response.html.xpath('//div[@id=\"no-more-tables\"][1]/table/tbody'):", "`--.| |__ _ _ __ _ __ _ _ __ __ _ |", "| | |_) | |_) | | | | | (_| || ||", "x['descEn'] ]) currentStatusData = history[0] currentStatusData = \"\\n\".join(currentStatusData) del history[0] historyData = []", "'+x['eventLocation']['postalOrZipCode'] ]) except KeyError: history.append([ 'Status: '+x['packageStatus'], 'Last Updated: '+x['eventDate']+' '+x['eventTime'], 'Description: '+x['eventDescription'],", "@run_async def ekart(update: Update, context: CallbackContext): if update.message!=None: trackingID = 
(update.message.text).split()[1] data =", "| |_) | | | | | (_| || || | | |", "+ x['locationAddr']['countryCd'] + \")\", 'Description: '+ x['descEn'] ]) currentStatusData = history[0] currentStatusData =", "|_|\\__, \\___/_| |_|_| \\___/\\____/ \\___/ \\__|\\n\") print(\" | | | | __/ |", "'Status: '+jsonData['currentStatus']['packageStatus'], 'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'], 'Description: '+jsonData['currentStatus']['eventDescription'], 'Location: '+jsonData['currentStatus']['eventLocation']['city']+\", \"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - '", "Message, ParseMode from telegram.error import BadRequest import requests_html import requests import json import", "context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status: \"+data[0]+\"`\\n\\n*Tracking Info:*\\n\\n`\"+data[1]+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Tracking Function for Pitney", "\"\\n\\n\".join(historyData) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status:\\n\"+currentStatusData+\"`\\n\\n*Tracking Info:*\\n\\n`\"+historyData+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Tracking Function for Canada", "| (_| || || | | | || (_) | |_/ / (_)", "requests import json import logging #Enter API-KEY here updater = Updater(\"API-KEY\", use_context=True) dispatcher", "import requests import json import logging #Enter API-KEY here updater = Updater(\"API-KEY\", use_context=True)", "= \"\\n\".join(currentStatusData) del history[0] historyData = [] for i in range(len(history)): historyData.append(\"\\n\".join(history[i])) historyData", "ekart_handler = CommandHandler(\"ekart\", ekart) dispatcher.add_handler(ekart_handler) #Command handler for Pitney Bowes pitneyb_handler = CommandHandler(\"pitneyb\",", 
"Info:*\\n\\n`\"+historyData+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Tracking Function for Canada Post @run_async def canadapost(update: Update, context:", "API-KEY here updater = Updater(\"API-KEY\", use_context=True) dispatcher = updater.dispatcher logging.basicConfig(filename=\"shipping.log\", format='%(asctime)s - %(name)s", "_ __ _ __ _ _ __ __ _ | | _ __", "def main(): start_handler = CommandHandler(\"start\", start) dispatcher.add_handler(start_handler) #Command handler for E-Kart Logistics ekart_handler", "'_ \\| | '_ \\ / _` || || '_ \\| _/ _", "for x in jsonData['events']: history.append([ 'Date: '+ x['datetime']['date'] + x['datetime']['time'] + x['datetime']['zoneOffset'], 'Location:", "|_|_| \\___/\\____/ \\___/ \\__|\\n\") print(\" | | | | __/ | \\n\") print(\"", "text=\"*Shipping Status: *\\n\\n`Latest Status:\\n\"+currentStatusData+\"`\\n\\n*Tracking Info:*\\n\\n`\"+historyData+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Tracking Function for Canada Post @run_async", "selector in response.html.xpath('//div[@id=\"no-more-tables\"][1]/table/tbody'): data.append(selector.text) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status: \"+data[0]+\"`\\n\\n*Tracking Info:*\\n\\n`\"+data[1]+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN)", "response = session.get(\"https://ekartlogistics.com/track/\"+str(trackingID)+\"/\") for selector in response.html.xpath('//div[@id=\"no-more-tables\"][1]/table/tbody'): data.append(selector.text) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status:", "requests_html.HTMLSession() response = session.get(\"https://ekartlogistics.com/track/\"+str(trackingID)+\"/\") for selector in response.html.xpath('//div[@id=\"no-more-tables\"][1]/table/tbody'): data.append(selector.text) 
context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest", "#Tracking Function for Canada Post @run_async def canadapost(update: Update, context: CallbackContext): if update.message!=None:", "___ | |_ \\n\") print(\" `--. \\ '_ \\| | '_ \\| '_", "\\ | | \\n\") print(\"\\ `--.| |__ _ _ __ _ __ _", "| || (_) | |_/ / (_) | |_ \\n\") print(\"\\____/|_| |_|_| .__/|", "'+jsonData['currentStatus']['eventLocation']['postalOrZipCode'] ] except KeyError: currentStatusData = [ 'Status: '+jsonData['currentStatus']['packageStatus'], 'Last Updated: '+jsonData['currentStatus']['eventDate']+' '+jsonData['currentStatus']['eventTime'],", "= [] for i in range(len(history)): historyData.append(\"\\n\".join(history[i])) historyData = \"\\n\\n\".join(historyData) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status:", "= session.get(\"https://ekartlogistics.com/track/\"+str(trackingID)+\"/\") for selector in response.html.xpath('//div[@id=\"no-more-tables\"][1]/table/tbody'): data.append(selector.text) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status: \"+data[0]+\"`\\n\\n*Tracking", "context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] response = requests.get(\"https://www.canadapost.ca/trackweb/rs/track/json/package/\"+trackingID+\"/detail\") jsonData = json.loads(response.text)", "+ x['datetime']['zoneOffset'], 'Location: '+ x['locationAddr']['city'] + \", \" + x['locationAddr']['regionCd'] + \" (\"", "'Description: '+ x['descEn'] ]) currentStatusData = history[0] currentStatusData = \"\\n\".join(currentStatusData) del history[0] historyData", "_ | | _ __ | |_ ___ | |_/ / ___ |", "_` || || '_ \\| _/ _ \\| ___ \\/ _ \\| __|\\n\")", "print(\" | | | | __/ | \\n\") print(\" |_| |_| |___/ \")", "|_/ / ___ | |_ \\n\") print(\" `--. 
\\ '_ \\| | '_", "context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status:\\n\"+currentStatusData+\"`\\n\\n*Tracking Info:*\\n\\n`\"+historyData+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Bot Start Message /start @run_async", "'+jsonData['currentStatus']['eventLocation']['city']+\", \"+jsonData['currentStatus']['eventLocation']['countyOrRegion']+' - '+jsonData['currentStatus']['eventLocation']['postalOrZipCode'] ] except KeyError: currentStatusData = [ 'Status: '+jsonData['currentStatus']['packageStatus'], 'Last", "tracking info on your package.\\n\\nUse the following commands to access your package tracking", "_ _ _____ __ ______ _ \\n\") print(\"/ ___| | (_) (_) |_", "requests_html import requests import json import logging #Enter API-KEY here updater = Updater(\"API-KEY\",", "= [] session = requests_html.HTMLSession() response = session.get(\"https://ekartlogistics.com/track/\"+str(trackingID)+\"/\") for selector in response.html.xpath('//div[@id=\"no-more-tables\"][1]/table/tbody'): data.append(selector.text)", "ekart) dispatcher.add_handler(ekart_handler) #Command handler for Pitney Bowes pitneyb_handler = CommandHandler(\"pitneyb\", pitneyb) dispatcher.add_handler(pitneyb_handler) #Command", ".__/|_|_| |_|\\__, \\___/_| |_|_| \\___/\\____/ \\___/ \\__|\\n\") print(\" | | | | __/", "| | \\n\") print(\"\\ `--.| |__ _ _ __ _ __ _ _", "historyData.append(\"\\n\".join(history[i])) historyData = \"\\n\\n\".join(historyData) context.bot.send_message(chat_id=update.effective_chat.id, text=\"*Shipping Status: *\\n\\n`Latest Status:\\n\"+currentStatusData+\"`\\n\\n*Tracking Info:*\\n\\n`\"+historyData+\"`\", reply_to_message_id=update.message.message_id, parse_mode=ParseMode.MARKDOWN) #Bot", "here updater = Updater(\"API-KEY\", use_context=True) dispatcher = updater.dispatcher logging.basicConfig(filename=\"shipping.log\", format='%(asctime)s - %(name)s -", "currentStatusData = 
\"\\n\".join(currentStatusData) history = [] for x in jsonData['scanHistory']['scanDetails']: try: history.append([ 'Status:", "'+ x['descEn'] ]) currentStatusData = history[0] currentStatusData = \"\\n\".join(currentStatusData) del history[0] historyData =", "print(\"/ ___| | (_) (_) |_ _| / _| | ___ \\ |", "Function for E-Kart Logistics @run_async def ekart(update: Update, context: CallbackContext): if update.message!=None: trackingID", "= json.loads(response.text) status = jsonData['status'] history = [] for x in jsonData['events']: history.append([", "pitneyb(update: Update, context: CallbackContext): if update.message!=None: trackingID = (update.message.text).split()[1] response = requests.get(\"https://parceltracking.pb.com/ptsapi/track-packages/\"+trackingID) jsonData", "'Description: '+x['eventDescription'], ]) historyData = [] for i in range(len(history)): historyData.append(\"\\n\".join(history[i])) historyData =", "in jsonData['events']: history.append([ 'Date: '+ x['datetime']['date'] + x['datetime']['time'] + x['datetime']['zoneOffset'], 'Location: '+ x['locationAddr']['city']", "| | | |_) | |_) | | | | | (_| ||", "cmd_msg = context.bot.send_message(chat_id=update.effective_chat.id, text=\"Hey there! 
I'm Shipping Info Bot!\\nI can provide you latest", "User, Message, ParseMode from telegram.error import BadRequest import requests_html import requests import json", "info.\") def main(): start_handler = CommandHandler(\"start\", start) dispatcher.add_handler(start_handler) #Command handler for E-Kart Logistics", "= Updater(\"API-KEY\", use_context=True) dispatcher = updater.dispatcher logging.basicConfig(filename=\"shipping.log\", format='%(asctime)s - %(name)s - %(levelname)s -", "|| '_ \\| _/ _ \\| ___ \\/ _ \\| __|\\n\") print(\"/\\__/ /", "#Bot Start Message /start @run_async def start(update: Update, context: CallbackContext): context.bot.sendChatAction(update.effective_chat.id, \"typing\") cmd_msg", "Updater, CallbackContext, CommandHandler, MessageHandler, Filters, Handler from telegram.ext.dispatcher import run_async, DispatcherHandlerStop, Dispatcher from" ]
[ "flip(self, node): if not node: return None hold_node = node.left node.left = node.right", "self.right = right class Solution(object): def flip(self, node): if not node: return None", "class Solution(object): def flip(self, node): if not node: return None hold_node = node.left", "tree node. # class TreeNode(object): # def __init__(self, val=0, left=None, right=None): # self.val", "binary tree node. # class TreeNode(object): # def __init__(self, val=0, left=None, right=None): #", "= left # self.right = right class Solution(object): def flip(self, node): if not", "right=None): # self.val = val # self.left = left # self.right = right", "Solution(object): def flip(self, node): if not node: return None hold_node = node.left node.left", "class TreeNode(object): # def __init__(self, val=0, left=None, right=None): # self.val = val #", "TreeNode(object): # def __init__(self, val=0, left=None, right=None): # self.val = val # self.left", "# self.right = right class Solution(object): def flip(self, node): if not node: return", "= node.right node.right = hold_node self.flip(node.left) self.flip(node.right) def invertTree(self, root): \"\"\" :type root:", "val=0, left=None, right=None): # self.val = val # self.left = left # self.right", "= hold_node self.flip(node.left) self.flip(node.right) def invertTree(self, root): \"\"\" :type root: TreeNode :rtype: TreeNode", "def __init__(self, val=0, left=None, right=None): # self.val = val # self.left = left", "node.left = node.right node.right = hold_node self.flip(node.left) self.flip(node.right) def invertTree(self, root): \"\"\" :type", "node.right node.right = hold_node self.flip(node.left) self.flip(node.right) def invertTree(self, root): \"\"\" :type root: TreeNode", "hold_node = node.left node.left = node.right node.right = hold_node self.flip(node.left) self.flip(node.right) def invertTree(self,", "hold_node self.flip(node.left) self.flip(node.right) def invertTree(self, root): \"\"\" :type root: TreeNode :rtype: TreeNode 
\"\"\"", "for a binary tree node. # class TreeNode(object): # def __init__(self, val=0, left=None,", "node.right = hold_node self.flip(node.left) self.flip(node.right) def invertTree(self, root): \"\"\" :type root: TreeNode :rtype:", "node): if not node: return None hold_node = node.left node.left = node.right node.right", "# Definition for a binary tree node. # class TreeNode(object): # def __init__(self,", "<gh_stars>0 # Definition for a binary tree node. # class TreeNode(object): # def", "not node: return None hold_node = node.left node.left = node.right node.right = hold_node", "self.flip(node.left) self.flip(node.right) def invertTree(self, root): \"\"\" :type root: TreeNode :rtype: TreeNode \"\"\" self.flip(root)", "= right class Solution(object): def flip(self, node): if not node: return None hold_node", "def flip(self, node): if not node: return None hold_node = node.left node.left =", "a binary tree node. # class TreeNode(object): # def __init__(self, val=0, left=None, right=None):", "# self.val = val # self.left = left # self.right = right class", "self.val = val # self.left = left # self.right = right class Solution(object):", "= val # self.left = left # self.right = right class Solution(object): def", "self.left = left # self.right = right class Solution(object): def flip(self, node): if", "= node.left node.left = node.right node.right = hold_node self.flip(node.left) self.flip(node.right) def invertTree(self, root):", "def invertTree(self, root): \"\"\" :type root: TreeNode :rtype: TreeNode \"\"\" self.flip(root) return root", "return None hold_node = node.left node.left = node.right node.right = hold_node self.flip(node.left) self.flip(node.right)", "# def __init__(self, val=0, left=None, right=None): # self.val = val # self.left =", "None hold_node = node.left node.left = node.right node.right = hold_node self.flip(node.left) self.flip(node.right) def", "left=None, right=None): # self.val = val # self.left = left # self.right =", "right class 
Solution(object): def flip(self, node): if not node: return None hold_node =", "val # self.left = left # self.right = right class Solution(object): def flip(self,", "node: return None hold_node = node.left node.left = node.right node.right = hold_node self.flip(node.left)", "__init__(self, val=0, left=None, right=None): # self.val = val # self.left = left #", "left # self.right = right class Solution(object): def flip(self, node): if not node:", "self.flip(node.right) def invertTree(self, root): \"\"\" :type root: TreeNode :rtype: TreeNode \"\"\" self.flip(root) return", "if not node: return None hold_node = node.left node.left = node.right node.right =", "# self.left = left # self.right = right class Solution(object): def flip(self, node):", "Definition for a binary tree node. # class TreeNode(object): # def __init__(self, val=0,", "# class TreeNode(object): # def __init__(self, val=0, left=None, right=None): # self.val = val", "node.left node.left = node.right node.right = hold_node self.flip(node.left) self.flip(node.right) def invertTree(self, root): \"\"\"", "node. # class TreeNode(object): # def __init__(self, val=0, left=None, right=None): # self.val =" ]
[ "* from statistics.variance import variance, standard_deviation from statistics.correlation import covariance, correlation def main():", "outlier\") plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\") def correlations(num_frieds: List[float], daily_minutes: List[float]): cov = covariance(num_frieds,", "9, 20, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1,", "import covariance, correlation def main(): num_friends = [500, 50, 25, 30, 5, 6,", "[x for i, x in enumerate(num_friends) if i != outlier] daily_minutes_good = [x", "25, 30, 5, 6, 7, 8, 9, 10, 1, 2, 3, 4, 5,", "5]) == 5 vector_a = [1, 9, 2, 10] assert median(vector_a) == (2", "= variance(num_friends) print(f'variance: {varian}') standard_devi = standard_deviation(num_friends) print(f'standard deviation: {standard_devi}') def central_tendencies(num_friends: List[float]):", "covariance(num_frieds, daily_minutes) print(f'covariance: {cov}') corr = correlation(num_frieds, daily_minutes) print(f'correlation: {corr}') def dispersion(num_friends: List[float]):", "x in enumerate(num_friends) if i != outlier] daily_minutes_good = [x for i, x", "daily_minutes) correlation_outliers(num_friends, daily_minutes) plot_graphs() def correlation_outliers(num_friends: List[float], daily_minutes: List[float]): outlier = num_friends.index(500) num_friends_good", "10, 19, 28, 37, 46, 55, 64, 73, 82, 91, 10, 19, 28,", "37, 46, 55, 64, 73, 82, 91, 10, 19, 28, 37, 33, 55,", "= num_friends.index(500) num_friends_good = [x for i, x in enumerate(num_friends) if i !=", "== (2 + 9) / 2 print(median(vector_a)) print(4//2) # 2 print(9//2) # 4", "quantile(num_friends, 0.25) print(f'quatile 25%: {result_q2}') result_q3 = quantile(num_friends, 0.50) print(f'quatile 50%: {result_q3}') result_q4", "result_q5 = quantile(num_friends, 0.90) print(f'quatile 90%: {result_q5}') moda = set(mode(num_friends)) print(f'moda: {moda}') def", "central_tendencies(num_friends: List[float]): assert median([1, 10, 2, 9, 5]) == 5 vector_a = [1,", "print(f'variance: 
{varian}') standard_devi = standard_deviation(num_friends) print(f'standard deviation: {standard_devi}') def central_tendencies(num_friends: List[float]): assert median([1,", "print(f'quatile 75%: {result_q4}') result_q5 = quantile(num_friends, 0.90) print(f'quatile 90%: {result_q5}') moda = set(mode(num_friends))", "19, 28, 37, 46, 55, 64, 73, 82, 91, 10, 19, 28, 37,", "19, 28, 37, 33, 55, 64, 73, 82, 91, 10] daily_minutes = [1,", "== 5 vector_a = [1, 9, 2, 10] assert median(vector_a) == (2 +", "corr = correlation(num_frieds, daily_minutes) print(f'correlation: {corr}') def dispersion(num_friends: List[float]): print(data_range(num_friends)) varian = variance(num_friends)", "daily_minutes: List[float]): outlier = num_friends.index(500) num_friends_good = [x for i, x in enumerate(num_friends)", "4, 9, 12, 8, 9, 20, 5, 6, 10, 20, 4, 9, 12,", "plt.figure() plt.scatter(num_friends_good, daily_minutes_good) plt.title(\"Correlation with outlier\") plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\") def correlations(num_frieds: List[float],", "{standard_devi}') def central_tendencies(num_friends: List[float]): assert median([1, 10, 2, 9, 5]) == 5 vector_a", "9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] central_tendencies(num_friends)", "1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2, 3,", "\"\"\" Some exercises about statistics \"\"\" from matplotlib import pyplot as plt from", "12, 8, 9, 20, 1, 2, 3, 4, 5, 6, 7, 8, 9,", "statistics.central_tendencies import * from statistics.variance import variance, standard_deviation from statistics.correlation import covariance, correlation", "8, 9, 20, 5, 6, 10, 20, 4, 9, 12, 8, 9, 20,", "90%: {result_q5}') moda = set(mode(num_friends)) print(f'moda: {moda}') def plot_graphs(): plt.show() if __name__ ==", "with outlier\") plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\") def correlations(num_frieds: List[float], daily_minutes: List[float]): cov =", "cov = covariance(num_frieds, daily_minutes) print(f'covariance: {cov}') corr = 
correlation(num_frieds, daily_minutes) print(f'correlation: {corr}') def", "0.10) print(f'quatile 10%: {result_q1}') result_q2 = quantile(num_friends, 0.25) print(f'quatile 25%: {result_q2}') result_q3 =", "pyplot as plt from statistics.central_tendencies import * from statistics.variance import variance, standard_deviation from", "!= outlier] daily_minutes_good = [x for i, x in enumerate(daily_minutes) if i !=", "9, 5]) == 5 vector_a = [1, 9, 2, 10] assert median(vector_a) ==", "print(f'quatile 90%: {result_q5}') moda = set(mode(num_friends)) print(f'moda: {moda}') def plot_graphs(): plt.show() if __name__", "10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 19, 28,", "statistics.correlation import covariance, correlation def main(): num_friends = [500, 50, 25, 30, 5,", "82, 91, 10, 19, 28, 37, 33, 55, 64, 73, 82, 91, 10]", "37, 33, 55, 64, 73, 82, 91, 10] daily_minutes = [1, 6, 10,", "10, 20, 4, 9, 12, 8, 9, 20, 5, 6, 10, 20, 4,", "x in enumerate(daily_minutes) if i != outlier] # plotting plt.figure() plt.scatter(num_friends, daily_minutes) plt.title(\"Correlation", "!= outlier] # plotting plt.figure() plt.scatter(num_friends, daily_minutes) plt.title(\"Correlation without outlier\") plt.xlabel(\"# of friends\")", "plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\") def correlations(num_frieds: List[float], daily_minutes: List[float]): cov = covariance(num_frieds, daily_minutes)", "75%: {result_q4}') result_q5 = quantile(num_friends, 0.90) print(f'quatile 90%: {result_q5}') moda = set(mode(num_friends)) print(f'moda:", "{result_q5}') moda = set(mode(num_friends)) print(f'moda: {moda}') def plot_graphs(): plt.show() if __name__ == \"__main__\":", "def central_tendencies(num_friends: List[float]): assert median([1, 10, 2, 9, 5]) == 5 vector_a =", "55, 64, 73, 82, 91, 10] daily_minutes = [1, 6, 10, 20, 4,", "8, 9, 10, 19, 28, 37, 46, 55, 64, 73, 82, 91, 10,", "8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]", "List[float]): print(data_range(num_friends)) varian = variance(num_friends) 
print(f'variance: {varian}') standard_devi = standard_deviation(num_friends) print(f'standard deviation: {standard_devi}')", "9, 2, 10] assert median(vector_a) == (2 + 9) / 2 print(median(vector_a)) print(4//2)", "plt.ylabel(\"minutes\") plt.figure() plt.scatter(num_friends_good, daily_minutes_good) plt.title(\"Correlation with outlier\") plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\") def correlations(num_frieds:", "= [x for i, x in enumerate(num_friends) if i != outlier] daily_minutes_good =", "plot_graphs() def correlation_outliers(num_friends: List[float], daily_minutes: List[float]): outlier = num_friends.index(500) num_friends_good = [x for", "5, 6, 7, 8, 9, 10] central_tendencies(num_friends) dispersion(num_friends) correlations(num_friends, daily_minutes) correlation_outliers(num_friends, daily_minutes) plot_graphs()", "correlation_outliers(num_friends, daily_minutes) plot_graphs() def correlation_outliers(num_friends: List[float], daily_minutes: List[float]): outlier = num_friends.index(500) num_friends_good =", "exercises about statistics \"\"\" from matplotlib import pyplot as plt from statistics.central_tendencies import", "i, x in enumerate(num_friends) if i != outlier] daily_minutes_good = [x for i,", "{result_q2}') result_q3 = quantile(num_friends, 0.50) print(f'quatile 50%: {result_q3}') result_q4 = quantile(num_friends, 0.75) print(f'quatile", "variance(num_friends) print(f'variance: {varian}') standard_devi = standard_deviation(num_friends) print(f'standard deviation: {standard_devi}') def central_tendencies(num_friends: List[float]): assert", "friends\") plt.ylabel(\"minutes\") def correlations(num_frieds: List[float], daily_minutes: List[float]): cov = covariance(num_frieds, daily_minutes) print(f'covariance: {cov}')", "9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 19,", "[500, 50, 25, 30, 5, 6, 7, 8, 9, 10, 1, 2, 3,", "8, 9, 10] central_tendencies(num_friends) dispersion(num_friends) correlations(num_friends, daily_minutes) 
correlation_outliers(num_friends, daily_minutes) plot_graphs() def correlation_outliers(num_friends: List[float],", "outlier\") plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\") plt.figure() plt.scatter(num_friends_good, daily_minutes_good) plt.title(\"Correlation with outlier\") plt.xlabel(\"# of", "of friends\") plt.ylabel(\"minutes\") plt.figure() plt.scatter(num_friends_good, daily_minutes_good) plt.title(\"Correlation with outlier\") plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\")", "8, 9, 20, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,", "+ 9) / 2 print(median(vector_a)) print(4//2) # 2 print(9//2) # 4 result_q1 =", "55, 64, 73, 82, 91, 10, 19, 28, 37, 33, 55, 64, 73,", "10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] central_tendencies(num_friends) dispersion(num_friends)", "= standard_deviation(num_friends) print(f'standard deviation: {standard_devi}') def central_tendencies(num_friends: List[float]): assert median([1, 10, 2, 9,", "dispersion(num_friends: List[float]): print(data_range(num_friends)) varian = variance(num_friends) print(f'variance: {varian}') standard_devi = standard_deviation(num_friends) print(f'standard deviation:", "print(f'quatile 10%: {result_q1}') result_q2 = quantile(num_friends, 0.25) print(f'quatile 25%: {result_q2}') result_q3 = quantile(num_friends,", "# 2 print(9//2) # 4 result_q1 = quantile(num_friends, 0.10) print(f'quatile 10%: {result_q1}') result_q2", "6, 7, 8, 9, 10] central_tendencies(num_friends) dispersion(num_friends) correlations(num_friends, daily_minutes) correlation_outliers(num_friends, daily_minutes) plot_graphs() def", "plt.figure() plt.scatter(num_friends, daily_minutes) plt.title(\"Correlation without outlier\") plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\") plt.figure() plt.scatter(num_friends_good, daily_minutes_good)", "about statistics \"\"\" from matplotlib import pyplot as plt from statistics.central_tendencies import *", "assert median([1, 10, 2, 9, 5]) == 5 vector_a = [1, 9, 2,", "plt.scatter(num_friends, 
daily_minutes) plt.title(\"Correlation without outlier\") plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\") plt.figure() plt.scatter(num_friends_good, daily_minutes_good) plt.title(\"Correlation", "7, 8, 9, 10] central_tendencies(num_friends) dispersion(num_friends) correlations(num_friends, daily_minutes) correlation_outliers(num_friends, daily_minutes) plot_graphs() def correlation_outliers(num_friends:", "= [x for i, x in enumerate(daily_minutes) if i != outlier] # plotting", "deviation: {standard_devi}') def central_tendencies(num_friends: List[float]): assert median([1, 10, 2, 9, 5]) == 5", "List[float], daily_minutes: List[float]): outlier = num_friends.index(500) num_friends_good = [x for i, x in", "List[float]): assert median([1, 10, 2, 9, 5]) == 5 vector_a = [1, 9,", "5 vector_a = [1, 9, 2, 10] assert median(vector_a) == (2 + 9)", "from matplotlib import pyplot as plt from statistics.central_tendencies import * from statistics.variance import", "correlations(num_friends, daily_minutes) correlation_outliers(num_friends, daily_minutes) plot_graphs() def correlation_outliers(num_friends: List[float], daily_minutes: List[float]): outlier = num_friends.index(500)", "{result_q1}') result_q2 = quantile(num_friends, 0.25) print(f'quatile 25%: {result_q2}') result_q3 = quantile(num_friends, 0.50) print(f'quatile", "{varian}') standard_devi = standard_deviation(num_friends) print(f'standard deviation: {standard_devi}') def central_tendencies(num_friends: List[float]): assert median([1, 10,", "statistics \"\"\" from matplotlib import pyplot as plt from statistics.central_tendencies import * from", "0.50) print(f'quatile 50%: {result_q3}') result_q4 = quantile(num_friends, 0.75) print(f'quatile 75%: {result_q4}') result_q5 =", "enumerate(num_friends) if i != outlier] daily_minutes_good = [x for i, x in enumerate(daily_minutes)", "20, 5, 6, 10, 20, 4, 9, 12, 8, 9, 20, 1, 2,", "print(4//2) # 2 print(9//2) # 4 result_q1 = quantile(num_friends, 0.10) print(f'quatile 
10%: {result_q1}')", "plotting plt.figure() plt.scatter(num_friends, daily_minutes) plt.title(\"Correlation without outlier\") plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\") plt.figure() plt.scatter(num_friends_good,", "7, 8, 9, 10, 19, 28, 37, 46, 55, 64, 73, 82, 91,", "50%: {result_q3}') result_q4 = quantile(num_friends, 0.75) print(f'quatile 75%: {result_q4}') result_q5 = quantile(num_friends, 0.90)", "result_q4 = quantile(num_friends, 0.75) print(f'quatile 75%: {result_q4}') result_q5 = quantile(num_friends, 0.90) print(f'quatile 90%:", "12, 8, 9, 20, 5, 6, 10, 20, 4, 9, 12, 8, 9,", "\"\"\" from matplotlib import pyplot as plt from statistics.central_tendencies import * from statistics.variance", "1, 2, 3, 4, 5, 6, 7, 8, 9, 10] central_tendencies(num_friends) dispersion(num_friends) correlations(num_friends,", "3, 4, 5, 6, 7, 8, 9, 10] central_tendencies(num_friends) dispersion(num_friends) correlations(num_friends, daily_minutes) correlation_outliers(num_friends,", "def correlations(num_frieds: List[float], daily_minutes: List[float]): cov = covariance(num_frieds, daily_minutes) print(f'covariance: {cov}') corr =", "correlation(num_frieds, daily_minutes) print(f'correlation: {corr}') def dispersion(num_friends: List[float]): print(data_range(num_friends)) varian = variance(num_friends) print(f'variance: {varian}')", "8, 9, 10, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,", "i, x in enumerate(daily_minutes) if i != outlier] # plotting plt.figure() plt.scatter(num_friends, daily_minutes)", "without outlier\") plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\") plt.figure() plt.scatter(num_friends_good, daily_minutes_good) plt.title(\"Correlation with outlier\") plt.xlabel(\"#", "46, 55, 64, 73, 82, 91, 10, 19, 28, 37, 33, 55, 64,", "64, 73, 82, 91, 10, 19, 28, 37, 33, 55, 64, 73, 82,", "5, 6, 7, 8, 9, 10, 19, 28, 37, 46, 55, 64, 73,", "[1, 6, 10, 20, 4, 9, 12, 8, 9, 20, 5, 6, 10,", "in enumerate(daily_minutes) if i != outlier] # plotting plt.figure() 
def correlation_outliers(num_friends: List[float], daily_minutes: List[float]):
    """Scatter-plot the data with and without the outlier (the 500-friends point).

    Creates two figures; they are displayed later by plot_graphs()/plt.show().

    :param num_friends: friend counts; must contain the value 500 (the outlier).
    :param daily_minutes: minutes per day, paired index-wise with num_friends.
    """
    outlier = num_friends.index(500)
    num_friends_good = [x for i, x in enumerate(num_friends) if i != outlier]
    daily_minutes_good = [x for i, x in enumerate(daily_minutes) if i != outlier]
    # plotting
    plt.figure()
    plt.scatter(num_friends, daily_minutes)
    # Bug fix: this figure plots the RAW data, i.e. WITH the outlier; the two
    # titles were previously swapped.
    plt.title("Correlation with outlier")
    plt.xlabel("# of friends")
    plt.ylabel("minutes")
    plt.figure()
    plt.scatter(num_friends_good, daily_minutes_good)
    plt.title("Correlation without outlier")
    plt.xlabel("# of friends")
    plt.ylabel("minutes")
def correlations(num_frieds: List[float], daily_minutes: List[float]):
    """Print covariance and correlation between the two paired series."""
    print(f'covariance: {covariance(num_frieds, daily_minutes)}')
    print(f'correlation: {correlation(num_frieds, daily_minutes)}')
List[float]): cov = covariance(num_frieds, daily_minutes) print(f'covariance: {cov}') corr = correlation(num_frieds, daily_minutes) print(f'correlation:", "6, 10, 20, 4, 9, 12, 8, 9, 20, 1, 2, 3, 4,", "main(): num_friends = [500, 50, 25, 30, 5, 6, 7, 8, 9, 10,", "10%: {result_q1}') result_q2 = quantile(num_friends, 0.25) print(f'quatile 25%: {result_q2}') result_q3 = quantile(num_friends, 0.50)", "i != outlier] # plotting plt.figure() plt.scatter(num_friends, daily_minutes) plt.title(\"Correlation without outlier\") plt.xlabel(\"# of", "2, 10] assert median(vector_a) == (2 + 9) / 2 print(median(vector_a)) print(4//2) #", "statistics.variance import variance, standard_deviation from statistics.correlation import covariance, correlation def main(): num_friends =", "Some exercises about statistics \"\"\" from matplotlib import pyplot as plt from statistics.central_tendencies", "outlier] # plotting plt.figure() plt.scatter(num_friends, daily_minutes) plt.title(\"Correlation without outlier\") plt.xlabel(\"# of friends\") plt.ylabel(\"minutes\")", "73, 82, 91, 10] daily_minutes = [1, 6, 10, 20, 4, 9, 12,", "64, 73, 82, 91, 10] daily_minutes = [1, 6, 10, 20, 4, 9,", "matplotlib import pyplot as plt from statistics.central_tendencies import * from statistics.variance import variance,", "20, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 1, 2,", "def dispersion(num_friends: List[float]): print(data_range(num_friends)) varian = variance(num_friends) print(f'variance: {varian}') standard_devi = standard_deviation(num_friends) print(f'standard", "/ 2 print(median(vector_a)) print(4//2) # 2 print(9//2) # 4 result_q1 = quantile(num_friends, 0.10)", "from statistics.variance import variance, standard_deviation from statistics.correlation import covariance, correlation def main(): num_friends", "quantile(num_friends, 0.50) print(f'quatile 50%: {result_q3}') result_q4 = quantile(num_friends, 0.75) print(f'quatile 75%: {result_q4}') result_q5", "10] central_tendencies(num_friends) 
def main():
    """Run every statistics exercise on the sample data, then show the plots."""
    # Friend counts per user; the leading 500 is a deliberate outlier used by
    # correlation_outliers() below.
    num_friends = [
        500, 50, 25, 30, 5, 6, 7, 8, 9, 10,
        1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
        19, 28, 37, 46, 55, 64, 73, 82, 91,
        10, 19, 28, 37, 33, 55, 64, 73, 82, 91, 10,
    ]
    # Minutes per day, paired index-wise with num_friends.
    daily_minutes = [
        1, 6, 10, 20, 4, 9, 12, 8, 9, 20,
        5, 6, 10, 20, 4, 9, 12, 8, 9, 20,
        5, 6, 10, 20, 4, 9, 12, 8, 9, 20,
        1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
    ]
    central_tendencies(num_friends)
    dispersion(num_friends)
    correlations(num_friends, daily_minutes)
    correlation_outliers(num_friends, daily_minutes)
    plot_graphs()
def plot_graphs():
    """Display every matplotlib figure created so far (blocks until closed)."""
    plt.show()


if __name__ == "__main__":
    main()
class SchemaTextRepo(AbstractDescriptionStore):
    """Description store that accepts schemas as ready-to-use text."""

    # Bug fix: key was annotated plain `str`, yet the body explicitly
    # handles None, so the honest type is Optional[str].
    def add(self, topic: TopicName, value: str, key: Optional[str]) -> None:
        """Register schema text for a topic.

        :param topic: Topic the schemas belong to.
        :param value: Schema text for the message value.
        :param key:   Schema text for the message key, or None for unkeyed topics.
        """
        self._values[topic] = ValueSchemaDescription(text=value)
        if key is not None:
            self._keys[topic] = KeySchemaDescription(text=key)
= KeySchemaDescription(text=_load_from_file(Path(key))) class AvroModelRepo(AbstractDescriptionStore): def __init__(self)", "def _load_from_file(filename: Path) -> str: with open(filename) as fl: return fl.read() # ToDo", "AbstractDescriptionStore from wunderkafka.compat.types import AvroModel from wunderkafka.compat.constants import PY36 from wunderkafka.serdes.avromodel import derive", "import PY36 from wunderkafka.serdes.avromodel import derive class SchemaTextRepo(AbstractDescriptionStore): def add(self, topic: TopicName, value:", "from wunderkafka.types import TopicName, KeySchemaDescription, ValueSchemaDescription from wunderkafka.serdes.abc import AbstractDescriptionStore from wunderkafka.compat.types import", "not None: self._keys[topic] = KeySchemaDescription(text=key) def _load_from_file(filename: Path) -> str: with open(filename) as", "fl.read() # ToDo (tribunsky.kir): refactor it, maybe add hooks to parent class. #", "should be generic. class SchemaFSRepo(AbstractDescriptionStore): def add(self, topic: TopicName, value: Union[str, Path], key:", "derivation from python built-ins def add(self, topic: TopicName, value: Type[AvroModel], key: Optional[Type[AvroModel]]) ->", "class AvroModelRepo(AbstractDescriptionStore): def __init__(self) -> None: super().__init__() if PY36: AvroModel() # ToDo (tribunsky.kir):", "wunderkafka.compat.types import AvroModel from wunderkafka.compat.constants import PY36 from wunderkafka.serdes.avromodel import derive class SchemaTextRepo(AbstractDescriptionStore):", "-> None: self._values[topic] = ValueSchemaDescription(text=value) if key is not None: self._keys[topic] = KeySchemaDescription(text=key)", "if key is not None: self._keys[topic] = KeySchemaDescription(text=_load_from_file(Path(key))) class AvroModelRepo(AbstractDescriptionStore): def __init__(self) ->", "as fl: return fl.read() # ToDo (tribunsky.kir): refactor it, maybe add hooks to", "_load_from_file(filename: Path) -> str: with open(filename) as fl: return 
fl.read() # ToDo (tribunsky.kir):", "wunderkafka.serdes.avromodel import derive class SchemaTextRepo(AbstractDescriptionStore): def add(self, topic: TopicName, value: str, key: str)", "KeySchemaDescription(text=key) def _load_from_file(filename: Path) -> str: with open(filename) as fl: return fl.read() #", "import derive class SchemaTextRepo(AbstractDescriptionStore): def add(self, topic: TopicName, value: str, key: str) ->", "__init__(self) -> None: super().__init__() if PY36: AvroModel() # ToDo (tribunsky.kir): change Type[AvroModel] to", "Path]) -> None: self._values[topic] = ValueSchemaDescription(text=_load_from_file(Path(value))) if key is not None: self._keys[topic] =", "wunderkafka.compat.constants import PY36 from wunderkafka.serdes.avromodel import derive class SchemaTextRepo(AbstractDescriptionStore): def add(self, topic: TopicName,", "def add(self, topic: TopicName, value: Union[str, Path], key: Union[str, Path]) -> None: self._values[topic]", "import AvroModel from wunderkafka.compat.constants import PY36 from wunderkafka.serdes.avromodel import derive class SchemaTextRepo(AbstractDescriptionStore): def", "-> None: super().__init__() if PY36: AvroModel() # ToDo (tribunsky.kir): change Type[AvroModel] to more", "refactor it, maybe add hooks to parent class. # Barbara, forgive us. Looks", "TopicName, value: str, key: str) -> None: self._values[topic] = ValueSchemaDescription(text=value) if key is", "# ToDo (tribunsky.kir): refactor it, maybe add hooks to parent class. 
# ToDo (tribunsky.kir): refactor it, maybe add hooks to parent class.
#                       Barbara, forgive us. Looks like AbstractDescriptionStore should be generic.
class SchemaFSRepo(AbstractDescriptionStore):
    """Description store that loads schema text from files on disk."""

    # Bug fix: key was annotated `Union[str, Path]`, yet the body explicitly
    # handles None, so the honest type is Optional[Union[str, Path]].
    def add(self, topic: TopicName, value: Union[str, Path], key: Optional[Union[str, Path]]) -> None:
        """Register schemas for a topic by reading their text from files.

        :param topic: Topic the schemas belong to.
        :param value: Path to the file holding the value schema.
        :param key:   Path to the file holding the key schema, or None for
                      unkeyed topics.
        """
        self._values[topic] = ValueSchemaDescription(text=_load_from_file(Path(value)))
        if key is not None:
            self._keys[topic] = KeySchemaDescription(text=_load_from_file(Path(key)))
class AvroModelRepo(AbstractDescriptionStore):
    """Description store that derives schema text from AvroModel classes."""

    def __init__(self) -> None:
        super().__init__()
        # NOTE(review): instantiating AvroModel here looks like a one-off
        # warm-up/workaround for python 3.6 builds — confirm before removing.
        if PY36:
            AvroModel()

    # ToDo (tribunsky.kir): change Type[AvroModel] to more general alias + check derivation from python built-ins
    def add(self, topic: TopicName, value: Type[AvroModel], key: Optional[Type[AvroModel]]) -> None:
        """Derive and register value (and optionally key) schemas for a topic.

        :param topic: Topic the schemas belong to.
        :param value: AvroModel subclass describing the message value.
        :param key:   AvroModel subclass describing the message key, or None
                      for unkeyed topics.
        """
        value_schema = derive(value, topic)
        self._values[topic] = ValueSchemaDescription(text=value_schema)
        if key is None:
            return
        key_schema = derive(key, topic, is_key=True)
        self._keys[topic] = KeySchemaDescription(text=key_schema)
fl: return fl.read() # ToDo (tribunsky.kir): refactor it, maybe", "Looks like AbstractDescriptionStore should be generic. class SchemaFSRepo(AbstractDescriptionStore): def add(self, topic: TopicName, value:", "not None: self._keys[topic] = KeySchemaDescription(text=_load_from_file(Path(key))) class AvroModelRepo(AbstractDescriptionStore): def __init__(self) -> None: super().__init__() if", "key: Union[str, Path]) -> None: self._values[topic] = ValueSchemaDescription(text=_load_from_file(Path(value))) if key is not None:", "fl: return fl.read() # ToDo (tribunsky.kir): refactor it, maybe add hooks to parent", "add(self, topic: TopicName, value: Type[AvroModel], key: Optional[Type[AvroModel]]) -> None: self._values[topic] = ValueSchemaDescription(text=derive(value, topic))", "None: self._values[topic] = ValueSchemaDescription(text=derive(value, topic)) if key is not None: self._keys[topic] = KeySchemaDescription(text=derive(key,", "change Type[AvroModel] to more general alias + check derivation from python built-ins def", "from wunderkafka.serdes.avromodel import derive class SchemaTextRepo(AbstractDescriptionStore): def add(self, topic: TopicName, value: str, key:", "Path) -> str: with open(filename) as fl: return fl.read() # ToDo (tribunsky.kir): refactor", "None: self._keys[topic] = KeySchemaDescription(text=key) def _load_from_file(filename: Path) -> str: with open(filename) as fl:" ]
[ "<filename>bin/tests/test_design.py \"\"\"Tests for design.py \"\"\" import random import os import copy import tempfile", "as f: for i, line in enumerate(f): if i == 0: headers =", "== 0: headers = line.split('\\t') # Will raise an error if header is", "guide counts as a match if specific == 'fasta': argv.extend(['--specific-against-fastas', specificity_file, '--id-m', '0'])", "def test_complete_targets(self): argv = super().baseArgv(search_type='complete-targets') args = design.argv_to_args(argv) design.run(args) # Since sequences are", "logging.disable(logging.NOTSET) class TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test design.py given an input FASTA \"\"\" def setUp(self): super().setUp()", "testing design.py Defines helper functions for test cases and basic setUp and tearDown", "for file in self.files_to_delete: if os.path.isfile(file): os.unlink(file) # Re-enable logging logging.disable(logging.NOTSET) class TestDesignFasta(TestDesign.TestDesignCase):", "original so # it can be fixed for future tests self.set_mafft_exec = align.set_mafft_exec", "a simple test case, so override this function; store original # so it", "so override this function; store original # so it can be fixed for", "expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_max_activity(self): argv = super().baseArgv(objective='maximize-activity') args", "def small_fetch(taxid, segment): # 123 is the taxonomic ID used in our specificity", "[\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) class TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test design.py given arguments to automatically download", "\"\"\"Test design.py given arguments to automatically download FASTAs Does not run the entire", "are expected to return a FileNotFoundError \"\"\" def setUp(self): super().setUp() # Write to", "= guide_line.split(' ') for guide in guides: self.assertIn(guide, expected[i-1]) self.assertEqual(len(guides), 
len(expected[i-1])) self.assertEqual(i, len(expected))", "to use Cas13a built in model, false to use simple binary prediction specific:", "entire design.py; prematurely stops by giving a fake path to MAFFT. All are", "else: self.fetch_sequences_for_taxonomy(taxid, segment) return SEQS prepare_alignment.fetch_sequences_for_taxonomy = small_fetch # Disable warning logging to", "a FileNotFoundError \"\"\" def setUp(self): super().setUp() # Write to temporary input file with", "Base args set the percentage of sequences to match at 75% expected =", "Produces the correct arguments for a test case given details of what the", "# Curating requires MAFFT, so override this function; store original so # it", "output_loc is None: output_loc = self.output_file.name argv = ['design.py', search_type, input_type] if input_type", "self.fetch_sequences_for_taxonomy(taxid, segment) return SEQS prepare_alignment.fetch_sequences_for_taxonomy = small_fetch # Disable warning logging to avoid", "if search_type == 'complete-targets': argv.extend(['--best-n-targets', '2', '-pp', '.75', '-pl', '1', '--max-primers-at-site', '2']) if", "search_type, input_type] if input_type == 'fasta': argv.extend([input_file, '-o', output_loc]) elif input_type == 'auto-from-args':", "arguments for a test case given details of what the test case is", "the output file/directory; set to self.output_file.name if None Returns: List of strings that", "'auto-from-file': argv.extend([input_file, output_loc]) if input_type in ['auto-from-args', 'auto-from-file']: argv.extend(['--sample-seqs', '1', '--mafft-path', 'fake_path']) if", "# Create a temporary output directory self.output_dir = tempfile.TemporaryDirectory() def test_auto_from_file(self): argv =", "f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary specificity file self.sp_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.sp_file.write(\"123\\tNone\\n\")", "file self.sp_file = tempfile.NamedTemporaryFile(mode='w', 
delete=False) self.sp_file.write(\"123\\tNone\\n\") # Closes the file so that it", "test_max_activity(self): argv = super().baseArgv(objective='maximize-activity') args = design.argv_to_args(argv) design.run(args) # Doesn't use model, just", "= design.argv_to_args(argv) design.run(args) # AA isn't allowed in 1st window by specificity fasta,", "self.real_output_file = self.output_file.name + '.tsv' self.files_to_delete.append(self.real_output_file) # Write to temporary input fasta seq_io.write_fasta(SEQS,", "expected = [[\"CT\"]] self.check_results(self.real_output_file, expected, header='guide-target-sequences') def test_specificity_fastas(self): # Create a temporary fasta", "class TestDesignCase(unittest.TestCase): def setUp(self): # Disable logging logging.disable(logging.INFO) # Create a temporary input", "fasta seq_io.write_fasta(SEQS, self.input_file.name) def test_min_guides(self): argv = super().baseArgv() args = design.argv_to_args(argv) design.run(args) #", "design.py help for details on input Args: search_type: 'sliding-window' or 'complete-targets' input_type: 'fasta',", "import os import copy import tempfile import unittest import logging from collections import", "a TSV file of test output and expected output, fails the test if", "MAFFT. 
All are expected to return a FileNotFoundError \"\"\" def setUp(self): super().setUp() #", "ncbi_neighbors, prepare_alignment from adapt.utils import seq_io from bin import design __author__ = '<NAME>", "input file self.input_file = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so that it", "expected guide target sequences in each line of the output header: the header", "from adapt.utils import seq_io from bin import design __author__ = '<NAME> <<EMAIL>>' #", "== 'taxa': argv.extend(['--specific-against-taxa', specificity_file, '--id-m', '0']) if model: argv.append('--predict-cas13a-activity-model') elif objective =='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction',", "output_loc: path to the output file/directory; set to self.output_file.name if None Returns: List", "= None with open(file) as f: for i, line in enumerate(f): if i", "objective =='maximize-activity': argv.extend(['--maximization-algorithm', 'greedy']) # ID-M (mismatches to be considered identical) must be", "= headers.index(header) continue self.assertLess(i, len(expected) + 1) guide_line = line.split('\\t')[col_loc] guides = guide_line.split('", "all overridden functions align.set_mafft_exec = self.set_mafft_exec align.curate_against_ref = self.curate_against_ref align.align = self.align prepare_alignment.fetch_sequences_for_taxonomy", "Windows self.input_file.close() # Create a temporary output file self.output_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.output_file.close()", "true to use Cas13a built in model, false to use simple binary prediction", "output directory self.output_dir = tempfile.TemporaryDirectory() def test_auto_from_file(self): argv = super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name) args =", "test_specificity_fastas(self): # Create a temporary fasta file for specificity self.sp_fasta = tempfile.NamedTemporaryFile(mode='w', delete=False)", "mismatches # (so same outputs as 
min-guides) expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file,", "so # it can be fixed for future tests self.curate_against_ref = align.curate_against_ref def", "be fixed for future tests self.align = align.align align.align = lambda seqs, am=None:", "different outputs for every cluster # Our test only produces 1 cluster, so", "details on input Args: search_type: 'sliding-window' or 'complete-targets' input_type: 'fasta', 'auto-from-args', or 'auto-from-file'", "try: design.run(args) except FileNotFoundError: pass def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file='') args", "Doesn't use model, just greedy binary prediction with 0 mismatches # (so same", "1 base for primer on each side, # only finds 1 target in", "unittest import logging from collections import OrderedDict from argparse import Namespace from adapt", "the name of that file self.real_output_file = self.output_file.name + '.0.tsv' self.files_to_delete.extend([self.sp_file.name, self.real_output_file]) #", "'-pl', '1', '--max-primers-at-site', '2']) if objective == 'minimize-guides': argv.extend(['-gm', '0', '-gp', '.75']) elif", "strings that are the arguments of the test \"\"\" input_file = self.input_file.name if", "= \"AAACT\" SEQS[\"genome_3\"] = \"GGCTA\" SEQS[\"genome_4\"] = \"GGCTT\" # Specificity seq stops AA", "can be reopened on Windows self.sp_file.close() # 'auto-from-args' gives different outputs for every", "file with open(self.input_file.name, 'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary specificity", "self.check_results(self.real_output_file, expected) def test_max_activity(self): argv = super().baseArgv(objective='maximize-activity') args = design.argv_to_args(argv) design.run(args) # Doesn't", "for test cases and basic setUp and tearDown functions. 
\"\"\" class TestDesignCase(unittest.TestCase): def", "['design.py', search_type, input_type] if input_type == 'fasta': argv.extend([input_file, '-o', output_loc]) elif input_type ==", "elif input_type == 'auto-from-file': argv.extend([input_file, output_loc]) if input_type in ['auto-from-args', 'auto-from-file']: argv.extend(['--sample-seqs', '1',", "[\"CT\"]] self.check_results(self.real_output_file, expected) def test_complete_targets(self): argv = super().baseArgv(search_type='complete-targets') args = design.argv_to_args(argv) design.run(args) #", "simple binary prediction specific: None, 'fasta', or 'taxa'; what sort of input to", "argv.extend(['--obj', objective, '--seed', '0', '-gl', '2']) return argv def tearDown(self): for file in", "to 0 since otherwise # having 1 base in common with a 2", "AA from being the best guide in the 1st window SP_SEQS = OrderedDict()", "size 3, guide size 2, allow GU pairing # GU pairing allows AA", "[\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def tearDown(self): # Fix all overridden functions align.set_mafft_exec =", "align.set_mafft_exec = lambda mafft_path: None # Curating requires MAFFT, so override this function;", "the file so that it can be reopened on Windows self.sp_fasta.close() seq_io.write_fasta(SP_SEQS, self.sp_fasta.name)", "to be considered identical) must be set to 0 since otherwise # having", "= design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def tearDown(self): super().tearDown() self.output_dir.cleanup() class TestDesignFull(TestDesign.TestDesignCase):", "small_fetch # Disable warning logging to avoid annotation warning logging.disable(logging.WARNING) def test_specificity_taxa(self): argv", "of strings that are the arguments of the test \"\"\" input_file = self.input_file.name", "at 75% expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_max_activity(self): argv =", "if the test output guide target 
sequences do not equal the expected guide", "same expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def tearDown(self): # Fix", "results of the test output Given a TSV file of test output and", "allow GU pairing # GU pairing allows AA to match GG in 1st", "\"\"\"General class for testing design.py Defines helper functions for test cases and basic", "args = design.argv_to_args(argv) design.run(args) # Base args set the percentage of sequences to", "it's not the specificity taxonomic ID, test fetching the real # sequences, but", "guide_line = line.split('\\t')[col_loc] guides = guide_line.split(' ') for guide in guides: self.assertIn(guide, expected[i-1])", "from adapt.prepare import align, ncbi_neighbors, prepare_alignment from adapt.utils import seq_io from bin import", "self.assertLess(i, len(expected) + 1) guide_line = line.split('\\t')[col_loc] guides = guide_line.split(' ') for guide", "'.75', '-pl', '1', '--max-primers-at-site', '2']) if objective == 'minimize-guides': argv.extend(['-gm', '0', '-gp', '.75'])", "tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so that it can be reopened on", "of input to be specific against output_loc: path to the output file/directory; set", "if input_type in ['auto-from-args', 'auto-from-file']: argv.extend(['--sample-seqs', '1', '--mafft-path', 'fake_path']) if search_type == 'sliding-window':", "super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name) args = design.argv_to_args(argv) design.run(args) # AA isn't allowed in 1st window", "enumerate(f): if i == 0: headers = line.split('\\t') # Will raise an error", "to temporary input fasta seq_io.write_fasta(SEQS, self.input_file.name) def test_min_guides(self): argv = super().baseArgv() args =", "self.output_dir.cleanup() class TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test design.py fully through \"\"\" def setUp(self): super().setUp() # Write", "small_curate # Aligning requires 
MAFFT, so override this function and output simple #", "file self.output_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.output_file.close() self.files_to_delete = [self.input_file.name, self.output_file.name] def check_results(self, file,", "that it can be reopened on Windows self.input_file.close() # Create a temporary output", "[[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_complete_targets(self): argv = super().baseArgv(search_type='complete-targets') args = design.argv_to_args(argv)", "\"\"\"Test design.py given an input FASTA \"\"\" def setUp(self): super().setUp() self.real_output_file = self.output_file.name", "file self.input_file = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so that it can", "Curating requires MAFFT, so override this function; store original so # it can", "testing. See design.py help for details on input Args: search_type: 'sliding-window' or 'complete-targets'", "binary prediction with 0 mismatches # (so same outputs as min-guides) expected =", "so override this function; store original so # it can be fixed for", "in model, false to use simple binary prediction specific: None, 'fasta', or 'taxa';", "can be fixed for future tests self.set_mafft_exec = align.set_mafft_exec align.set_mafft_exec = lambda mafft_path:", "from adapt import alignment from adapt.prepare import align, ncbi_neighbors, prepare_alignment from adapt.utils import", "so store the name of that file self.real_output_file = self.output_file.name + '.0.tsv' self.files_to_delete.extend([self.sp_file.name,", "to match GG in 1st window SEQS = OrderedDict() SEQS[\"genome_1\"] = \"AACTA\" SEQS[\"genome_2\"]", "design.py Defines helper functions for test cases and basic setUp and tearDown functions.", "test output guide target sequences do not equal the expected guide target sequences", "self.output_file.name if None Returns: List of strings that are the arguments of the", "'0']) 
argv.extend(['--obj', objective, '--seed', '0', '-gl', '2']) return argv def tearDown(self): for file", "the entire design.py; prematurely stops by giving a fake path to MAFFT. All", "specific=None, specificity_file=None, output_loc=None): \"\"\"Get arguments for tests Produces the correct arguments for a", "pass def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file='') args = design.argv_to_args(argv) try: design.run(args)", "being the best guide in the 1st window SP_SEQS = OrderedDict() SP_SEQS[\"genome_5\"] =", "\"\"\"Get arguments for tests Produces the correct arguments for a test case given", "test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file='') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError:", "to return a FileNotFoundError \"\"\" def setUp(self): super().setUp() # Write to temporary input", "len(expected)) def baseArgv(self, search_type='sliding-window', input_type='fasta', objective='minimize-guides', model=False, specific=None, specificity_file=None, output_loc=None): \"\"\"Get arguments for", "# Same output as test_specificity_fasta, as sequences are the same expected = [[\"AC\",", "file so that it can be reopened on Windows self.sp_file.close() # 'auto-from-args' gives", "seqs, am=None: SEQS # We don't want to fetch sequences for the specificity", "# only finds 1 target in middle expected = [[\"CT\"]] self.check_results(self.real_output_file, expected, header='guide-target-sequences')", "'-gm', '0']) argv.extend(['--obj', objective, '--seed', '0', '-gl', '2']) return argv def tearDown(self): for", "with open(file) as f: for i, line in enumerate(f): if i == 0:", "of what the test case is testing. 
See design.py help for details on", "def test_max_activity(self): argv = super().baseArgv(objective='maximize-activity') args = design.argv_to_args(argv) design.run(args) # Doesn't use model,", "in seqs \\ if seq.split('.')[0] not in remove_ref_accs} align.curate_against_ref = small_curate # Aligning", "function; store original # so it can be fixed for future tests self.fetch_sequences_for_taxonomy", "prepare_alignment from adapt.utils import seq_io from bin import design __author__ = '<NAME> <<EMAIL>>'", "SEQS[\"genome_4\"] = \"GGCTT\" # Specificity seq stops AA from being the best guide", "# Closes the file so that it can be reopened on Windows self.input_file.close()", "fixed for future tests self.align = align.align align.align = lambda seqs, am=None: SEQS", "prepare_alignment.fetch_sequences_for_taxonomy = small_fetch # Disable warning logging to avoid annotation warning logging.disable(logging.WARNING) def", "logging to avoid annotation warning logging.disable(logging.WARNING) def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file=self.sp_file.name)", "header='target-sequences'): \"\"\"Check the results of the test output Given a TSV file of", "= self.output_file.name + '.0.tsv' self.files_to_delete.extend([self.sp_file.name, self.real_output_file]) # We cannot access MAFFT, so override", "# Specificity seq stops AA from being the best guide in the 1st", "in enumerate(f): if i == 0: headers = line.split('\\t') # Will raise an", "= line.split('\\t') # Will raise an error if header is not in output", "file since we're # doing a simple test case, so override this function;", "so that it can be reopened on Windows self.sp_file.close() # 'auto-from-args' gives different", "cluster # Our test only produces 1 cluster, so store the name of", "objective == 'minimize-guides': argv.extend(['-gm', '0', '-gp', '.75']) elif objective =='maximize-activity': argv.extend(['--maximization-algorithm', 'greedy']) 
#", "'minimize-guides' or 'maximize-activity' model: boolean, true to use Cas13a built in model, false", "that it can be reopened on Windows self.sp_file.close() # 'auto-from-args' gives different outputs", "baseArgv(self, search_type='sliding-window', input_type='fasta', objective='minimize-guides', model=False, specific=None, specificity_file=None, output_loc=None): \"\"\"Get arguments for tests Produces", "the test \"\"\" input_file = self.input_file.name if output_loc is None: output_loc = self.output_file.name", "file expected: list of lists of strings, all the expected guide target sequences", "real # sequences, but don't return them as they won't be used else:", "input FASTA \"\"\" def setUp(self): super().setUp() self.real_output_file = self.output_file.name + '.tsv' self.files_to_delete.append(self.real_output_file) #", "to MAFFT. All are expected to return a FileNotFoundError \"\"\" def setUp(self): super().setUp()", "= [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_max_activity(self): argv = super().baseArgv(objective='maximize-activity') args =", "=='maximize-activity': argv.extend(['--maximization-algorithm', 'greedy']) # ID-M (mismatches to be considered identical) must be set", "for future tests self.curate_against_ref = align.curate_against_ref def small_curate(seqs, ref_accs, asm=None, remove_ref_accs=[]): return {seq:", "each line of the output header: the header of the CSV that contains", "correct arguments for a test case given details of what the test case", "Returns: List of strings that are the arguments of the test \"\"\" input_file", "be specific against output_loc: path to the output file/directory; set to self.output_file.name if", "def test_auto_from_args(self): argv = super().baseArgv(input_type='auto-from-args') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass", "= design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def 
test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa',", "every cluster # Our test only produces 1 cluster, so store the name", "sequences \"\"\" col_loc = None with open(file) as f: for i, line in", "output_loc]) if input_type in ['auto-from-args', 'auto-from-file']: argv.extend(['--sample-seqs', '1', '--mafft-path', 'fake_path']) if search_type ==", "taxonomic ID, test fetching the real # sequences, but don't return them as", "Create a temporary output file self.output_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.output_file.close() self.files_to_delete = [self.input_file.name,", "'auto-from-args', or 'auto-from-file' objective: 'minimize-guides' or 'maximize-activity' model: boolean, true to use Cas13a", "of test output and expected output, fails the test if the test output", "= super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name) args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def test_auto_from_args(self):", "if input_type == 'fasta': argv.extend([input_file, '-o', output_loc]) elif input_type == 'auto-from-args': argv.extend(['64320', 'None',", "sequences do not equal the expected guide target sequences Args: file: string, path", "self.real_output_file]) # We cannot access MAFFT, so override this function; store original so", "SEQS prepare_alignment.fetch_sequences_for_taxonomy = small_fetch # Disable warning logging to avoid annotation warning logging.disable(logging.WARNING)", "setUp(self): # Disable logging logging.disable(logging.INFO) # Create a temporary input file self.input_file =", "reopened on Windows self.sp_fasta.close() seq_io.write_fasta(SP_SEQS, self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name) argv = super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name) args =", "self.output_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.output_file.close() self.files_to_delete = 
[self.input_file.name, self.output_file.name] def check_results(self, file, expected,", "guide target sequences in each line of the output header: the header of", "search_type == 'complete-targets': argv.extend(['--best-n-targets', '2', '-pp', '.75', '-pl', '1', '--max-primers-at-site', '2']) if objective", "tests self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy def small_fetch(taxid, segment): # 123 is the taxonomic ID", "(so same outputs as min-guides) expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def", "self.input_file.close() # Create a temporary output file self.output_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.output_file.close() self.files_to_delete", "requires MAFFT, so override this function and output simple # test sequences; store", "of the CSV that contains the guide target sequences \"\"\" col_loc = None", "adapt.utils import seq_io from bin import design __author__ = '<NAME> <<EMAIL>>' # Default", "to use simple binary prediction specific: None, 'fasta', or 'taxa'; what sort of", "Create a temporary fasta file for specificity self.sp_fasta = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes", "requires MAFFT, so override this function; store original so # it can be", "test_complete_targets(self): argv = super().baseArgv(search_type='complete-targets') args = design.argv_to_args(argv) design.run(args) # Since sequences are short", "the percentage of sequences to match at 75% expected = [[\"AA\"], [\"CT\"], [\"CT\"]]", "stops by giving a fake path to MAFFT. 
All are expected to return", "output_loc=None): \"\"\"Get arguments for tests Produces the correct arguments for a test case", "file: string, path name of the file expected: list of lists of strings,", "a 2 base guide counts as a match if specific == 'fasta': argv.extend(['--specific-against-fastas',", "design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file='')", "super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file='') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def tearDown(self):", "[\"CT\"]] self.check_results(self.real_output_file, expected) def tearDown(self): # Fix all overridden functions align.set_mafft_exec = self.set_mafft_exec", "to avoid annotation warning logging.disable(logging.WARNING) def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file=self.sp_file.name) args", "just greedy binary prediction with 0 mismatches # (so same outputs as min-guides)", "sequences Args: file: string, path name of the file expected: list of lists", "ref_accs, asm=None, remove_ref_accs=[]): return {seq: seqs[seq] for seq in seqs \\ if seq.split('.')[0]", "\"\"\" class TestDesignCase(unittest.TestCase): def setUp(self): # Disable logging logging.disable(logging.INFO) # Create a temporary", "target in middle expected = [[\"CT\"]] self.check_results(self.real_output_file, expected, header='guide-target-sequences') def test_specificity_fastas(self): # Create", "'.75']) elif objective =='maximize-activity': argv.extend(['--maximization-algorithm', 'greedy']) # ID-M (mismatches to be considered identical)", "can be reopened on Windows self.input_file.close() # Create a temporary output file self.output_file", "avoid annotation warning logging.disable(logging.WARNING) def test_specificity_taxa(self): argv = 
super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file=self.sp_file.name) args =", "None Returns: List of strings that are the arguments of the test \"\"\"", "virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary output directory self.output_dir = tempfile.TemporaryDirectory() def test_auto_from_file(self): argv", "it can be reopened on Windows self.sp_file.close() # 'auto-from-args' gives different outputs for", "in remove_ref_accs} align.curate_against_ref = small_curate # Aligning requires MAFFT, so override this function", "for details on input Args: search_type: 'sliding-window' or 'complete-targets' input_type: 'fasta', 'auto-from-args', or", "arguments of the test \"\"\" input_file = self.input_file.name if output_loc is None: output_loc", "123: return SP_SEQS # If it's not the specificity taxonomic ID, test fetching", "# 'auto-from-args' gives different outputs for every cluster # Our test only produces", "# We cannot access MAFFT, so override this function; store original so #", "future tests self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy def small_fetch(taxid, segment): # 123 is the taxonomic", "expected) def tearDown(self): # Fix all overridden functions align.set_mafft_exec = self.set_mafft_exec align.curate_against_ref =", "fails the test if the test output guide target sequences do not equal", "[[\"CT\"]] self.check_results(self.real_output_file, expected, header='guide-target-sequences') def test_specificity_fastas(self): # Create a temporary fasta file for", "output as test_specificity_fasta, as sequences are the same expected = [[\"AC\", \"GG\"], [\"CT\"],", "== 123: return SP_SEQS # If it's not the specificity taxonomic ID, test", "# Closes the file so that it can be reopened on Windows self.sp_file.close()", "for design.py \"\"\" import random import os import copy import tempfile import unittest", "not in output col_loc = headers.index(header) continue self.assertLess(i, 
len(expected) + 1) guide_line =", "file in self.files_to_delete: if os.path.isfile(file): os.unlink(file) # Re-enable logging logging.disable(logging.NOTSET) class TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test", "self.set_mafft_exec = align.set_mafft_exec align.set_mafft_exec = lambda mafft_path: None # Curating requires MAFFT, so", "self.curate_against_ref = align.curate_against_ref def small_curate(seqs, ref_accs, asm=None, remove_ref_accs=[]): return {seq: seqs[seq] for seq", "having 1 base in common with a 2 base guide counts as a", "and need 1 base for primer on each side, # only finds 1", "setUp(self): super().setUp() self.real_output_file = self.output_file.name + '.tsv' self.files_to_delete.append(self.real_output_file) # Write to temporary input", "considered identical) must be set to 0 since otherwise # having 1 base", "args = design.argv_to_args(argv) design.run(args) # Doesn't use model, just greedy binary prediction with", "design.argv_to_args(argv) design.run(args) # Since sequences are short and need 1 base for primer", "functions. 
\"\"\" class TestDesignCase(unittest.TestCase): def setUp(self): # Disable logging logging.disable(logging.INFO) # Create a", "fasta file for specificity self.sp_fasta = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so", "name of that file self.real_output_file = self.output_file.name + '.0.tsv' self.files_to_delete.extend([self.sp_file.name, self.real_output_file]) # We", "f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary output directory self.output_dir = tempfile.TemporaryDirectory() def", "class TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test design.py fully through \"\"\" def setUp(self): super().setUp() # Write to", "self.sp_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.sp_file.write(\"123\\tNone\\n\") # Closes the file so that it can", "original so # it can be fixed for future tests self.curate_against_ref = align.curate_against_ref", "argv def tearDown(self): for file in self.files_to_delete: if os.path.isfile(file): os.unlink(file) # Re-enable logging", "remove_ref_accs=[]): return {seq: seqs[seq] for seq in seqs \\ if seq.split('.')[0] not in", "arguments for tests Produces the correct arguments for a test case given details", "def test_auto_from_file(self): argv = super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name) args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError:", "output file self.output_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.output_file.close() self.files_to_delete = [self.input_file.name, self.output_file.name] def check_results(self,", "1st window SEQS = OrderedDict() SEQS[\"genome_1\"] = \"AACTA\" SEQS[\"genome_2\"] = \"AAACT\" SEQS[\"genome_3\"] =", "return {seq: seqs[seq] for seq in seqs \\ if seq.split('.')[0] not in remove_ref_accs}", "short and need 1 base for primer on each side, # only finds", "align.curate_against_ref = small_curate # Aligning requires MAFFT, so override this 
function and output", "small_curate(seqs, ref_accs, asm=None, remove_ref_accs=[]): return {seq: seqs[seq] for seq in seqs \\ if", "override this function; store original # so it can be fixed for future", "the real # sequences, but don't return them as they won't be used", "SP_SEQS = OrderedDict() SP_SEQS[\"genome_5\"] = \"AA---\" class TestDesign(object): \"\"\"General class for testing design.py", "guide size 2, allow GU pairing # GU pairing allows AA to match", "SEQS[\"genome_3\"] = \"GGCTA\" SEQS[\"genome_4\"] = \"GGCTT\" # Specificity seq stops AA from being", "import logging from collections import OrderedDict from argparse import Namespace from adapt import", "the test if the test output guide target sequences do not equal the", "guide target sequences Args: file: string, path name of the file expected: list", "primer on each side, # only finds 1 target in middle expected =", "temporary output directory self.output_dir = tempfile.TemporaryDirectory() def test_auto_from_file(self): argv = super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name) args", "all the expected guide target sequences in each line of the output header:", "specificity_file=None, output_loc=None): \"\"\"Get arguments for tests Produces the correct arguments for a test", "for the specificity file since we're # doing a simple test case, so", "design.py fully through \"\"\" def setUp(self): super().setUp() # Write to temporary input file", "seq_io from bin import design __author__ = '<NAME> <<EMAIL>>' # Default args: window", "random import os import copy import tempfile import unittest import logging from collections", "with 0 mismatches # (so same outputs as min-guides) expected = [[\"AA\"], [\"CT\"],", "specific == 'taxa': argv.extend(['--specific-against-taxa', specificity_file, '--id-m', '0']) if model: argv.append('--predict-cas13a-activity-model') elif objective =='maximize-activity':", "temporary input fasta seq_io.write_fasta(SEQS, self.input_file.name) def 
test_min_guides(self): argv = super().baseArgv() args = design.argv_to_args(argv)", "this function; store original so # it can be fixed for future tests", "col_loc = None with open(file) as f: for i, line in enumerate(f): if", "\"\"\" input_file = self.input_file.name if output_loc is None: output_loc = self.output_file.name argv =", "to self.output_file.name if None Returns: List of strings that are the arguments of", "'2', '-pp', '.75', '-pl', '1', '--max-primers-at-site', '2']) if objective == 'minimize-guides': argv.extend(['-gm', '0',", "'minimize-guides': argv.extend(['-gm', '0', '-gp', '.75']) elif objective =='maximize-activity': argv.extend(['--maximization-algorithm', 'greedy']) # ID-M (mismatches", "fixed for future tests self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy def small_fetch(taxid, segment): # 123 is", "FASTAs Does not run the entire design.py; prematurely stops by giving a fake", "prediction with 0 mismatches # (so same outputs as min-guides) expected = [[\"AA\"],", "of the file expected: list of lists of strings, all the expected guide", "= \"GGCTT\" # Specificity seq stops AA from being the best guide in", "pass def test_auto_from_args(self): argv = super().baseArgv(input_type='auto-from-args') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError:", "TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test design.py fully through \"\"\" def setUp(self): super().setUp() # Write to temporary", "store original so # it can be fixed for future tests self.set_mafft_exec =", "# Doesn't use model, just greedy binary prediction with 0 mismatches # (so", "this function and output simple # test sequences; store original so it can", "argv.extend(['-gm', '0', '-gp', '.75']) elif objective =='maximize-activity': argv.extend(['--maximization-algorithm', 'greedy']) # ID-M (mismatches to", "headers.index(header) continue self.assertLess(i, len(expected) + 1) guide_line = line.split('\\t')[col_loc] 
guides = guide_line.split(' ')", "import design __author__ = '<NAME> <<EMAIL>>' # Default args: window size 3, guide", "logging logging.disable(logging.NOTSET) class TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test design.py given an input FASTA \"\"\" def setUp(self):", "not the specificity taxonomic ID, test fetching the real # sequences, but don't", "adapt.prepare import align, ncbi_neighbors, prepare_alignment from adapt.utils import seq_io from bin import design", "target sequences in each line of the output header: the header of the", "expected[i-1]) self.assertEqual(len(guides), len(expected[i-1])) self.assertEqual(i, len(expected)) def baseArgv(self, search_type='sliding-window', input_type='fasta', objective='minimize-guides', model=False, specific=None, specificity_file=None,", "def setUp(self): super().setUp() # Write to temporary input file with open(self.input_file.name, 'w') as", "output guide target sequences do not equal the expected guide target sequences Args:", "our specificity file if taxid == 123: return SP_SEQS # If it's not", "percentage of sequences to match at 75% expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file,", "that it can be reopened on Windows self.sp_fasta.close() seq_io.write_fasta(SP_SEQS, self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name) argv =", "os import copy import tempfile import unittest import logging from collections import OrderedDict", "them as they won't be used else: self.fetch_sequences_for_taxonomy(taxid, segment) return SEQS prepare_alignment.fetch_sequences_for_taxonomy =", "# Disable logging logging.disable(logging.INFO) # Create a temporary input file self.input_file = tempfile.NamedTemporaryFile(mode='w',", "target sequences do not equal the expected guide target sequences Args: file: string,", "case, so override this function; store original # so it can be fixed", "delete=False) # Closes the file so that it can be reopened on Windows", "input_type in 
['auto-from-args', 'auto-from-file']: argv.extend(['--sample-seqs', '1', '--mafft-path', 'fake_path']) if search_type == 'sliding-window': argv.extend(['-w',", "+ '.0.tsv' self.files_to_delete.extend([self.sp_file.name, self.real_output_file]) # We cannot access MAFFT, so override this function;", "super().baseArgv(search_type='complete-targets') args = design.argv_to_args(argv) design.run(args) # Since sequences are short and need 1", "expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_complete_targets(self): argv = super().baseArgv(search_type='complete-targets') args", "input_type == 'fasta': argv.extend([input_file, '-o', output_loc]) elif input_type == 'auto-from-args': argv.extend(['64320', 'None', output_loc])", "'--id-m', '0']) elif specific == 'taxa': argv.extend(['--specific-against-taxa', specificity_file, '--id-m', '0']) if model: argv.append('--predict-cas13a-activity-model')", "specificity_file='') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def tearDown(self): super().tearDown() self.output_dir.cleanup()", "OrderedDict() SP_SEQS[\"genome_5\"] = \"AA---\" class TestDesign(object): \"\"\"General class for testing design.py Defines helper", "Closes the file so that it can be reopened on Windows self.input_file.close() #", "a match if specific == 'fasta': argv.extend(['--specific-against-fastas', specificity_file, '--id-m', '0']) elif specific ==", "= tempfile.NamedTemporaryFile(mode='w', delete=False) self.sp_file.write(\"123\\tNone\\n\") # Closes the file so that it can be", "'0', '-gl', '2']) return argv def tearDown(self): for file in self.files_to_delete: if os.path.isfile(file):", "bin import design __author__ = '<NAME> <<EMAIL>>' # Default args: window size 3,", "def test_min_guides(self): argv = super().baseArgv() args = design.argv_to_args(argv) design.run(args) # Base args set", "Create a temporary specificity file self.sp_file = 
tempfile.NamedTemporaryFile(mode='w', delete=False) self.sp_file.write(\"123\\tNone\\n\") # Closes the", "self.real_output_file = self.output_file.name + '.0.tsv' self.files_to_delete.extend([self.sp_file.name, self.real_output_file]) # We cannot access MAFFT, so", "it can be fixed for future tests self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy def small_fetch(taxid, segment):", "design.run(args) except FileNotFoundError: pass def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file='') args =", "123 is the taxonomic ID used in our specificity file if taxid ==", "if search_type == 'sliding-window': argv.extend(['-w', '3']) if search_type == 'complete-targets': argv.extend(['--best-n-targets', '2', '-pp',", "'3']) if search_type == 'complete-targets': argv.extend(['--best-n-targets', '2', '-pp', '.75', '-pl', '1', '--max-primers-at-site', '2'])", "same outputs as min-guides) expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_complete_targets(self):", "gives different outputs for every cluster # Our test only produces 1 cluster,", "and basic setUp and tearDown functions. 
\"\"\" class TestDesignCase(unittest.TestCase): def setUp(self): # Disable", "return SEQS prepare_alignment.fetch_sequences_for_taxonomy = small_fetch # Disable warning logging to avoid annotation warning", "argv = ['design.py', search_type, input_type] if input_type == 'fasta': argv.extend([input_file, '-o', output_loc]) elif", "['auto-from-args', 'auto-from-file']: argv.extend(['--sample-seqs', '1', '--mafft-path', 'fake_path']) if search_type == 'sliding-window': argv.extend(['-w', '3']) if", "sequences are short and need 1 base for primer on each side, #", "base in common with a 2 base guide counts as a match if", "outputs as min-guides) expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_complete_targets(self): argv", "function; store original so # it can be fixed for future tests self.curate_against_ref", "access MAFFT, so override this function; store original so # it can be", "design __author__ = '<NAME> <<EMAIL>>' # Default args: window size 3, guide size", "= line.split('\\t')[col_loc] guides = guide_line.split(' ') for guide in guides: self.assertIn(guide, expected[i-1]) self.assertEqual(len(guides),", "for every cluster # Our test only produces 1 cluster, so store the", "false to use simple binary prediction specific: None, 'fasta', or 'taxa'; what sort", "seqs[seq] for seq in seqs \\ if seq.split('.')[0] not in remove_ref_accs} align.curate_against_ref =", "don't want to fetch sequences for the specificity file since we're # doing", "the taxonomic ID used in our specificity file if taxid == 123: return", "tempfile import unittest import logging from collections import OrderedDict from argparse import Namespace", "= tempfile.TemporaryDirectory() def test_auto_from_file(self): argv = super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name) args = design.argv_to_args(argv) try: design.run(args)", "be fixed for future tests self.set_mafft_exec = align.set_mafft_exec 
align.set_mafft_exec = lambda mafft_path: None", "don't return them as they won't be used else: self.fetch_sequences_for_taxonomy(taxid, segment) return SEQS", "the test output guide target sequences do not equal the expected guide target", "seq_io.write_fasta(SEQS, self.input_file.name) def test_min_guides(self): argv = super().baseArgv() args = design.argv_to_args(argv) design.run(args) # Base", "middle expected = [[\"CT\"]] self.check_results(self.real_output_file, expected, header='guide-target-sequences') def test_specificity_fastas(self): # Create a temporary", "stops AA from being the best guide in the 1st window SP_SEQS =", "against output_loc: path to the output file/directory; set to self.output_file.name if None Returns:", "self.input_file.name) def test_min_guides(self): argv = super().baseArgv() args = design.argv_to_args(argv) design.run(args) # Base args", "self.sp_fasta = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so that it can be", "specificity file since we're # doing a simple test case, so override this", "class TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test design.py given arguments to automatically download FASTAs Does not run", "self.output_file.name + '.tsv' self.files_to_delete.append(self.real_output_file) # Write to temporary input fasta seq_io.write_fasta(SEQS, self.input_file.name) def", "from collections import OrderedDict from argparse import Namespace from adapt import alignment from", "super().tearDown() self.output_dir.cleanup() class TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test design.py fully through \"\"\" def setUp(self): super().setUp() #", "is not in output col_loc = headers.index(header) continue self.assertLess(i, len(expected) + 1) guide_line", "import tempfile import unittest import logging from collections import OrderedDict from argparse import", "= [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) class 
TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test design.py given arguments", "def setUp(self): super().setUp() self.real_output_file = self.output_file.name + '.tsv' self.files_to_delete.append(self.real_output_file) # Write to temporary", "or 'complete-targets' input_type: 'fasta', 'auto-from-args', or 'auto-from-file' objective: 'minimize-guides' or 'maximize-activity' model: boolean,", "SP_SEQS # If it's not the specificity taxonomic ID, test fetching the real", "# 123 is the taxonomic ID used in our specificity file if taxid", "\"\"\" col_loc = None with open(file) as f: for i, line in enumerate(f):", "'1', '--mafft-path', 'fake_path']) if search_type == 'sliding-window': argv.extend(['-w', '3']) if search_type == 'complete-targets':", "= OrderedDict() SEQS[\"genome_1\"] = \"AACTA\" SEQS[\"genome_2\"] = \"AAACT\" SEQS[\"genome_3\"] = \"GGCTA\" SEQS[\"genome_4\"] =", "the output header: the header of the CSV that contains the guide target", "[\"CT\"]] self.check_results(self.real_output_file, expected) def test_max_activity(self): argv = super().baseArgv(objective='maximize-activity') args = design.argv_to_args(argv) design.run(args) #", "\"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) class TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test design.py given arguments to automatically", "'0', '-gp', '.75']) elif objective =='maximize-activity': argv.extend(['--maximization-algorithm', 'greedy']) # ID-M (mismatches to be", "output Given a TSV file of test output and expected output, fails the", "None with open(file) as f: for i, line in enumerate(f): if i ==", "helper functions for test cases and basic setUp and tearDown functions. 
\"\"\" class", "test \"\"\" input_file = self.input_file.name if output_loc is None: output_loc = self.output_file.name argv", "'complete-targets': argv.extend(['--best-n-targets', '2', '-pp', '.75', '-pl', '1', '--max-primers-at-site', '2']) if objective == 'minimize-guides':", "'.0.tsv' self.files_to_delete.extend([self.sp_file.name, self.real_output_file]) # We cannot access MAFFT, so override this function; store", "need 1 base for primer on each side, # only finds 1 target", "# Disable warning logging to avoid annotation warning logging.disable(logging.WARNING) def test_specificity_taxa(self): argv =", "design.run(args) except FileNotFoundError: pass def tearDown(self): super().tearDown() self.output_dir.cleanup() class TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test design.py fully", "tearDown(self): # Fix all overridden functions align.set_mafft_exec = self.set_mafft_exec align.curate_against_ref = self.curate_against_ref align.align", "that are the arguments of the test \"\"\" input_file = self.input_file.name if output_loc", "len(expected[i-1])) self.assertEqual(i, len(expected)) def baseArgv(self, search_type='sliding-window', input_type='fasta', objective='minimize-guides', model=False, specific=None, specificity_file=None, output_loc=None): \"\"\"Get", "in 1st window by specificity fasta, # so 1st window changes expected =", "Args: search_type: 'sliding-window' or 'complete-targets' input_type: 'fasta', 'auto-from-args', or 'auto-from-file' objective: 'minimize-guides' or", "test fetching the real # sequences, but don't return them as they won't", "= [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def tearDown(self): # Fix all overridden", "pairing allows AA to match GG in 1st window SEQS = OrderedDict() SEQS[\"genome_1\"]", "self.input_file = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so that it can be", "1 base in common with a 2 base guide counts as a match", 
"design.run(args) # Same output as test_specificity_fasta, as sequences are the same expected =", "in ['auto-from-args', 'auto-from-file']: argv.extend(['--sample-seqs', '1', '--mafft-path', 'fake_path']) if search_type == 'sliding-window': argv.extend(['-w', '3'])", "'0']) if model: argv.append('--predict-cas13a-activity-model') elif objective =='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0']) argv.extend(['--obj', objective, '--seed',", "FileNotFoundError: pass def test_auto_from_args(self): argv = super().baseArgv(input_type='auto-from-args') args = design.argv_to_args(argv) try: design.run(args) except", "objective =='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0']) argv.extend(['--obj', objective, '--seed', '0', '-gl', '2']) return argv", "= self.input_file.name if output_loc is None: output_loc = self.output_file.name argv = ['design.py', search_type,", "giving a fake path to MAFFT. All are expected to return a FileNotFoundError", "test case is testing. 
See design.py help for details on input Args: search_type:", "Closes the file so that it can be reopened on Windows self.sp_fasta.close() seq_io.write_fasta(SP_SEQS,", "We cannot access MAFFT, so override this function; store original so # it", "None, 'fasta', or 'taxa'; what sort of input to be specific against output_loc:", "'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary specificity file self.sp_file =", "store original # so it can be fixed for future tests self.fetch_sequences_for_taxonomy =", "= align.set_mafft_exec align.set_mafft_exec = lambda mafft_path: None # Curating requires MAFFT, so override", "'auto-from-file']: argv.extend(['--sample-seqs', '1', '--mafft-path', 'fake_path']) if search_type == 'sliding-window': argv.extend(['-w', '3']) if search_type", "guides = guide_line.split(' ') for guide in guides: self.assertIn(guide, expected[i-1]) self.assertEqual(len(guides), len(expected[i-1])) self.assertEqual(i,", "raise an error if header is not in output col_loc = headers.index(header) continue", "= prepare_alignment.fetch_sequences_for_taxonomy def small_fetch(taxid, segment): # 123 is the taxonomic ID used in", "Does not run the entire design.py; prematurely stops by giving a fake path", "FileNotFoundError: pass def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file='') args = design.argv_to_args(argv) try:", "import seq_io from bin import design __author__ = '<NAME> <<EMAIL>>' # Default args:", "seq stops AA from being the best guide in the 1st window SP_SEQS", "return a FileNotFoundError \"\"\" def setUp(self): super().setUp() # Write to temporary input file", "# sequences, but don't return them as they won't be used else: self.fetch_sequences_for_taxonomy(taxid,", "to the output file/directory; set to self.output_file.name if None Returns: List of strings", "argv = super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name) args = 
design.argv_to_args(argv) design.run(args) # AA isn't allowed in", "are short and need 1 base for primer on each side, # only", "taxonomic ID used in our specificity file if taxid == 123: return SP_SEQS", "a temporary input file self.input_file = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so", "that contains the guide target sequences \"\"\" col_loc = None with open(file) as", "outputs for every cluster # Our test only produces 1 cluster, so store", "path to MAFFT. All are expected to return a FileNotFoundError \"\"\" def setUp(self):", "Our test only produces 1 cluster, so store the name of that file", "= super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file='') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def", "SEQS # We don't want to fetch sequences for the specificity file since", "self.files_to_delete: if os.path.isfile(file): os.unlink(file) # Re-enable logging logging.disable(logging.NOTSET) class TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test design.py given", "set to self.output_file.name if None Returns: List of strings that are the arguments", "self.files_to_delete.extend([self.sp_file.name, self.real_output_file]) # We cannot access MAFFT, so override this function; store original", "used in our specificity file if taxid == 123: return SP_SEQS # If", "search_type == 'sliding-window': argv.extend(['-w', '3']) if search_type == 'complete-targets': argv.extend(['--best-n-targets', '2', '-pp', '.75',", "case given details of what the test case is testing. 
See design.py help", "'maximize-activity' model: boolean, true to use Cas13a built in model, false to use", "output col_loc = headers.index(header) continue self.assertLess(i, len(expected) + 1) guide_line = line.split('\\t')[col_loc] guides", "is None: output_loc = self.output_file.name argv = ['design.py', search_type, input_type] if input_type ==", "model=False, specific=None, specificity_file=None, output_loc=None): \"\"\"Get arguments for tests Produces the correct arguments for", "'auto-from-file' objective: 'minimize-guides' or 'maximize-activity' model: boolean, true to use Cas13a built in", "# Create a temporary output file self.output_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.output_file.close() self.files_to_delete =", "on each side, # only finds 1 target in middle expected = [[\"CT\"]]", "expected guide target sequences Args: file: string, path name of the file expected:", "guide target sequences \"\"\" col_loc = None with open(file) as f: for i,", "logging logging.disable(logging.INFO) # Create a temporary input file self.input_file = tempfile.NamedTemporaryFile(mode='w', delete=False) #", "+ 1) guide_line = line.split('\\t')[col_loc] guides = guide_line.split(' ') for guide in guides:", "size 2, allow GU pairing # GU pairing allows AA to match GG", "a test case given details of what the test case is testing. 
See", "store the name of that file self.real_output_file = self.output_file.name + '.0.tsv' self.files_to_delete.extend([self.sp_file.name, self.real_output_file])", "directory self.output_dir = tempfile.TemporaryDirectory() def test_auto_from_file(self): argv = super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name) args = design.argv_to_args(argv)", "alignment from adapt.prepare import align, ncbi_neighbors, prepare_alignment from adapt.utils import seq_io from bin", "temporary input file with open(self.input_file.name, 'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a", "reopened on Windows self.sp_file.close() # 'auto-from-args' gives different outputs for every cluster #", "\"AAACT\" SEQS[\"genome_3\"] = \"GGCTA\" SEQS[\"genome_4\"] = \"GGCTT\" # Specificity seq stops AA from", "as sequences are the same expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected)", "base guide counts as a match if specific == 'fasta': argv.extend(['--specific-against-fastas', specificity_file, '--id-m',", "in each line of the output header: the header of the CSV that", "test_auto_from_file(self): argv = super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name) args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass", "test_min_guides(self): argv = super().baseArgv() args = design.argv_to_args(argv) design.run(args) # Base args set the", "if header is not in output col_loc = headers.index(header) continue self.assertLess(i, len(expected) +", "only finds 1 target in middle expected = [[\"CT\"]] self.check_results(self.real_output_file, expected, header='guide-target-sequences') def", "75% expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_max_activity(self): argv = super().baseArgv(objective='maximize-activity')", "# Create a temporary fasta file for specificity self.sp_fasta = 
tempfile.NamedTemporaryFile(mode='w', delete=False) #", "can be fixed for future tests self.curate_against_ref = align.curate_against_ref def small_curate(seqs, ref_accs, asm=None,", "return SP_SEQS # If it's not the specificity taxonomic ID, test fetching the", "so it can be fixed for future tests self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy def small_fetch(taxid,", "line.split('\\t') # Will raise an error if header is not in output col_loc", "original # so it can be fixed for future tests self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy", "the test output Given a TSV file of test output and expected output,", "the CSV that contains the guide target sequences \"\"\" col_loc = None with", "== 'auto-from-args': argv.extend(['64320', 'None', output_loc]) elif input_type == 'auto-from-file': argv.extend([input_file, output_loc]) if input_type", "\"\"\" def setUp(self): super().setUp() self.real_output_file = self.output_file.name + '.tsv' self.files_to_delete.append(self.real_output_file) # Write to", "setUp and tearDown functions. 
\"\"\" class TestDesignCase(unittest.TestCase): def setUp(self): # Disable logging logging.disable(logging.INFO)", "seq_io.write_fasta(SP_SEQS, self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name) argv = super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name) args = design.argv_to_args(argv) design.run(args) # AA", "set the percentage of sequences to match at 75% expected = [[\"AA\"], [\"CT\"],", "'sliding-window' or 'complete-targets' input_type: 'fasta', 'auto-from-args', or 'auto-from-file' objective: 'minimize-guides' or 'maximize-activity' model:", "error if header is not in output col_loc = headers.index(header) continue self.assertLess(i, len(expected)", "self.assertEqual(len(guides), len(expected[i-1])) self.assertEqual(i, len(expected)) def baseArgv(self, search_type='sliding-window', input_type='fasta', objective='minimize-guides', model=False, specific=None, specificity_file=None, output_loc=None):", "download FASTAs Does not run the entire design.py; prematurely stops by giving a", "= super().baseArgv(input_type='auto-from-args') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def test_specificity_taxa(self): argv", "for specificity self.sp_fasta = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so that it", "= small_fetch # Disable warning logging to avoid annotation warning logging.disable(logging.WARNING) def test_specificity_taxa(self):", "[[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) class TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test design.py given arguments to", "as min-guides) expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_complete_targets(self): argv =", "prematurely stops by giving a fake path to MAFFT. 
All are expected to", "'fake_path']) if search_type == 'sliding-window': argv.extend(['-w', '3']) if search_type == 'complete-targets': argv.extend(['--best-n-targets', '2',", "\"\"\"Test design.py fully through \"\"\" def setUp(self): super().setUp() # Write to temporary input", "on Windows self.sp_file.close() # 'auto-from-args' gives different outputs for every cluster # Our", "so override this function and output simple # test sequences; store original so", "super().setUp() # Write to temporary input file with open(self.input_file.name, 'w') as f: f.write(\"Zika", "fasta, # so 1st window changes expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file,", "what sort of input to be specific against output_loc: path to the output", "if taxid == 123: return SP_SEQS # If it's not the specificity taxonomic", "of the output header: the header of the CSV that contains the guide", "align.align = lambda seqs, am=None: SEQS # We don't want to fetch sequences", "1st window by specificity fasta, # so 1st window changes expected = [[\"AC\",", "If it's not the specificity taxonomic ID, test fetching the real # sequences,", "def check_results(self, file, expected, header='target-sequences'): \"\"\"Check the results of the test output Given", "so that it can be reopened on Windows self.input_file.close() # Create a temporary", "= align.align align.align = lambda seqs, am=None: SEQS # We don't want to", "from argparse import Namespace from adapt import alignment from adapt.prepare import align, ncbi_neighbors,", "'auto-from-args' gives different outputs for every cluster # Our test only produces 1", "test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file=self.sp_file.name) args = design.argv_to_args(argv) design.run(args) # Same output", "TestDesignCase(unittest.TestCase): def setUp(self): # Disable logging logging.disable(logging.INFO) # Create a temporary input file", "allowed in 1st 
window by specificity fasta, # so 1st window changes expected", "file/directory; set to self.output_file.name if None Returns: List of strings that are the", "'2']) return argv def tearDown(self): for file in self.files_to_delete: if os.path.isfile(file): os.unlink(file) #", "test output Given a TSV file of test output and expected output, fails", "tests self.curate_against_ref = align.curate_against_ref def small_curate(seqs, ref_accs, asm=None, remove_ref_accs=[]): return {seq: seqs[seq] for", "seqs \\ if seq.split('.')[0] not in remove_ref_accs} align.curate_against_ref = small_curate # Aligning requires", "design.py; prematurely stops by giving a fake path to MAFFT. All are expected", "align.align align.align = lambda seqs, am=None: SEQS # We don't want to fetch", "Fix all overridden functions align.set_mafft_exec = self.set_mafft_exec align.curate_against_ref = self.curate_against_ref align.align = self.align", "of sequences to match at 75% expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected)", "argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file='') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass", "= tempfile.NamedTemporaryFile(mode='w', delete=False) self.output_file.close() self.files_to_delete = [self.input_file.name, self.output_file.name] def check_results(self, file, expected, header='target-sequences'):", "since otherwise # having 1 base in common with a 2 base guide", "args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def tearDown(self): super().tearDown() self.output_dir.cleanup() class", "design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def tearDown(self): super().tearDown() self.output_dir.cleanup() class TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test", "# Create a temporary input file self.input_file = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes 
the", "of strings, all the expected guide target sequences in each line of the", "Specificity seq stops AA from being the best guide in the 1st window", "SP_SEQS[\"genome_5\"] = \"AA---\" class TestDesign(object): \"\"\"General class for testing design.py Defines helper functions", "on Windows self.input_file.close() # Create a temporary output file self.output_file = tempfile.NamedTemporaryFile(mode='w', delete=False)", "\"AACTA\" SEQS[\"genome_2\"] = \"AAACT\" SEQS[\"genome_3\"] = \"GGCTA\" SEQS[\"genome_4\"] = \"GGCTT\" # Specificity seq", "specificity taxonomic ID, test fetching the real # sequences, but don't return them", "guide in the 1st window SP_SEQS = OrderedDict() SP_SEQS[\"genome_5\"] = \"AA---\" class TestDesign(object):", "test case, so override this function; store original # so it can be", "output file/directory; set to self.output_file.name if None Returns: List of strings that are", "argv.extend(['-w', '3']) if search_type == 'complete-targets': argv.extend(['--best-n-targets', '2', '-pp', '.75', '-pl', '1', '--max-primers-at-site',", "use model, just greedy binary prediction with 0 mismatches # (so same outputs", "# Our test only produces 1 cluster, so store the name of that", "lambda seqs, am=None: SEQS # We don't want to fetch sequences for the", "model, just greedy binary prediction with 0 mismatches # (so same outputs as", "'1', '--max-primers-at-site', '2']) if objective == 'minimize-guides': argv.extend(['-gm', '0', '-gp', '.75']) elif objective", "self.output_dir = tempfile.TemporaryDirectory() def test_auto_from_file(self): argv = super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name) args = design.argv_to_args(argv) try:", "warning logging to avoid annotation warning logging.disable(logging.WARNING) def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa',", "window changes expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, 
expected) class TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test", "match if specific == 'fasta': argv.extend(['--specific-against-fastas', specificity_file, '--id-m', '0']) elif specific == 'taxa':", "2 base guide counts as a match if specific == 'fasta': argv.extend(['--specific-against-fastas', specificity_file,", "design.argv_to_args(argv) design.run(args) # Base args set the percentage of sequences to match at", "pairing # GU pairing allows AA to match GG in 1st window SEQS", "design.argv_to_args(argv) design.run(args) # Doesn't use model, just greedy binary prediction with 0 mismatches", "= [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_complete_targets(self): argv = super().baseArgv(search_type='complete-targets') args =", "for future tests self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy def small_fetch(taxid, segment): # 123 is the", "fully through \"\"\" def setUp(self): super().setUp() # Write to temporary input file with", "# Aligning requires MAFFT, so override this function and output simple # test", "file if taxid == 123: return SP_SEQS # If it's not the specificity", "check_results(self, file, expected, header='target-sequences'): \"\"\"Check the results of the test output Given a", "'fasta': argv.extend(['--specific-against-fastas', specificity_file, '--id-m', '0']) elif specific == 'taxa': argv.extend(['--specific-against-taxa', specificity_file, '--id-m', '0'])", "allows AA to match GG in 1st window SEQS = OrderedDict() SEQS[\"genome_1\"] =", "# Re-enable logging logging.disable(logging.NOTSET) class TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test design.py given an input FASTA \"\"\"", "We don't want to fetch sequences for the specificity file since we're #", "for testing design.py Defines helper functions for test cases and basic setUp and", "the file so that it can be reopened on Windows self.input_file.close() # Create", "what the test case is testing. 
See design.py help for details on input", "= self.output_file.name + '.tsv' self.files_to_delete.append(self.real_output_file) # Write to temporary input fasta seq_io.write_fasta(SEQS, self.input_file.name)", "try: design.run(args) except FileNotFoundError: pass def tearDown(self): super().tearDown() self.output_dir.cleanup() class TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test design.py", "open(self.input_file.name, 'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary output directory self.output_dir", "seq.split('.')[0] not in remove_ref_accs} align.curate_against_ref = small_curate # Aligning requires MAFFT, so override", "segment) return SEQS prepare_alignment.fetch_sequences_for_taxonomy = small_fetch # Disable warning logging to avoid annotation", "search_type='sliding-window', input_type='fasta', objective='minimize-guides', model=False, specific=None, specificity_file=None, output_loc=None): \"\"\"Get arguments for tests Produces the", "temporary fasta file for specificity self.sp_fasta = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file", "overridden functions align.set_mafft_exec = self.set_mafft_exec align.curate_against_ref = self.curate_against_ref align.align = self.align prepare_alignment.fetch_sequences_for_taxonomy =", "self.input_file.name if output_loc is None: output_loc = self.output_file.name argv = ['design.py', search_type, input_type]", "if output_loc is None: output_loc = self.output_file.name argv = ['design.py', search_type, input_type] if", "if seq.split('.')[0] not in remove_ref_accs} align.curate_against_ref = small_curate # Aligning requires MAFFT, so", "except FileNotFoundError: pass def test_auto_from_args(self): argv = super().baseArgv(input_type='auto-from-args') args = design.argv_to_args(argv) try: design.run(args)", "'fasta', 'auto-from-args', or 'auto-from-file' objective: 'minimize-guides' or 'maximize-activity' model: boolean, true to use", "input_type == 
'auto-from-file': argv.extend([input_file, output_loc]) if input_type in ['auto-from-args', 'auto-from-file']: argv.extend(['--sample-seqs', '1', '--mafft-path',", "expected to return a FileNotFoundError \"\"\" def setUp(self): super().setUp() # Write to temporary", "produces 1 cluster, so store the name of that file self.real_output_file = self.output_file.name", "args = design.argv_to_args(argv) design.run(args) # Same output as test_specificity_fasta, as sequences are the", "# ID-M (mismatches to be considered identical) must be set to 0 since", "test sequences; store original so it can be fixed for future tests self.align", "'-pp', '.75', '-pl', '1', '--max-primers-at-site', '2']) if objective == 'minimize-guides': argv.extend(['-gm', '0', '-gp',", "args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args',", "best guide in the 1st window SP_SEQS = OrderedDict() SP_SEQS[\"genome_5\"] = \"AA---\" class", "so it can be fixed for future tests self.align = align.align align.align =", "window size 3, guide size 2, allow GU pairing # GU pairing allows", "self.files_to_delete.append(self.sp_fasta.name) argv = super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name) args = design.argv_to_args(argv) design.run(args) # AA isn't allowed", "through \"\"\" def setUp(self): super().setUp() # Write to temporary input file with open(self.input_file.name,", "# so 1st window changes expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected)", "design.argv_to_args(argv) design.run(args) # Same output as test_specificity_fasta, as sequences are the same expected", "in our specificity file if taxid == 123: return SP_SEQS # If it's", "self.assertIn(guide, expected[i-1]) self.assertEqual(len(guides), len(expected[i-1])) self.assertEqual(i, len(expected)) def baseArgv(self, search_type='sliding-window', 
input_type='fasta', objective='minimize-guides', model=False, specific=None,", "\"\"\" import random import os import copy import tempfile import unittest import logging", "guides: self.assertIn(guide, expected[i-1]) self.assertEqual(len(guides), len(expected[i-1])) self.assertEqual(i, len(expected)) def baseArgv(self, search_type='sliding-window', input_type='fasta', objective='minimize-guides', model=False,", "is testing. See design.py help for details on input Args: search_type: 'sliding-window' or", "temporary input file self.input_file = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so that", "as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary output directory self.output_dir = tempfile.TemporaryDirectory()", "file of test output and expected output, fails the test if the test", "this function; store original # so it can be fixed for future tests", "input Args: search_type: 'sliding-window' or 'complete-targets' input_type: 'fasta', 'auto-from-args', or 'auto-from-file' objective: 'minimize-guides'", "target sequences \"\"\" col_loc = None with open(file) as f: for i, line", "so that it can be reopened on Windows self.sp_fasta.close() seq_io.write_fasta(SP_SEQS, self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name) argv", "specificity_file, '--id-m', '0']) if model: argv.append('--predict-cas13a-activity-model') elif objective =='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0']) argv.extend(['--obj',", "the best guide in the 1st window SP_SEQS = OrderedDict() SP_SEQS[\"genome_5\"] = \"AA---\"", "the guide target sequences \"\"\" col_loc = None with open(file) as f: for", "ID used in our specificity file if taxid == 123: return SP_SEQS #", "Same output as test_specificity_fasta, as sequences are the same expected = [[\"AC\", \"GG\"],", "= [[\"CT\"]] self.check_results(self.real_output_file, expected, header='guide-target-sequences') def 
test_specificity_fastas(self): # Create a temporary fasta file", "on Windows self.sp_fasta.close() seq_io.write_fasta(SP_SEQS, self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name) argv = super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name) args = design.argv_to_args(argv)", "sequences; store original so it can be fixed for future tests self.align =", "sort of input to be specific against output_loc: path to the output file/directory;", "function; store original so # it can be fixed for future tests self.set_mafft_exec", "'sliding-window': argv.extend(['-w', '3']) if search_type == 'complete-targets': argv.extend(['--best-n-targets', '2', '-pp', '.75', '-pl', '1',", "prepare_alignment.fetch_sequences_for_taxonomy def small_fetch(taxid, segment): # 123 is the taxonomic ID used in our", "List of strings that are the arguments of the test \"\"\" input_file =", "GU pairing allows AA to match GG in 1st window SEQS = OrderedDict()", "to fetch sequences for the specificity file since we're # doing a simple", "# so it can be fixed for future tests self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy def", "expected, header='target-sequences'): \"\"\"Check the results of the test output Given a TSV file", "the expected guide target sequences Args: file: string, path name of the file", "must be set to 0 since otherwise # having 1 base in common", "= super().baseArgv() args = design.argv_to_args(argv) design.run(args) # Base args set the percentage of", "for tests Produces the correct arguments for a test case given details of", "argv = super().baseArgv(objective='maximize-activity') args = design.argv_to_args(argv) design.run(args) # Doesn't use model, just greedy", "sequences, but don't return them as they won't be used else: self.fetch_sequences_for_taxonomy(taxid, segment)", "from bin import design __author__ = '<NAME> <<EMAIL>>' # Default args: window size", "objective, '--seed', '0', '-gl', '2']) return argv 
def tearDown(self): for file in self.files_to_delete:", "# Default args: window size 3, guide size 2, allow GU pairing #", "expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) class TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test design.py given", "design.py given arguments to automatically download FASTAs Does not run the entire design.py;", "with open(self.input_file.name, 'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary specificity file", "\"\"\"Check the results of the test output Given a TSV file of test", "a temporary specificity file self.sp_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.sp_file.write(\"123\\tNone\\n\") # Closes the file", "MAFFT, so override this function; store original so # it can be fixed", "sequences in each line of the output header: the header of the CSV", "= lambda mafft_path: None # Curating requires MAFFT, so override this function; store", "the header of the CSV that contains the guide target sequences \"\"\" col_loc", "argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file=self.sp_file.name) args = design.argv_to_args(argv) design.run(args) # Same output as", "0: headers = line.split('\\t') # Will raise an error if header is not", "'0']) elif specific == 'taxa': argv.extend(['--specific-against-taxa', specificity_file, '--id-m', '0']) if model: argv.append('--predict-cas13a-activity-model') elif", "= design.argv_to_args(argv) design.run(args) # Base args set the percentage of sequences to match", "args set the percentage of sequences to match at 75% expected = [[\"AA\"],", "= \"GGCTA\" SEQS[\"genome_4\"] = \"GGCTT\" # Specificity seq stops AA from being the", "1) guide_line = line.split('\\t')[col_loc] guides = guide_line.split(' ') for guide in guides: self.assertIn(guide,", "Write to temporary input file with open(self.input_file.name, 'w') as f: f.write(\"Zika 
virus\\t64320\\tNone\\tNC_035889\\n\") #", "return them as they won't be used else: self.fetch_sequences_for_taxonomy(taxid, segment) return SEQS prepare_alignment.fetch_sequences_for_taxonomy", "the expected guide target sequences in each line of the output header: the", "Args: file: string, path name of the file expected: list of lists of", "def tearDown(self): # Fix all overridden functions align.set_mafft_exec = self.set_mafft_exec align.curate_against_ref = self.curate_against_ref", "it can be fixed for future tests self.curate_against_ref = align.curate_against_ref def small_curate(seqs, ref_accs,", "if specific == 'fasta': argv.extend(['--specific-against-fastas', specificity_file, '--id-m', '0']) elif specific == 'taxa': argv.extend(['--specific-against-taxa',", "given details of what the test case is testing. See design.py help for", "= self.output_file.name argv = ['design.py', search_type, input_type] if input_type == 'fasta': argv.extend([input_file, '-o',", "match GG in 1st window SEQS = OrderedDict() SEQS[\"genome_1\"] = \"AACTA\" SEQS[\"genome_2\"] =", "in middle expected = [[\"CT\"]] self.check_results(self.real_output_file, expected, header='guide-target-sequences') def test_specificity_fastas(self): # Create a", "it can be reopened on Windows self.input_file.close() # Create a temporary output file", "elif objective =='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0']) argv.extend(['--obj', objective, '--seed', '0', '-gl', '2']) return", "if os.path.isfile(file): os.unlink(file) # Re-enable logging logging.disable(logging.NOTSET) class TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test design.py given an", "header: the header of the CSV that contains the guide target sequences \"\"\"", "f: for i, line in enumerate(f): if i == 0: headers = line.split('\\t')", "delete=False) self.sp_file.write(\"123\\tNone\\n\") # Closes the file so that it can be reopened on", "warning logging.disable(logging.WARNING) def 
test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file=self.sp_file.name) args = design.argv_to_args(argv) design.run(args)", "'None', output_loc]) elif input_type == 'auto-from-file': argv.extend([input_file, output_loc]) if input_type in ['auto-from-args', 'auto-from-file']:", "os.unlink(file) # Re-enable logging logging.disable(logging.NOTSET) class TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test design.py given an input FASTA", "# GU pairing allows AA to match GG in 1st window SEQS =", "if i == 0: headers = line.split('\\t') # Will raise an error if", "super().baseArgv(objective='maximize-activity') args = design.argv_to_args(argv) design.run(args) # Doesn't use model, just greedy binary prediction", "argparse import Namespace from adapt import alignment from adapt.prepare import align, ncbi_neighbors, prepare_alignment", "GU pairing # GU pairing allows AA to match GG in 1st window", "1st window SP_SEQS = OrderedDict() SP_SEQS[\"genome_5\"] = \"AA---\" class TestDesign(object): \"\"\"General class for", "output header: the header of the CSV that contains the guide target sequences", "be reopened on Windows self.sp_file.close() # 'auto-from-args' gives different outputs for every cluster", "override this function and output simple # test sequences; store original so it", "can be reopened on Windows self.sp_fasta.close() seq_io.write_fasta(SP_SEQS, self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name) argv = super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name)", "string, path name of the file expected: list of lists of strings, all", "elif specific == 'taxa': argv.extend(['--specific-against-taxa', specificity_file, '--id-m', '0']) if model: argv.append('--predict-cas13a-activity-model') elif objective", "an error if header is not in output col_loc = headers.index(header) continue self.assertLess(i,", "header is not in output col_loc = headers.index(header) 
continue self.assertLess(i, len(expected) + 1)", "== 'fasta': argv.extend([input_file, '-o', output_loc]) elif input_type == 'auto-from-args': argv.extend(['64320', 'None', output_loc]) elif", "to match at 75% expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_max_activity(self):", "in the 1st window SP_SEQS = OrderedDict() SP_SEQS[\"genome_5\"] = \"AA---\" class TestDesign(object): \"\"\"General", "run the entire design.py; prematurely stops by giving a fake path to MAFFT.", "specific='taxa', specificity_file='') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def tearDown(self): super().tearDown()", "FileNotFoundError: pass def tearDown(self): super().tearDown() self.output_dir.cleanup() class TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test design.py fully through \"\"\"", "tearDown(self): super().tearDown() self.output_dir.cleanup() class TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test design.py fully through \"\"\" def setUp(self): super().setUp()", "guide_line.split(' ') for guide in guides: self.assertIn(guide, expected[i-1]) self.assertEqual(len(guides), len(expected[i-1])) self.assertEqual(i, len(expected)) def", "== 'auto-from-file': argv.extend([input_file, output_loc]) if input_type in ['auto-from-args', 'auto-from-file']: argv.extend(['--sample-seqs', '1', '--mafft-path', 'fake_path'])", "identical) must be set to 0 since otherwise # having 1 base in", "super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name) args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def test_auto_from_args(self): argv", "self.sp_file.close() # 'auto-from-args' gives different outputs for every cluster # Our test only", "super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file=self.sp_file.name) args = design.argv_to_args(argv) design.run(args) # Same output as test_specificity_fasta, as", "args = 
design.argv_to_args(argv) design.run(args) # AA isn't allowed in 1st window by specificity", "+ '.tsv' self.files_to_delete.append(self.real_output_file) # Write to temporary input fasta seq_io.write_fasta(SEQS, self.input_file.name) def test_min_guides(self):", "specificity_file, '--id-m', '0']) elif specific == 'taxa': argv.extend(['--specific-against-taxa', specificity_file, '--id-m', '0']) if model:", "self.sp_fasta.close() seq_io.write_fasta(SP_SEQS, self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name) argv = super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name) args = design.argv_to_args(argv) design.run(args) #", "the results of the test output Given a TSV file of test output", "\"\"\" def setUp(self): super().setUp() # Write to temporary input file with open(self.input_file.name, 'w')", "specificity file self.sp_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.sp_file.write(\"123\\tNone\\n\") # Closes the file so that", "temporary output file self.output_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.output_file.close() self.files_to_delete = [self.input_file.name, self.output_file.name] def", "want to fetch sequences for the specificity file since we're # doing a", "1st window changes expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) class TestDesignAutos(TestDesign.TestDesignCase):", "specific='taxa', specificity_file=self.sp_file.name) args = design.argv_to_args(argv) design.run(args) # Same output as test_specificity_fasta, as sequences", "window by specificity fasta, # so 1st window changes expected = [[\"AC\", \"GG\"],", "use simple binary prediction specific: None, 'fasta', or 'taxa'; what sort of input", "min-guides) expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_complete_targets(self): argv = super().baseArgv(search_type='complete-targets')", "objective='minimize-guides', 
model=False, specific=None, specificity_file=None, output_loc=None): \"\"\"Get arguments for tests Produces the correct arguments", "model, false to use simple binary prediction specific: None, 'fasta', or 'taxa'; what", "otherwise # having 1 base in common with a 2 base guide counts", "guide target sequences do not equal the expected guide target sequences Args: file:", "try: design.run(args) except FileNotFoundError: pass def test_auto_from_args(self): argv = super().baseArgv(input_type='auto-from-args') args = design.argv_to_args(argv)", "tempfile.NamedTemporaryFile(mode='w', delete=False) self.sp_file.write(\"123\\tNone\\n\") # Closes the file so that it can be reopened", "objective: 'minimize-guides' or 'maximize-activity' model: boolean, true to use Cas13a built in model,", "= ['design.py', search_type, input_type] if input_type == 'fasta': argv.extend([input_file, '-o', output_loc]) elif input_type", "headers = line.split('\\t') # Will raise an error if header is not in", "automatically download FASTAs Does not run the entire design.py; prematurely stops by giving", "pass def tearDown(self): super().tearDown() self.output_dir.cleanup() class TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test design.py fully through \"\"\" def", "import copy import tempfile import unittest import logging from collections import OrderedDict from", "= design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def test_auto_from_args(self): argv = super().baseArgv(input_type='auto-from-args') args", "self.files_to_delete = [self.input_file.name, self.output_file.name] def check_results(self, file, expected, header='target-sequences'): \"\"\"Check the results of", "that file self.real_output_file = self.output_file.name + '.0.tsv' self.files_to_delete.extend([self.sp_file.name, self.real_output_file]) # We cannot access", "tearDown(self): for file in self.files_to_delete: if os.path.isfile(file): os.unlink(file) # Re-enable logging 
logging.disable(logging.NOTSET) class", "functions align.set_mafft_exec = self.set_mafft_exec align.curate_against_ref = self.curate_against_ref align.align = self.align prepare_alignment.fetch_sequences_for_taxonomy = self.fetch_sequences_for_taxonomy", "as test_specificity_fasta, as sequences are the same expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]]", "file with open(self.input_file.name, 'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary output", "def tearDown(self): for file in self.files_to_delete: if os.path.isfile(file): os.unlink(file) # Re-enable logging logging.disable(logging.NOTSET)", "self.check_results(self.real_output_file, expected, header='guide-target-sequences') def test_specificity_fastas(self): # Create a temporary fasta file for specificity", "Windows self.sp_file.close() # 'auto-from-args' gives different outputs for every cluster # Our test", "# Write to temporary input fasta seq_io.write_fasta(SEQS, self.input_file.name) def test_min_guides(self): argv = super().baseArgv()", "mafft_path: None # Curating requires MAFFT, so override this function; store original so", "the test case is testing. 
See design.py help for details on input Args:", "tests self.set_mafft_exec = align.set_mafft_exec align.set_mafft_exec = lambda mafft_path: None # Curating requires MAFFT,", "def small_curate(seqs, ref_accs, asm=None, remove_ref_accs=[]): return {seq: seqs[seq] for seq in seqs \\", "set to 0 since otherwise # having 1 base in common with a", "[\"CT\"]] self.check_results(self.real_output_file, expected) class TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test design.py given arguments to automatically download FASTAs", "design.run(args) # AA isn't allowed in 1st window by specificity fasta, # so", "== 'complete-targets': argv.extend(['--best-n-targets', '2', '-pp', '.75', '-pl', '1', '--max-primers-at-site', '2']) if objective ==", "self.output_file.name + '.0.tsv' self.files_to_delete.extend([self.sp_file.name, self.real_output_file]) # We cannot access MAFFT, so override this", "can be fixed for future tests self.align = align.align align.align = lambda seqs,", "super().baseArgv() args = design.argv_to_args(argv) design.run(args) # Base args set the percentage of sequences", "specificity_file=self.sp_fasta.name) args = design.argv_to_args(argv) design.run(args) # AA isn't allowed in 1st window by", "col_loc = headers.index(header) continue self.assertLess(i, len(expected) + 1) guide_line = line.split('\\t')[col_loc] guides =", "[\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_complete_targets(self): argv = super().baseArgv(search_type='complete-targets') args = design.argv_to_args(argv) design.run(args)", "self.assertEqual(i, len(expected)) def baseArgv(self, search_type='sliding-window', input_type='fasta', objective='minimize-guides', model=False, specific=None, specificity_file=None, output_loc=None): \"\"\"Get arguments", "'auto-from-args': argv.extend(['64320', 'None', output_loc]) elif input_type == 'auto-from-file': argv.extend([input_file, output_loc]) if input_type in", "Write to temporary input fasta 
seq_io.write_fasta(SEQS, self.input_file.name) def test_min_guides(self): argv = super().baseArgv() args", "def baseArgv(self, search_type='sliding-window', input_type='fasta', objective='minimize-guides', model=False, specific=None, specificity_file=None, output_loc=None): \"\"\"Get arguments for tests", "design.py \"\"\" import random import os import copy import tempfile import unittest import", "are the arguments of the test \"\"\" input_file = self.input_file.name if output_loc is", "the specificity file since we're # doing a simple test case, so override", "only produces 1 cluster, so store the name of that file self.real_output_file =", "given an input FASTA \"\"\" def setUp(self): super().setUp() self.real_output_file = self.output_file.name + '.tsv'", "if objective == 'minimize-guides': argv.extend(['-gm', '0', '-gp', '.75']) elif objective =='maximize-activity': argv.extend(['--maximization-algorithm', 'greedy'])", "GG in 1st window SEQS = OrderedDict() SEQS[\"genome_1\"] = \"AACTA\" SEQS[\"genome_2\"] = \"AAACT\"", "future tests self.align = align.align align.align = lambda seqs, am=None: SEQS # We", "== 'minimize-guides': argv.extend(['-gm', '0', '-gp', '.75']) elif objective =='maximize-activity': argv.extend(['--maximization-algorithm', 'greedy']) # ID-M", "1 target in middle expected = [[\"CT\"]] self.check_results(self.real_output_file, expected, header='guide-target-sequences') def test_specificity_fastas(self): #", "input fasta seq_io.write_fasta(SEQS, self.input_file.name) def test_min_guides(self): argv = super().baseArgv() args = design.argv_to_args(argv) design.run(args)", "argv = super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name) args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def", "AA to match GG in 1st window SEQS = OrderedDict() SEQS[\"genome_1\"] = \"AACTA\"", "not equal the expected guide target sequences Args: file: string, path name of", "'--id-m', '0']) if model: 
argv.append('--predict-cas13a-activity-model') elif objective =='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0']) argv.extend(['--obj', objective,", "Aligning requires MAFFT, so override this function and output simple # test sequences;", "AA isn't allowed in 1st window by specificity fasta, # so 1st window", "argv.extend(['--specific-against-taxa', specificity_file, '--id-m', '0']) if model: argv.append('--predict-cas13a-activity-model') elif objective =='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0'])", "class TestDesign(object): \"\"\"General class for testing design.py Defines helper functions for test cases", "greedy binary prediction with 0 mismatches # (so same outputs as min-guides) expected", "as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary specificity file self.sp_file = tempfile.NamedTemporaryFile(mode='w',", "function and output simple # test sequences; store original so it can be", "test if the test output guide target sequences do not equal the expected", "self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy def small_fetch(taxid, segment): # 123 is the taxonomic ID used", "expected: list of lists of strings, all the expected guide target sequences in", "fixed for future tests self.curate_against_ref = align.curate_against_ref def small_curate(seqs, ref_accs, asm=None, remove_ref_accs=[]): return", "test_auto_from_args(self): argv = super().baseArgv(input_type='auto-from-args') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def", "Create a temporary output directory self.output_dir = tempfile.TemporaryDirectory() def test_auto_from_file(self): argv = super().baseArgv(input_type='auto-from-file',", "\"GGCTT\" # Specificity seq stops AA from being the best guide in the", "open(self.input_file.name, 'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a 
temporary specificity file self.sp_file", "header='guide-target-sequences') def test_specificity_fastas(self): # Create a temporary fasta file for specificity self.sp_fasta =", "{seq: seqs[seq] for seq in seqs \\ if seq.split('.')[0] not in remove_ref_accs} align.curate_against_ref", "= align.curate_against_ref def small_curate(seqs, ref_accs, asm=None, remove_ref_accs=[]): return {seq: seqs[seq] for seq in", "output, fails the test if the test output guide target sequences do not", "specificity self.sp_fasta = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so that it can", "cluster, so store the name of that file self.real_output_file = self.output_file.name + '.0.tsv'", "\"GGCTA\" SEQS[\"genome_4\"] = \"GGCTT\" # Specificity seq stops AA from being the best", "since we're # doing a simple test case, so override this function; store", "Disable logging logging.disable(logging.INFO) # Create a temporary input file self.input_file = tempfile.NamedTemporaryFile(mode='w', delete=False)", "class for testing design.py Defines helper functions for test cases and basic setUp", "in output col_loc = headers.index(header) continue self.assertLess(i, len(expected) + 1) guide_line = line.split('\\t')[col_loc]", "Since sequences are short and need 1 base for primer on each side,", "of the test output Given a TSV file of test output and expected", "align.set_mafft_exec = self.set_mafft_exec align.curate_against_ref = self.curate_against_ref align.align = self.align prepare_alignment.fetch_sequences_for_taxonomy = self.fetch_sequences_for_taxonomy super().tearDown()", "f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary specificity file self.sp_file = tempfile.NamedTemporaryFile(mode='w', delete=False)", "import Namespace from adapt import alignment from adapt.prepare import align, ncbi_neighbors, prepare_alignment from", "= [self.input_file.name, self.output_file.name] def check_results(self, file, expected, 
header='target-sequences'): \"\"\"Check the results of the", "specific: None, 'fasta', or 'taxa'; what sort of input to be specific against", "a temporary output directory self.output_dir = tempfile.TemporaryDirectory() def test_auto_from_file(self): argv = super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name)", "store original so # it can be fixed for future tests self.curate_against_ref =", "= small_curate # Aligning requires MAFFT, so override this function and output simple", "sequences for the specificity file since we're # doing a simple test case,", "small_fetch(taxid, segment): # 123 is the taxonomic ID used in our specificity file", "equal the expected guide target sequences Args: file: string, path name of the", "in self.files_to_delete: if os.path.isfile(file): os.unlink(file) # Re-enable logging logging.disable(logging.NOTSET) class TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test design.py", "def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file='') args = design.argv_to_args(argv) try: design.run(args) except", "None # Curating requires MAFFT, so override this function; store original so #", "align.curate_against_ref def small_curate(seqs, ref_accs, asm=None, remove_ref_accs=[]): return {seq: seqs[seq] for seq in seqs", "case is testing. See design.py help for details on input Args: search_type: 'sliding-window'", "is the taxonomic ID used in our specificity file if taxid == 123:", "self.check_results(self.real_output_file, expected) def tearDown(self): # Fix all overridden functions align.set_mafft_exec = self.set_mafft_exec align.curate_against_ref", "argv.extend(['--best-n-targets', '2', '-pp', '.75', '-pl', '1', '--max-primers-at-site', '2']) if objective == 'minimize-guides': argv.extend(['-gm',", "common with a 2 base guide counts as a match if specific ==", "by giving a fake path to MAFFT. 
All are expected to return a", "in 1st window SEQS = OrderedDict() SEQS[\"genome_1\"] = \"AACTA\" SEQS[\"genome_2\"] = \"AAACT\" SEQS[\"genome_3\"]", "a temporary output file self.output_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.output_file.close() self.files_to_delete = [self.input_file.name, self.output_file.name]", "# Closes the file so that it can be reopened on Windows self.sp_fasta.close()", "def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file=self.sp_file.name) args = design.argv_to_args(argv) design.run(args) # Same", "argv.extend([input_file, '-o', output_loc]) elif input_type == 'auto-from-args': argv.extend(['64320', 'None', output_loc]) elif input_type ==", "self.files_to_delete.append(self.real_output_file) # Write to temporary input fasta seq_io.write_fasta(SEQS, self.input_file.name) def test_min_guides(self): argv =", "f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary output directory self.output_dir = tempfile.TemporaryDirectory() def test_auto_from_file(self):", "be reopened on Windows self.sp_fasta.close() seq_io.write_fasta(SP_SEQS, self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name) argv = super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name) args", "input_type='fasta', objective='minimize-guides', model=False, specific=None, specificity_file=None, output_loc=None): \"\"\"Get arguments for tests Produces the correct", "basic setUp and tearDown functions. \"\"\" class TestDesignCase(unittest.TestCase): def setUp(self): # Disable logging", "specific against output_loc: path to the output file/directory; set to self.output_file.name if None", "test case given details of what the test case is testing. 
See design.py", "'--seed', '0', '-gl', '2']) return argv def tearDown(self): for file in self.files_to_delete: if", "= design.argv_to_args(argv) design.run(args) # Since sequences are short and need 1 base for", "test_specificity_fasta, as sequences are the same expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file,", "# Fix all overridden functions align.set_mafft_exec = self.set_mafft_exec align.curate_against_ref = self.curate_against_ref align.align =", "<<EMAIL>>' # Default args: window size 3, guide size 2, allow GU pairing", "\\ if seq.split('.')[0] not in remove_ref_accs} align.curate_against_ref = small_curate # Aligning requires MAFFT,", "design.run(args) except FileNotFoundError: pass def test_auto_from_args(self): argv = super().baseArgv(input_type='auto-from-args') args = design.argv_to_args(argv) try:", "specificity_file=self.sp_file.name) args = design.argv_to_args(argv) design.run(args) # Same output as test_specificity_fasta, as sequences are", "CSV that contains the guide target sequences \"\"\" col_loc = None with open(file)", "tests Produces the correct arguments for a test case given details of what", "import alignment from adapt.prepare import align, ncbi_neighbors, prepare_alignment from adapt.utils import seq_io from", "3, guide size 2, allow GU pairing # GU pairing allows AA to", "each side, # only finds 1 target in middle expected = [[\"CT\"]] self.check_results(self.real_output_file,", "isn't allowed in 1st window by specificity fasta, # so 1st window changes", "remove_ref_accs} align.curate_against_ref = small_curate # Aligning requires MAFFT, so override this function and", "tests self.align = align.align align.align = lambda seqs, am=None: SEQS # We don't", "the file expected: list of lists of strings, all the expected guide target", "class TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test design.py given an input FASTA \"\"\" def setUp(self): super().setUp() self.real_output_file", 
"search_type: 'sliding-window' or 'complete-targets' input_type: 'fasta', 'auto-from-args', or 'auto-from-file' objective: 'minimize-guides' or 'maximize-activity'", "doing a simple test case, so override this function; store original # so", "test output and expected output, fails the test if the test output guide", "from being the best guide in the 1st window SP_SEQS = OrderedDict() SP_SEQS[\"genome_5\"]", "\"\"\"Tests for design.py \"\"\" import random import os import copy import tempfile import", "tempfile.TemporaryDirectory() def test_auto_from_file(self): argv = super().baseArgv(input_type='auto-from-file', output_loc=self.output_dir.name) args = design.argv_to_args(argv) try: design.run(args) except", "segment): # 123 is the taxonomic ID used in our specificity file if", "ID, test fetching the real # sequences, but don't return them as they", "expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def tearDown(self): # Fix all", "prediction specific: None, 'fasta', or 'taxa'; what sort of input to be specific", "Will raise an error if header is not in output col_loc = headers.index(header)", "input_file = self.input_file.name if output_loc is None: output_loc = self.output_file.name argv = ['design.py',", "MAFFT, so override this function and output simple # test sequences; store original", "so 1st window changes expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) class", "or 'taxa'; what sort of input to be specific against output_loc: path to", "argv = super().baseArgv() args = design.argv_to_args(argv) design.run(args) # Base args set the percentage", "and expected output, fails the test if the test output guide target sequences", "# Create a temporary specificity file self.sp_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.sp_file.write(\"123\\tNone\\n\") # Closes", "2, allow GU pairing # GU pairing allows AA to match GG in", "counts as a 
match if specific == 'fasta': argv.extend(['--specific-against-fastas', specificity_file, '--id-m', '0']) elif", "# Will raise an error if header is not in output col_loc =", "\"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def tearDown(self): # Fix all overridden functions align.set_mafft_exec", "for guide in guides: self.assertIn(guide, expected[i-1]) self.assertEqual(len(guides), len(expected[i-1])) self.assertEqual(i, len(expected)) def baseArgv(self, search_type='sliding-window',", "expected) def test_max_activity(self): argv = super().baseArgv(objective='maximize-activity') args = design.argv_to_args(argv) design.run(args) # Doesn't use", "or 'maximize-activity' model: boolean, true to use Cas13a built in model, false to", "do not equal the expected guide target sequences Args: file: string, path name", "'taxa'; what sort of input to be specific against output_loc: path to the", "return argv def tearDown(self): for file in self.files_to_delete: if os.path.isfile(file): os.unlink(file) # Re-enable", "the arguments of the test \"\"\" input_file = self.input_file.name if output_loc is None:", "sequences to match at 75% expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def", "the specificity taxonomic ID, test fetching the real # sequences, but don't return", "# We don't want to fetch sequences for the specificity file since we're", "store original so it can be fixed for future tests self.align = align.align", "of the test \"\"\" input_file = self.input_file.name if output_loc is None: output_loc =", "in common with a 2 base guide counts as a match if specific", "the correct arguments for a test case given details of what the test", "logging.disable(logging.WARNING) def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file=self.sp_file.name) args = design.argv_to_args(argv) design.run(args) #", "override this function; store 
original so # it can be fixed for future", "annotation warning logging.disable(logging.WARNING) def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file=self.sp_file.name) args = design.argv_to_args(argv)", "design.run(args) # Base args set the percentage of sequences to match at 75%", "logging from collections import OrderedDict from argparse import Namespace from adapt import alignment", "functions for test cases and basic setUp and tearDown functions. \"\"\" class TestDesignCase(unittest.TestCase):", "fake path to MAFFT. All are expected to return a FileNotFoundError \"\"\" def", "the file so that it can be reopened on Windows self.sp_file.close() # 'auto-from-args'", "sequences are the same expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def", "args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def test_auto_from_args(self): argv = super().baseArgv(input_type='auto-from-args')", "output and expected output, fails the test if the test output guide target", "and tearDown functions. 
\"\"\" class TestDesignCase(unittest.TestCase): def setUp(self): # Disable logging logging.disable(logging.INFO) #", "as a match if specific == 'fasta': argv.extend(['--specific-against-fastas', specificity_file, '--id-m', '0']) elif specific", "name of the file expected: list of lists of strings, all the expected", "not run the entire design.py; prematurely stops by giving a fake path to", "path to the output file/directory; set to self.output_file.name if None Returns: List of", "finds 1 target in middle expected = [[\"CT\"]] self.check_results(self.real_output_file, expected, header='guide-target-sequences') def test_specificity_fastas(self):", "'<NAME> <<EMAIL>>' # Default args: window size 3, guide size 2, allow GU", "argv.append('--predict-cas13a-activity-model') elif objective =='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0']) argv.extend(['--obj', objective, '--seed', '0', '-gl', '2'])", "'-gl', '2']) return argv def tearDown(self): for file in self.files_to_delete: if os.path.isfile(file): os.unlink(file)", "output_loc]) elif input_type == 'auto-from-file': argv.extend([input_file, output_loc]) if input_type in ['auto-from-args', 'auto-from-file']: argv.extend(['--sample-seqs',", "design.argv_to_args(argv) design.run(args) # AA isn't allowed in 1st window by specificity fasta, #", "simple test case, so override this function; store original # so it can", "SEQS[\"genome_2\"] = \"AAACT\" SEQS[\"genome_3\"] = \"GGCTA\" SEQS[\"genome_4\"] = \"GGCTT\" # Specificity seq stops", "# doing a simple test case, so override this function; store original #", "design.run(args) # Since sequences are short and need 1 base for primer on", "it can be fixed for future tests self.align = align.align align.align = lambda", "self.check_results(self.real_output_file, expected) def test_complete_targets(self): argv = super().baseArgv(search_type='complete-targets') args = design.argv_to_args(argv) design.run(args) # Since", "model: 
argv.append('--predict-cas13a-activity-model') elif objective =='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0']) argv.extend(['--obj', objective, '--seed', '0', '-gl',", "setUp(self): super().setUp() # Write to temporary input file with open(self.input_file.name, 'w') as f:", "TestDesign(object): \"\"\"General class for testing design.py Defines helper functions for test cases and", "not in remove_ref_accs} align.curate_against_ref = small_curate # Aligning requires MAFFT, so override this", "but don't return them as they won't be used else: self.fetch_sequences_for_taxonomy(taxid, segment) return", "# Write to temporary input file with open(self.input_file.name, 'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\")", "open(file) as f: for i, line in enumerate(f): if i == 0: headers", "the 1st window SP_SEQS = OrderedDict() SP_SEQS[\"genome_5\"] = \"AA---\" class TestDesign(object): \"\"\"General class", "i, line in enumerate(f): if i == 0: headers = line.split('\\t') # Will", "if None Returns: List of strings that are the arguments of the test", "be fixed for future tests self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy def small_fetch(taxid, segment): # 123", "\"AA---\" class TestDesign(object): \"\"\"General class for testing design.py Defines helper functions for test", "reopened on Windows self.input_file.close() # Create a temporary output file self.output_file = tempfile.NamedTemporaryFile(mode='w',", "binary prediction specific: None, 'fasta', or 'taxa'; what sort of input to be", "input to be specific against output_loc: path to the output file/directory; set to", "'--mafft-path', 'fake_path']) if search_type == 'sliding-window': argv.extend(['-w', '3']) if search_type == 'complete-targets': argv.extend(['--best-n-targets',", "args: window size 3, guide size 2, allow GU pairing # GU pairing", "Namespace from adapt import alignment from adapt.prepare import align, 
ncbi_neighbors, prepare_alignment from adapt.utils", "temporary specificity file self.sp_file = tempfile.NamedTemporaryFile(mode='w', delete=False) self.sp_file.write(\"123\\tNone\\n\") # Closes the file so", "test only produces 1 cluster, so store the name of that file self.real_output_file", "= \"AA---\" class TestDesign(object): \"\"\"General class for testing design.py Defines helper functions for", "except FileNotFoundError: pass def tearDown(self): super().tearDown() self.output_dir.cleanup() class TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test design.py fully through", "TSV file of test output and expected output, fails the test if the", "args = design.argv_to_args(argv) design.run(args) # Since sequences are short and need 1 base", "Windows self.sp_fasta.close() seq_io.write_fasta(SP_SEQS, self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name) argv = super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name) args = design.argv_to_args(argv) design.run(args)", "be set to 0 since otherwise # having 1 base in common with", "for seq in seqs \\ if seq.split('.')[0] not in remove_ref_accs} align.curate_against_ref = small_curate", "it can be fixed for future tests self.set_mafft_exec = align.set_mafft_exec align.set_mafft_exec = lambda", "import align, ncbi_neighbors, prepare_alignment from adapt.utils import seq_io from bin import design __author__", "a temporary fasta file for specificity self.sp_fasta = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the", "file, expected, header='target-sequences'): \"\"\"Check the results of the test output Given a TSV", "header of the CSV that contains the guide target sequences \"\"\" col_loc =", "align.set_mafft_exec align.set_mafft_exec = lambda mafft_path: None # Curating requires MAFFT, so override this", "lambda mafft_path: None # Curating requires MAFFT, so override this function; store original", "continue self.assertLess(i, len(expected) + 1) guide_line = 
line.split('\\t')[col_loc] guides = guide_line.split(' ') for", "of lists of strings, all the expected guide target sequences in each line", "== 'sliding-window': argv.extend(['-w', '3']) if search_type == 'complete-targets': argv.extend(['--best-n-targets', '2', '-pp', '.75', '-pl',", "fetching the real # sequences, but don't return them as they won't be", "argv.extend(['--specific-against-fastas', specificity_file, '--id-m', '0']) elif specific == 'taxa': argv.extend(['--specific-against-taxa', specificity_file, '--id-m', '0']) if", "we're # doing a simple test case, so override this function; store original", "= super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file=self.sp_file.name) args = design.argv_to_args(argv) design.run(args) # Same output as test_specificity_fasta,", "specific == 'fasta': argv.extend(['--specific-against-fastas', specificity_file, '--id-m', '0']) elif specific == 'taxa': argv.extend(['--specific-against-taxa', specificity_file,", "Cas13a built in model, false to use simple binary prediction specific: None, 'fasta',", "on input Args: search_type: 'sliding-window' or 'complete-targets' input_type: 'fasta', 'auto-from-args', or 'auto-from-file' objective:", "=='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0']) argv.extend(['--obj', objective, '--seed', '0', '-gl', '2']) return argv def", "specificity file if taxid == 123: return SP_SEQS # If it's not the", "be reopened on Windows self.input_file.close() # Create a temporary output file self.output_file =", "expected) class TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test design.py given arguments to automatically download FASTAs Does not", "argv = super().baseArgv(search_type='complete-targets') args = design.argv_to_args(argv) design.run(args) # Since sequences are short and", "== 'fasta': argv.extend(['--specific-against-fastas', specificity_file, '--id-m', '0']) elif specific == 'taxa': 
argv.extend(['--specific-against-taxa', specificity_file, '--id-m',", "window SEQS = OrderedDict() SEQS[\"genome_1\"] = \"AACTA\" SEQS[\"genome_2\"] = \"AAACT\" SEQS[\"genome_3\"] = \"GGCTA\"", "Create a temporary input file self.input_file = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file", "cases and basic setUp and tearDown functions. \"\"\" class TestDesignCase(unittest.TestCase): def setUp(self): #", "') for guide in guides: self.assertIn(guide, expected[i-1]) self.assertEqual(len(guides), len(expected[i-1])) self.assertEqual(i, len(expected)) def baseArgv(self,", "specificity fasta, # so 1st window changes expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]]", "Closes the file so that it can be reopened on Windows self.sp_file.close() #", "expected, header='guide-target-sequences') def test_specificity_fastas(self): # Create a temporary fasta file for specificity self.sp_fasta", "# it can be fixed for future tests self.curate_against_ref = align.curate_against_ref def small_curate(seqs,", "= tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so that it can be reopened", "import unittest import logging from collections import OrderedDict from argparse import Namespace from", "'taxa': argv.extend(['--specific-against-taxa', specificity_file, '--id-m', '0']) if model: argv.append('--predict-cas13a-activity-model') elif objective =='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction', '-gm',", "elif objective =='maximize-activity': argv.extend(['--maximization-algorithm', 'greedy']) # ID-M (mismatches to be considered identical) must", "output_loc = self.output_file.name argv = ['design.py', search_type, input_type] if input_type == 'fasta': argv.extend([input_file,", "and output simple # test sequences; store original so it can be fixed", "boolean, true to use Cas13a built in model, false to use simple binary", "0 mismatches # (so same outputs as min-guides) expected = [[\"AA\"], [\"CT\"], [\"CT\"]]", 
"'greedy']) # ID-M (mismatches to be considered identical) must be set to 0", "line in enumerate(f): if i == 0: headers = line.split('\\t') # Will raise", "'fasta', or 'taxa'; what sort of input to be specific against output_loc: path", "with a 2 base guide counts as a match if specific == 'fasta':", "tempfile.NamedTemporaryFile(mode='w', delete=False) self.output_file.close() self.files_to_delete = [self.input_file.name, self.output_file.name] def check_results(self, file, expected, header='target-sequences'): \"\"\"Check", "a fake path to MAFFT. All are expected to return a FileNotFoundError \"\"\"", "fetch sequences for the specificity file since we're # doing a simple test", "won't be used else: self.fetch_sequences_for_taxonomy(taxid, segment) return SEQS prepare_alignment.fetch_sequences_for_taxonomy = small_fetch # Disable", "os.path.isfile(file): os.unlink(file) # Re-enable logging logging.disable(logging.NOTSET) class TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test design.py given an input", "# If it's not the specificity taxonomic ID, test fetching the real #", "or 'auto-from-file' objective: 'minimize-guides' or 'maximize-activity' model: boolean, true to use Cas13a built", "design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def test_auto_from_args(self): argv = super().baseArgv(input_type='auto-from-args') args =", "for future tests self.set_mafft_exec = align.set_mafft_exec align.set_mafft_exec = lambda mafft_path: None # Curating", "SEQS = OrderedDict() SEQS[\"genome_1\"] = \"AACTA\" SEQS[\"genome_2\"] = \"AAACT\" SEQS[\"genome_3\"] = \"GGCTA\" SEQS[\"genome_4\"]", "simple # test sequences; store original so it can be fixed for future", "= lambda seqs, am=None: SEQS # We don't want to fetch sequences for", "am=None: SEQS # We don't want to fetch sequences for the specificity file", "input_type == 'auto-from-args': argv.extend(['64320', 'None', output_loc]) elif input_type == 'auto-from-file': argv.extend([input_file, 
output_loc]) if", "[[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_max_activity(self): argv = super().baseArgv(objective='maximize-activity') args = design.argv_to_args(argv)", "by specificity fasta, # so 1st window changes expected = [[\"AC\", \"GG\"], [\"CT\"],", "self.output_file.close() self.files_to_delete = [self.input_file.name, self.output_file.name] def check_results(self, file, expected, header='target-sequences'): \"\"\"Check the results", "[\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_max_activity(self): argv = super().baseArgv(objective='maximize-activity') args = design.argv_to_args(argv) design.run(args)", "the same expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def tearDown(self): #", "'.tsv' self.files_to_delete.append(self.real_output_file) # Write to temporary input fasta seq_io.write_fasta(SEQS, self.input_file.name) def test_min_guides(self): argv", "input_type: 'fasta', 'auto-from-args', or 'auto-from-file' objective: 'minimize-guides' or 'maximize-activity' model: boolean, true to", "changes expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) class TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test design.py", "# it can be fixed for future tests self.set_mafft_exec = align.set_mafft_exec align.set_mafft_exec =", "# AA isn't allowed in 1st window by specificity fasta, # so 1st", "self.check_results(self.real_output_file, expected) class TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test design.py given arguments to automatically download FASTAs Does", "def setUp(self): # Disable logging logging.disable(logging.INFO) # Create a temporary input file self.input_file", "side, # only finds 1 target in middle expected = [[\"CT\"]] self.check_results(self.real_output_file, expected,", "= design.argv_to_args(argv) design.run(args) # Same output as 
test_specificity_fasta, as sequences are the same", "adapt import alignment from adapt.prepare import align, ncbi_neighbors, prepare_alignment from adapt.utils import seq_io", "original so it can be fixed for future tests self.align = align.align align.align", "expected output, fails the test if the test output guide target sequences do", "future tests self.set_mafft_exec = align.set_mafft_exec align.set_mafft_exec = lambda mafft_path: None # Curating requires", "# test sequences; store original so it can be fixed for future tests", "design.py given an input FASTA \"\"\" def setUp(self): super().setUp() self.real_output_file = self.output_file.name +", "[[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def tearDown(self): # Fix all overridden functions", "self.sp_file.write(\"123\\tNone\\n\") # Closes the file so that it can be reopened on Windows", "output_loc]) elif input_type == 'auto-from-args': argv.extend(['64320', 'None', output_loc]) elif input_type == 'auto-from-file': argv.extend([input_file,", "argv.extend(['--maximization-algorithm', 'greedy']) # ID-M (mismatches to be considered identical) must be set to", "fixed for future tests self.set_mafft_exec = align.set_mafft_exec align.set_mafft_exec = lambda mafft_path: None #", "help for details on input Args: search_type: 'sliding-window' or 'complete-targets' input_type: 'fasta', 'auto-from-args',", "'complete-targets' input_type: 'fasta', 'auto-from-args', or 'auto-from-file' objective: 'minimize-guides' or 'maximize-activity' model: boolean, true", "Defines helper functions for test cases and basic setUp and tearDown functions. 
\"\"\"", "collections import OrderedDict from argparse import Namespace from adapt import alignment from adapt.prepare", "an input FASTA \"\"\" def setUp(self): super().setUp() self.real_output_file = self.output_file.name + '.tsv' self.files_to_delete.append(self.real_output_file)", "TestDesignAutos(TestDesign.TestDesignCase): \"\"\"Test design.py given arguments to automatically download FASTAs Does not run the", "output_loc=self.output_dir.name) args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def test_auto_from_args(self): argv =", "'2']) if objective == 'minimize-guides': argv.extend(['-gm', '0', '-gp', '.75']) elif objective =='maximize-activity': argv.extend(['--maximization-algorithm',", "0 since otherwise # having 1 base in common with a 2 base", "of that file self.real_output_file = self.output_file.name + '.0.tsv' self.files_to_delete.extend([self.sp_file.name, self.real_output_file]) # We cannot", "[self.input_file.name, self.output_file.name] def check_results(self, file, expected, header='target-sequences'): \"\"\"Check the results of the test", "expected) def test_complete_targets(self): argv = super().baseArgv(search_type='complete-targets') args = design.argv_to_args(argv) design.run(args) # Since sequences", "with open(self.input_file.name, 'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary output directory", "they won't be used else: self.fetch_sequences_for_taxonomy(taxid, segment) return SEQS prepare_alignment.fetch_sequences_for_taxonomy = small_fetch #", "import random import os import copy import tempfile import unittest import logging from", "elif input_type == 'auto-from-args': argv.extend(['64320', 'None', output_loc]) elif input_type == 'auto-from-file': argv.extend([input_file, output_loc])", "virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary specificity file self.sp_file = tempfile.NamedTemporaryFile(mode='w', delete=False) 
self.sp_file.write(\"123\\tNone\\n\") #", "design.run(args) # Doesn't use model, just greedy binary prediction with 0 mismatches #", "test cases and basic setUp and tearDown functions. \"\"\" class TestDesignCase(unittest.TestCase): def setUp(self):", "details of what the test case is testing. See design.py help for details", "argv.extend([input_file, output_loc]) if input_type in ['auto-from-args', 'auto-from-file']: argv.extend(['--sample-seqs', '1', '--mafft-path', 'fake_path']) if search_type", "path name of the file expected: list of lists of strings, all the", "to automatically download FASTAs Does not run the entire design.py; prematurely stops by", "match at 75% expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def test_max_activity(self): argv", "self.output_file.name argv = ['design.py', search_type, input_type] if input_type == 'fasta': argv.extend([input_file, '-o', output_loc])", "FASTA \"\"\" def setUp(self): super().setUp() self.real_output_file = self.output_file.name + '.tsv' self.files_to_delete.append(self.real_output_file) # Write", "align, ncbi_neighbors, prepare_alignment from adapt.utils import seq_io from bin import design __author__ =", "used else: self.fetch_sequences_for_taxonomy(taxid, segment) return SEQS prepare_alignment.fetch_sequences_for_taxonomy = small_fetch # Disable warning logging", "# Since sequences are short and need 1 base for primer on each", "given arguments to automatically download FASTAs Does not run the entire design.py; prematurely", "See design.py help for details on input Args: search_type: 'sliding-window' or 'complete-targets' input_type:", "Given a TSV file of test output and expected output, fails the test", "input file with open(self.input_file.name, 'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary", "= super().baseArgv(search_type='complete-targets') args = design.argv_to_args(argv) design.run(args) # Since sequences are short 
and need", "guide in guides: self.assertIn(guide, expected[i-1]) self.assertEqual(len(guides), len(expected[i-1])) self.assertEqual(i, len(expected)) def baseArgv(self, search_type='sliding-window', input_type='fasta',", "argv.extend(['64320', 'None', output_loc]) elif input_type == 'auto-from-file': argv.extend([input_file, output_loc]) if input_type in ['auto-from-args',", "if model: argv.append('--predict-cas13a-activity-model') elif objective =='maximize-activity': argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0']) argv.extend(['--obj', objective, '--seed', '0',", "def tearDown(self): super().tearDown() self.output_dir.cleanup() class TestDesignFull(TestDesign.TestDesignCase): \"\"\"Test design.py fully through \"\"\" def setUp(self):", "self.output_file.name] def check_results(self, file, expected, header='target-sequences'): \"\"\"Check the results of the test output", "target sequences Args: file: string, path name of the file expected: list of", "seq in seqs \\ if seq.split('.')[0] not in remove_ref_accs} align.curate_against_ref = small_curate #", "are the same expected = [[\"AC\", \"GG\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected) def tearDown(self):", "use Cas13a built in model, false to use simple binary prediction specific: None,", "for a test case given details of what the test case is testing.", "tearDown functions. 
\"\"\" class TestDesignCase(unittest.TestCase): def setUp(self): # Disable logging logging.disable(logging.INFO) # Create", "'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create a temporary output directory self.output_dir =", "for future tests self.align = align.align align.align = lambda seqs, am=None: SEQS #", "be used else: self.fetch_sequences_for_taxonomy(taxid, segment) return SEQS prepare_alignment.fetch_sequences_for_taxonomy = small_fetch # Disable warning", "delete=False) self.output_file.close() self.files_to_delete = [self.input_file.name, self.output_file.name] def check_results(self, file, expected, header='target-sequences'): \"\"\"Check the", "'-o', output_loc]) elif input_type == 'auto-from-args': argv.extend(['64320', 'None', output_loc]) elif input_type == 'auto-from-file':", "line of the output header: the header of the CSV that contains the", "def test_specificity_fastas(self): # Create a temporary fasta file for specificity self.sp_fasta = tempfile.NamedTemporaryFile(mode='w',", "future tests self.curate_against_ref = align.curate_against_ref def small_curate(seqs, ref_accs, asm=None, remove_ref_accs=[]): return {seq: seqs[seq]", "output simple # test sequences; store original so it can be fixed for", "TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test design.py given an input FASTA \"\"\" def setUp(self): super().setUp() self.real_output_file =", "arguments to automatically download FASTAs Does not run the entire design.py; prematurely stops", "import OrderedDict from argparse import Namespace from adapt import alignment from adapt.prepare import", "strings, all the expected guide target sequences in each line of the output", "built in model, false to use simple binary prediction specific: None, 'fasta', or", "lists of strings, all the expected guide target sequences in each line of", "be considered identical) must be set to 0 since otherwise # having 1", "taxid == 123: return SP_SEQS # If it's not the specificity 
taxonomic ID,", "= OrderedDict() SP_SEQS[\"genome_5\"] = \"AA---\" class TestDesign(object): \"\"\"General class for testing design.py Defines", "# having 1 base in common with a 2 base guide counts as", "to temporary input file with open(self.input_file.name, 'w') as f: f.write(\"Zika virus\\t64320\\tNone\\tNC_035889\\n\") # Create", "file self.real_output_file = self.output_file.name + '.0.tsv' self.files_to_delete.extend([self.sp_file.name, self.real_output_file]) # We cannot access MAFFT,", "super().baseArgv(input_type='auto-from-args') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def test_specificity_taxa(self): argv =", "for primer on each side, # only finds 1 target in middle expected", "OrderedDict from argparse import Namespace from adapt import alignment from adapt.prepare import align,", "# (so same outputs as min-guides) expected = [[\"AA\"], [\"CT\"], [\"CT\"]] self.check_results(self.real_output_file, expected)", "= super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name) args = design.argv_to_args(argv) design.run(args) # AA isn't allowed in 1st", "Re-enable logging logging.disable(logging.NOTSET) class TestDesignFasta(TestDesign.TestDesignCase): \"\"\"Test design.py given an input FASTA \"\"\" def", "cannot access MAFFT, so override this function; store original so # it can", "argv = super().baseArgv(input_type='auto-from-args') args = design.argv_to_args(argv) try: design.run(args) except FileNotFoundError: pass def test_specificity_taxa(self):", "for i, line in enumerate(f): if i == 0: headers = line.split('\\t') #", "base for primer on each side, # only finds 1 target in middle", "contains the guide target sequences \"\"\" col_loc = None with open(file) as f:", "= \"AACTA\" SEQS[\"genome_2\"] = \"AAACT\" SEQS[\"genome_3\"] = \"GGCTA\" SEQS[\"genome_4\"] = \"GGCTT\" # Specificity", "super().setUp() self.real_output_file = self.output_file.name + '.tsv' 
self.files_to_delete.append(self.real_output_file) # Write to temporary input fasta", "it can be reopened on Windows self.sp_fasta.close() seq_io.write_fasta(SP_SEQS, self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name) argv = super().baseArgv(specific='fasta',", "Disable warning logging to avoid annotation warning logging.disable(logging.WARNING) def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args',", "OrderedDict() SEQS[\"genome_1\"] = \"AACTA\" SEQS[\"genome_2\"] = \"AAACT\" SEQS[\"genome_3\"] = \"GGCTA\" SEQS[\"genome_4\"] = \"GGCTT\"", "line.split('\\t')[col_loc] guides = guide_line.split(' ') for guide in guides: self.assertIn(guide, expected[i-1]) self.assertEqual(len(guides), len(expected[i-1]))", "'--max-primers-at-site', '2']) if objective == 'minimize-guides': argv.extend(['-gm', '0', '-gp', '.75']) elif objective =='maximize-activity':", "ID-M (mismatches to be considered identical) must be set to 0 since otherwise", "model: boolean, true to use Cas13a built in model, false to use simple", "None: output_loc = self.output_file.name argv = ['design.py', search_type, input_type] if input_type == 'fasta':", "= super().baseArgv(objective='maximize-activity') args = design.argv_to_args(argv) design.run(args) # Doesn't use model, just greedy binary", "= design.argv_to_args(argv) design.run(args) # Doesn't use model, just greedy binary prediction with 0", "file for specificity self.sp_fasta = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes the file so that", "as they won't be used else: self.fetch_sequences_for_taxonomy(taxid, segment) return SEQS prepare_alignment.fetch_sequences_for_taxonomy = small_fetch", "len(expected) + 1) guide_line = line.split('\\t')[col_loc] guides = guide_line.split(' ') for guide in", "input_type] if input_type == 'fasta': argv.extend([input_file, '-o', output_loc]) elif input_type == 'auto-from-args': argv.extend(['64320',", "'-gp', '.75']) elif objective 
=='maximize-activity': argv.extend(['--maximization-algorithm', 'greedy']) # ID-M (mismatches to be considered", "self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name) argv = super().baseArgv(specific='fasta', specificity_file=self.sp_fasta.name) args = design.argv_to_args(argv) design.run(args) # AA isn't", "file so that it can be reopened on Windows self.sp_fasta.close() seq_io.write_fasta(SP_SEQS, self.sp_fasta.name) self.files_to_delete.append(self.sp_fasta.name)", "All are expected to return a FileNotFoundError \"\"\" def setUp(self): super().setUp() # Write", "can be fixed for future tests self.fetch_sequences_for_taxonomy = prepare_alignment.fetch_sequences_for_taxonomy def small_fetch(taxid, segment): #", "window SP_SEQS = OrderedDict() SP_SEQS[\"genome_5\"] = \"AA---\" class TestDesign(object): \"\"\"General class for testing", "copy import tempfile import unittest import logging from collections import OrderedDict from argparse", "be fixed for future tests self.curate_against_ref = align.curate_against_ref def small_curate(seqs, ref_accs, asm=None, remove_ref_accs=[]):", "= '<NAME> <<EMAIL>>' # Default args: window size 3, guide size 2, allow", "argv.extend(['--use-simple-binary-activity-prediction', '-gm', '0']) argv.extend(['--obj', objective, '--seed', '0', '-gl', '2']) return argv def tearDown(self):", "(mismatches to be considered identical) must be set to 0 since otherwise #", "argv.extend(['--sample-seqs', '1', '--mafft-path', 'fake_path']) if search_type == 'sliding-window': argv.extend(['-w', '3']) if search_type ==", "SEQS[\"genome_1\"] = \"AACTA\" SEQS[\"genome_2\"] = \"AAACT\" SEQS[\"genome_3\"] = \"GGCTA\" SEQS[\"genome_4\"] = \"GGCTT\" #", "logging.disable(logging.INFO) # Create a temporary input file self.input_file = tempfile.NamedTemporaryFile(mode='w', delete=False) # Closes", "'fasta': argv.extend([input_file, '-o', output_loc]) elif input_type == 'auto-from-args': argv.extend(['64320', 'None', output_loc]) elif 
input_type", "self.align = align.align align.align = lambda seqs, am=None: SEQS # We don't want", "Default args: window size 3, guide size 2, allow GU pairing # GU", "# Base args set the percentage of sequences to match at 75% expected", "file so that it can be reopened on Windows self.input_file.close() # Create a", "list of lists of strings, all the expected guide target sequences in each", "asm=None, remove_ref_accs=[]): return {seq: seqs[seq] for seq in seqs \\ if seq.split('.')[0] not", "FileNotFoundError \"\"\" def setUp(self): super().setUp() # Write to temporary input file with open(self.input_file.name,", "i == 0: headers = line.split('\\t') # Will raise an error if header", "so # it can be fixed for future tests self.set_mafft_exec = align.set_mafft_exec align.set_mafft_exec", "in guides: self.assertIn(guide, expected[i-1]) self.assertEqual(len(guides), len(expected[i-1])) self.assertEqual(i, len(expected)) def baseArgv(self, search_type='sliding-window', input_type='fasta', objective='minimize-guides',", "1 cluster, so store the name of that file self.real_output_file = self.output_file.name +", "except FileNotFoundError: pass def test_specificity_taxa(self): argv = super().baseArgv(input_type='auto-from-args', specific='taxa', specificity_file='') args = design.argv_to_args(argv)", "to be specific against output_loc: path to the output file/directory; set to self.output_file.name", "__author__ = '<NAME> <<EMAIL>>' # Default args: window size 3, guide size 2," ]
[ "list, k): cars.sort() to_cover = cars[:k] mx = max(to_cover) mi = min(to_cover) return", "to_cover = cars[:k] mx = max(to_cover) mi = min(to_cover) return mx - (mi", "= int(input().strip()) cars.append(cars_item) k = int(input().strip()) print(\"running code...\") result = carParkingRoof(cars, k) fptr.write(str(result)", "int(input().strip()) print(\"running code...\") result = carParkingRoof(cars, k) fptr.write(str(result) + \"\\n\") fptr.close() if __name__", "3)) # print(carParkingRoof([2, 10, 8, 17], 3)) # print(carParkingRoof([1, 2, 3, 10], 4))", "for _ in range(cars_count): cars_item = int(input().strip()) cars.append(cars_item) k = int(input().strip()) print(\"running code...\")", "int(input().strip()) cars.append(cars_item) k = int(input().strip()) print(\"running code...\") result = carParkingRoof(cars, k) fptr.write(str(result) +", "12, 7], 3)) # print(carParkingRoof([2, 10, 8, 17], 3)) # print(carParkingRoof([1, 2, 3,", "-> tuple: fptr = open(filename, \"w\") print(\"file opened\") cars_count = int(input().strip()) cars =", "range(cars_count): cars_item = int(input().strip()) cars.append(cars_item) k = int(input().strip()) print(\"running code...\") result = carParkingRoof(cars,", "k = int(input().strip()) print(\"running code...\") result = carParkingRoof(cars, k) fptr.write(str(result) + \"\\n\") fptr.close()", "= cars[:k] mx = max(to_cover) mi = min(to_cover) return mx - (mi -", "\"w\") print(\"file opened\") cars_count = int(input().strip()) cars = [] for _ in range(cars_count):", "cars = [] for _ in range(cars_count): cars_item = int(input().strip()) cars.append(cars_item) k =", "__name__ == \"__main__\": # print(carParkingRoof([1], 3)) # print(carParkingRoof([6, 2, 12, 7], 3)) #", "# print(carParkingRoof([1], 3)) # print(carParkingRoof([6, 2, 12, 7], 3)) # print(carParkingRoof([2, 10, 8,", "_ in range(cars_count): cars_item = int(input().strip()) cars.append(cars_item) k = int(input().strip()) print(\"running code...\") result", "print(\"running 
code...\") result = carParkingRoof(cars, k) fptr.write(str(result) + \"\\n\") fptr.close() if __name__ ==", "<filename>andela/car_park_roof.py<gh_stars>0 def carParkingRoof(cars: list, k): cars.sort() to_cover = cars[:k] mx = max(to_cover) mi", "== \"__main__\": # print(carParkingRoof([1], 3)) # print(carParkingRoof([6, 2, 12, 7], 3)) # print(carParkingRoof([2,", "1) def process_file(filename: str) -> tuple: fptr = open(filename, \"w\") print(\"file opened\") cars_count", "= [] for _ in range(cars_count): cars_item = int(input().strip()) cars.append(cars_item) k = int(input().strip())", "cars[:k] mx = max(to_cover) mi = min(to_cover) return mx - (mi - 1)", "3)) # print(carParkingRoof([6, 2, 12, 7], 3)) # print(carParkingRoof([2, 10, 8, 17], 3))", "2, 12, 7], 3)) # print(carParkingRoof([2, 10, 8, 17], 3)) # print(carParkingRoof([1, 2,", "return mx - (mi - 1) def process_file(filename: str) -> tuple: fptr =", "opened\") cars_count = int(input().strip()) cars = [] for _ in range(cars_count): cars_item =", "open(filename, \"w\") print(\"file opened\") cars_count = int(input().strip()) cars = [] for _ in", "- (mi - 1) def process_file(filename: str) -> tuple: fptr = open(filename, \"w\")", "= open(filename, \"w\") print(\"file opened\") cars_count = int(input().strip()) cars = [] for _", "process_file(filename: str) -> tuple: fptr = open(filename, \"w\") print(\"file opened\") cars_count = int(input().strip())", "= int(input().strip()) cars = [] for _ in range(cars_count): cars_item = int(input().strip()) cars.append(cars_item)", "= int(input().strip()) print(\"running code...\") result = carParkingRoof(cars, k) fptr.write(str(result) + \"\\n\") fptr.close() if", "+ \"\\n\") fptr.close() if __name__ == \"__main__\": # print(carParkingRoof([1], 3)) # print(carParkingRoof([6, 2,", "\"\\n\") fptr.close() if __name__ == \"__main__\": # print(carParkingRoof([1], 3)) # print(carParkingRoof([6, 2, 12,", "7], 3)) # print(carParkingRoof([2, 10, 8, 17], 3)) # 
print(carParkingRoof([1, 2, 3, 10],", "def process_file(filename: str) -> tuple: fptr = open(filename, \"w\") print(\"file opened\") cars_count =", "- 1) def process_file(filename: str) -> tuple: fptr = open(filename, \"w\") print(\"file opened\")", "print(\"file opened\") cars_count = int(input().strip()) cars = [] for _ in range(cars_count): cars_item", "fptr.write(str(result) + \"\\n\") fptr.close() if __name__ == \"__main__\": # print(carParkingRoof([1], 3)) # print(carParkingRoof([6,", "def carParkingRoof(cars: list, k): cars.sort() to_cover = cars[:k] mx = max(to_cover) mi =", "print(carParkingRoof([1], 3)) # print(carParkingRoof([6, 2, 12, 7], 3)) # print(carParkingRoof([2, 10, 8, 17],", "= carParkingRoof(cars, k) fptr.write(str(result) + \"\\n\") fptr.close() if __name__ == \"__main__\": # print(carParkingRoof([1],", "result = carParkingRoof(cars, k) fptr.write(str(result) + \"\\n\") fptr.close() if __name__ == \"__main__\": #", "= max(to_cover) mi = min(to_cover) return mx - (mi - 1) def process_file(filename:", "cars.append(cars_item) k = int(input().strip()) print(\"running code...\") result = carParkingRoof(cars, k) fptr.write(str(result) + \"\\n\")", "cars_count = int(input().strip()) cars = [] for _ in range(cars_count): cars_item = int(input().strip())", "carParkingRoof(cars: list, k): cars.sort() to_cover = cars[:k] mx = max(to_cover) mi = min(to_cover)", "(mi - 1) def process_file(filename: str) -> tuple: fptr = open(filename, \"w\") print(\"file", "tuple: fptr = open(filename, \"w\") print(\"file opened\") cars_count = int(input().strip()) cars = []", "= min(to_cover) return mx - (mi - 1) def process_file(filename: str) -> tuple:", "\"__main__\": # print(carParkingRoof([1], 3)) # print(carParkingRoof([6, 2, 12, 7], 3)) # print(carParkingRoof([2, 10,", "# print(carParkingRoof([2, 10, 8, 17], 3)) # print(carParkingRoof([1, 2, 3, 10], 4)) process_file(\"./andela/car_park_test_case.txt\")", "fptr.close() if __name__ == \"__main__\": # 
print(carParkingRoof([1], 3)) # print(carParkingRoof([6, 2, 12, 7],", "fptr = open(filename, \"w\") print(\"file opened\") cars_count = int(input().strip()) cars = [] for", "int(input().strip()) cars = [] for _ in range(cars_count): cars_item = int(input().strip()) cars.append(cars_item) k", "[] for _ in range(cars_count): cars_item = int(input().strip()) cars.append(cars_item) k = int(input().strip()) print(\"running", "mi = min(to_cover) return mx - (mi - 1) def process_file(filename: str) ->", "cars_item = int(input().strip()) cars.append(cars_item) k = int(input().strip()) print(\"running code...\") result = carParkingRoof(cars, k)", "if __name__ == \"__main__\": # print(carParkingRoof([1], 3)) # print(carParkingRoof([6, 2, 12, 7], 3))", "min(to_cover) return mx - (mi - 1) def process_file(filename: str) -> tuple: fptr", "print(carParkingRoof([6, 2, 12, 7], 3)) # print(carParkingRoof([2, 10, 8, 17], 3)) # print(carParkingRoof([1,", "k): cars.sort() to_cover = cars[:k] mx = max(to_cover) mi = min(to_cover) return mx", "mx = max(to_cover) mi = min(to_cover) return mx - (mi - 1) def", "in range(cars_count): cars_item = int(input().strip()) cars.append(cars_item) k = int(input().strip()) print(\"running code...\") result =", "max(to_cover) mi = min(to_cover) return mx - (mi - 1) def process_file(filename: str)", "str) -> tuple: fptr = open(filename, \"w\") print(\"file opened\") cars_count = int(input().strip()) cars", "code...\") result = carParkingRoof(cars, k) fptr.write(str(result) + \"\\n\") fptr.close() if __name__ == \"__main__\":", "# print(carParkingRoof([6, 2, 12, 7], 3)) # print(carParkingRoof([2, 10, 8, 17], 3)) #", "carParkingRoof(cars, k) fptr.write(str(result) + \"\\n\") fptr.close() if __name__ == \"__main__\": # print(carParkingRoof([1], 3))", "k) fptr.write(str(result) + \"\\n\") fptr.close() if __name__ == \"__main__\": # print(carParkingRoof([1], 3)) #", "mx - (mi - 1) def process_file(filename: str) -> tuple: fptr = open(filename,", 
"cars.sort() to_cover = cars[:k] mx = max(to_cover) mi = min(to_cover) return mx -" ]
[ "default = 'fix', type = str, help = 'Output pdf file path suffix", "when using saddle stitch option.' ) front_page.add_argument('-fs', '--front-second', action = 'store_true', default =", "page is located on first page when using saddle stitch option.' ) front_page.add_argument('-fs',", "help = 'Split pdf page horizontally.') ### Rotate option ### # rotate parser.add_argument('-r',", "# Vertical split split_group.add_argument('-sv', '--vertical', action = 'store_true', default = False, help =", "Input File parser.add_argument('pdf_file_path', action = 'store', type = str, metavar = \"PDF_PATH\", help", "= 'store', type = int, choices = [90, 180, 270], help = 'Rotate", "type = str, help = 'Output pdf file path suffix before .pdf extention.'", "# Reverse output 3 parser.add_argument('-rv', '--reverse', action = 'store_true', default = False, help", "### # Verbose mode parser.add_argument('--verbose', action = 'store_true', default = False, help =", "parser.add_argument('-r', '--rotate', action = 'store', type = int, choices = [90, 180, 270],", "Output File parser.add_argument('-o', '--output', action = 'store', type = str, help = 'Output", "'store_true', default = False, help = 'Split pdf page horizontally.') ### Rotate option", "= 'store_true', default = False, help = 'Output pdf pages reversely.') ### Others", ") front_page.add_argument('-fs', '--front-second', action = 'store_true', default = True, help = 'First page", "action = 'store', type = str, help = 'Output pdf file path .'", "pdf page clockwise specified degree.' ) ### Front page location ### front_page =", "= True, help = 'First page is located on second page when using", "default = False, help = 'Run as verbose mode.' ) # Version parser.add_argument('-v',", "'Run as verbose mode.' ) # Version parser.add_argument('-v', '--version', action = 'version', version", "str, help = 'Output pdf file path .' 
) # Output File suffix", "import argparse parser = argparse.ArgumentParser( prog = \"pydifier\", add_help = True) ### File", "page vertically.') # horizontal split split_group.add_argument('-sh', '--horizontal', action = 'store_true', default = False,", "### Rotate option ### # rotate parser.add_argument('-r', '--rotate', action = 'store', type =", "Version parser.add_argument('-v', '--version', action = 'version', version = '%(prog)s 0.1', help = 'Show", "= 'Split pdf page horizontally.') ### Rotate option ### # rotate parser.add_argument('-r', '--rotate',", "'fix', type = str, help = 'Output pdf file path suffix before .pdf", "extension parser.add_argument('-s', '--suffix', action = 'store', default = 'fix', type = str, help", "# Front page is located front_page.add_argument('-ff', '--front-first', action = 'store_true', default = False,", "File option ### # Input File parser.add_argument('pdf_file_path', action = 'store', type = str,", "= False, help = 'Split pdf page horizontally.') ### Rotate option ### #", "page is located on second page when using saddle stitch option.' ) ###", "3 parser.add_argument('-rv', '--reverse', action = 'store_true', default = False, help = 'Output pdf", "File parser.add_argument('pdf_file_path', action = 'store', type = str, metavar = \"PDF_PATH\", help =", "'store_true', default = False, help = 'Split pdf page vertically.') # horizontal split", "action = 'store_true', default = False, help = 'Output pdf pages reversely.') ###", "page horizontally.') ### Rotate option ### # rotate parser.add_argument('-r', '--rotate', action = 'store',", "### Front page location ### front_page = parser.add_mutually_exclusive_group(required=False) # Front page is located", "pdf page vertically.') # horizontal split split_group.add_argument('-sh', '--horizontal', action = 'store_true', default =", "when using saddle stitch option.' 
) ### Binding option ### parser.add_argument('-ss', '--saddle-stitch', action", "pdf file path suffix before .pdf extention.' ) ### Split option ### split_group", "= 'Output pdf file path suffix before .pdf extention.' ) ### Split option", "'store_true', default = False, help = 'Output pdf pages reversely.') ### Others ###", "'--front-second', action = 'store_true', default = True, help = 'First page is located", ".pdf extention.' ) ### Split option ### split_group = parser.add_mutually_exclusive_group(required=False) # Vertical split", "Split option ### split_group = parser.add_mutually_exclusive_group(required=False) # Vertical split split_group.add_argument('-sv', '--vertical', action =", "parser.add_mutually_exclusive_group(required=False) # Vertical split split_group.add_argument('-sv', '--vertical', action = 'store_true', default = False, help", "using saddle stitch option.' ) ### Binding option ### parser.add_argument('-ss', '--saddle-stitch', action =", "= False, help = 'Run as verbose mode.' ) # Version parser.add_argument('-v', '--version',", "action = 'store_true', default = False, help = 'First page is located on", "Output File suffix before extension parser.add_argument('-s', '--suffix', action = 'store', default = 'fix',", "option ### split_group = parser.add_mutually_exclusive_group(required=False) # Vertical split split_group.add_argument('-sv', '--vertical', action = 'store_true',", "parser.add_argument('-o', '--output', action = 'store', type = str, help = 'Output pdf file", "= parser.add_mutually_exclusive_group(required=False) # Vertical split split_group.add_argument('-sv', '--vertical', action = 'store_true', default = False,", "'--horizontal', action = 'store_true', default = False, help = 'Split pdf page horizontally.')", "pdf page horizontally.') ### Rotate option ### # rotate parser.add_argument('-r', '--rotate', action =", "using saddle stitch option.' 
) front_page.add_argument('-fs', '--front-second', action = 'store_true', default = True,", "is located on first page when using saddle stitch option.' ) front_page.add_argument('-fs', '--front-second',", "# Verbose mode parser.add_argument('--verbose', action = 'store_true', default = False, help = 'Run", "split split_group.add_argument('-sh', '--horizontal', action = 'store_true', default = False, help = 'Split pdf", "parser.add_argument('-v', '--version', action = 'version', version = '%(prog)s 0.1', help = 'Show version.'", "mode.' ) # Version parser.add_argument('-v', '--version', action = 'version', version = '%(prog)s 0.1',", "output 3 parser.add_argument('-rv', '--reverse', action = 'store_true', default = False, help = 'Output", "### # Input File parser.add_argument('pdf_file_path', action = 'store', type = str, metavar =", "Reverse output 3 parser.add_argument('-rv', '--reverse', action = 'store_true', default = False, help =", "default = False, help = 'Split pdf page horizontally.') ### Rotate option ###", "file path to be fixed.' ) # Output File parser.add_argument('-o', '--output', action =", "= str, metavar = \"PDF_PATH\", help = 'Pdf file path to be fixed.'", "help = 'First page is located on first page when using saddle stitch", "option ### # rotate parser.add_argument('-r', '--rotate', action = 'store', type = int, choices", "'First page is located on first page when using saddle stitch option.' )", "first page when using saddle stitch option.' ) front_page.add_argument('-fs', '--front-second', action = 'store_true',", "= False, help = 'Output pdf pages reversely.') ### Others ### # Verbose", "False, help = 'Split pdf page vertically.') # horizontal split split_group.add_argument('-sh', '--horizontal', action", "= False, help = 'Scanned PDF that is saddle stich binding.' 
) ###", "parser.add_argument('-s', '--suffix', action = 'store', default = 'fix', type = str, help =", "### Order option ### # Reverse output 3 parser.add_argument('-rv', '--reverse', action = 'store_true',", "'Output pdf file path .' ) # Output File suffix before extension parser.add_argument('-s',", "path suffix before .pdf extention.' ) ### Split option ### split_group = parser.add_mutually_exclusive_group(required=False)", "= parser.add_mutually_exclusive_group(required=False) # Front page is located front_page.add_argument('-ff', '--front-first', action = 'store_true', default", "= 'First page is located on second page when using saddle stitch option.'", "File parser.add_argument('-o', '--output', action = 'store', type = str, help = 'Output pdf", "option ### # Input File parser.add_argument('pdf_file_path', action = 'store', type = str, metavar", "split_group.add_argument('-sh', '--horizontal', action = 'store_true', default = False, help = 'Split pdf page", "default = False, help = 'Split pdf page vertically.') # horizontal split split_group.add_argument('-sh',", "default = True, help = 'First page is located on second page when", "before extension parser.add_argument('-s', '--suffix', action = 'store', default = 'fix', type = str,", "action = 'store_true', default = True, help = 'First page is located on", "= 'store_true', default = False, help = 'Scanned PDF that is saddle stich", "# rotate parser.add_argument('-r', '--rotate', action = 'store', type = int, choices = [90,", "page location ### front_page = parser.add_mutually_exclusive_group(required=False) # Front page is located front_page.add_argument('-ff', '--front-first',", "prog = \"pydifier\", add_help = True) ### File option ### # Input File", "= \"PDF_PATH\", help = 'Pdf file path to be fixed.' ) # Output", ".' ) # Output File suffix before extension parser.add_argument('-s', '--suffix', action = 'store',", "str, help = 'Output pdf file path suffix before .pdf extention.' 
) ###", "180, 270], help = 'Rotate pdf page clockwise specified degree.' ) ### Front", "page clockwise specified degree.' ) ### Front page location ### front_page = parser.add_mutually_exclusive_group(required=False)", "= 'store_true', default = True, help = 'First page is located on second", "### parser.add_argument('-ss', '--saddle-stitch', action = 'store_true', default = False, help = 'Scanned PDF", "\"pydifier\", add_help = True) ### File option ### # Input File parser.add_argument('pdf_file_path', action", "argparse.ArgumentParser( prog = \"pydifier\", add_help = True) ### File option ### # Input", "'--saddle-stitch', action = 'store_true', default = False, help = 'Scanned PDF that is", "be fixed.' ) # Output File parser.add_argument('-o', '--output', action = 'store', type =", "parser.add_mutually_exclusive_group(required=False) # Front page is located front_page.add_argument('-ff', '--front-first', action = 'store_true', default =", "'Scanned PDF that is saddle stich binding.' ) ### Order option ### #", ") # Output File parser.add_argument('-o', '--output', action = 'store', type = str, help", "'First page is located on second page when using saddle stitch option.' )", "= 'store', type = str, metavar = \"PDF_PATH\", help = 'Pdf file path", "action = 'store', type = str, metavar = \"PDF_PATH\", help = 'Pdf file", "action = 'store_true', default = False, help = 'Run as verbose mode.' )", "argparse parser = argparse.ArgumentParser( prog = \"pydifier\", add_help = True) ### File option", "front_page.add_argument('-fs', '--front-second', action = 'store_true', default = True, help = 'First page is", "file path suffix before .pdf extention.' ) ### Split option ### split_group =", "### front_page = parser.add_mutually_exclusive_group(required=False) # Front page is located front_page.add_argument('-ff', '--front-first', action =", "clockwise specified degree.' 
) ### Front page location ### front_page = parser.add_mutually_exclusive_group(required=False) #", "before .pdf extention.' ) ### Split option ### split_group = parser.add_mutually_exclusive_group(required=False) # Vertical", "is saddle stich binding.' ) ### Order option ### # Reverse output 3", "270], help = 'Rotate pdf page clockwise specified degree.' ) ### Front page", "= str, help = 'Output pdf file path suffix before .pdf extention.' )", "False, help = 'Output pdf pages reversely.') ### Others ### # Verbose mode", "= 'fix', type = str, help = 'Output pdf file path suffix before", "= 'store_true', default = False, help = 'Run as verbose mode.' ) #", "parser.add_argument('--verbose', action = 'store_true', default = False, help = 'Run as verbose mode.'", "= 'Run as verbose mode.' ) # Version parser.add_argument('-v', '--version', action = 'version',", "verbose mode.' ) # Version parser.add_argument('-v', '--version', action = 'version', version = '%(prog)s", "Rotate option ### # rotate parser.add_argument('-r', '--rotate', action = 'store', type = int,", "False, help = 'Split pdf page horizontally.') ### Rotate option ### # rotate", "= 'Pdf file path to be fixed.' ) # Output File parser.add_argument('-o', '--output',", "page when using saddle stitch option.' ) ### Binding option ### parser.add_argument('-ss', '--saddle-stitch',", "type = str, metavar = \"PDF_PATH\", help = 'Pdf file path to be", "parser.add_argument('-rv', '--reverse', action = 'store_true', default = False, help = 'Output pdf pages", "default = False, help = 'Scanned PDF that is saddle stich binding.' )", "'--reverse', action = 'store_true', default = False, help = 'Output pdf pages reversely.')", "file path .' ) # Output File suffix before extension parser.add_argument('-s', '--suffix', action", "### split_group = parser.add_mutually_exclusive_group(required=False) # Vertical split split_group.add_argument('-sv', '--vertical', action = 'store_true', default", "specified degree.' 
) ### Front page location ### front_page = parser.add_mutually_exclusive_group(required=False) # Front", "stitch option.' ) front_page.add_argument('-fs', '--front-second', action = 'store_true', default = True, help =", "degree.' ) ### Front page location ### front_page = parser.add_mutually_exclusive_group(required=False) # Front page", "suffix before extension parser.add_argument('-s', '--suffix', action = 'store', default = 'fix', type =", "action = 'store_true', default = False, help = 'Split pdf page vertically.') #", "type = str, help = 'Output pdf file path .' ) # Output", "help = 'Rotate pdf page clockwise specified degree.' ) ### Front page location", "parser.add_argument('-ss', '--saddle-stitch', action = 'store_true', default = False, help = 'Scanned PDF that", "stich binding.' ) ### Order option ### # Reverse output 3 parser.add_argument('-rv', '--reverse',", "File suffix before extension parser.add_argument('-s', '--suffix', action = 'store', default = 'fix', type", "= int, choices = [90, 180, 270], help = 'Rotate pdf page clockwise", "help = 'First page is located on second page when using saddle stitch", "that is saddle stich binding.' ) ### Order option ### # Reverse output", "= 'store', default = 'fix', type = str, help = 'Output pdf file", "True, help = 'First page is located on second page when using saddle", "= True) ### File option ### # Input File parser.add_argument('pdf_file_path', action = 'store',", "### File option ### # Input File parser.add_argument('pdf_file_path', action = 'store', type =", "saddle stitch option.' ) front_page.add_argument('-fs', '--front-second', action = 'store_true', default = True, help", "suffix before .pdf extention.' ) ### Split option ### split_group = parser.add_mutually_exclusive_group(required=False) #", "False, help = 'Run as verbose mode.' ) # Version parser.add_argument('-v', '--version', action", "'Rotate pdf page clockwise specified degree.' 
) ### Front page location ### front_page", "front_page = parser.add_mutually_exclusive_group(required=False) # Front page is located front_page.add_argument('-ff', '--front-first', action = 'store_true',", ") ### Front page location ### front_page = parser.add_mutually_exclusive_group(required=False) # Front page is", "<gh_stars>0 import argparse parser = argparse.ArgumentParser( prog = \"pydifier\", add_help = True) ###", "help = 'Pdf file path to be fixed.' ) # Output File parser.add_argument('-o',", "action = 'store_true', default = False, help = 'Scanned PDF that is saddle", "\"PDF_PATH\", help = 'Pdf file path to be fixed.' ) # Output File", "split split_group.add_argument('-sv', '--vertical', action = 'store_true', default = False, help = 'Split pdf", "False, help = 'First page is located on first page when using saddle", "= 'Rotate pdf page clockwise specified degree.' ) ### Front page location ###", "# Output File suffix before extension parser.add_argument('-s', '--suffix', action = 'store', default =", "on second page when using saddle stitch option.' ) ### Binding option ###", "stitch option.' ) ### Binding option ### parser.add_argument('-ss', '--saddle-stitch', action = 'store_true', default", "= 'store', type = str, help = 'Output pdf file path .' )", "metavar = \"PDF_PATH\", help = 'Pdf file path to be fixed.' ) #", "to be fixed.' ) # Output File parser.add_argument('-o', '--output', action = 'store', type", "extention.' 
) ### Split option ### split_group = parser.add_mutually_exclusive_group(required=False) # Vertical split split_group.add_argument('-sv',", "mode parser.add_argument('--verbose', action = 'store_true', default = False, help = 'Run as verbose", "'store_true', default = False, help = 'First page is located on first page", "action = 'store', type = int, choices = [90, 180, 270], help =", "rotate parser.add_argument('-r', '--rotate', action = 'store', type = int, choices = [90, 180,", "located front_page.add_argument('-ff', '--front-first', action = 'store_true', default = False, help = 'First page", "'Output pdf pages reversely.') ### Others ### # Verbose mode parser.add_argument('--verbose', action =", "= 'Output pdf file path .' ) # Output File suffix before extension", "split_group.add_argument('-sv', '--vertical', action = 'store_true', default = False, help = 'Split pdf page", ") ### Split option ### split_group = parser.add_mutually_exclusive_group(required=False) # Vertical split split_group.add_argument('-sv', '--vertical',", "help = 'Run as verbose mode.' ) # Version parser.add_argument('-v', '--version', action =", "path .' ) # Output File suffix before extension parser.add_argument('-s', '--suffix', action =", "= False, help = 'Split pdf page vertically.') # horizontal split split_group.add_argument('-sh', '--horizontal',", "'--output', action = 'store', type = str, help = 'Output pdf file path", "= 'Output pdf pages reversely.') ### Others ### # Verbose mode parser.add_argument('--verbose', action", "default = False, help = 'Output pdf pages reversely.') ### Others ### #", "= 'store_true', default = False, help = 'Split pdf page horizontally.') ### Rotate", "saddle stitch option.' 
) ### Binding option ### parser.add_argument('-ss', '--saddle-stitch', action = 'store_true',", "pdf pages reversely.') ### Others ### # Verbose mode parser.add_argument('--verbose', action = 'store_true',", "is located front_page.add_argument('-ff', '--front-first', action = 'store_true', default = False, help = 'First", "type = int, choices = [90, 180, 270], help = 'Rotate pdf page", "Binding option ### parser.add_argument('-ss', '--saddle-stitch', action = 'store_true', default = False, help =", "'--rotate', action = 'store', type = int, choices = [90, 180, 270], help", "add_help = True) ### File option ### # Input File parser.add_argument('pdf_file_path', action =", "front_page.add_argument('-ff', '--front-first', action = 'store_true', default = False, help = 'First page is", "= \"pydifier\", add_help = True) ### File option ### # Input File parser.add_argument('pdf_file_path',", "saddle stich binding.' ) ### Order option ### # Reverse output 3 parser.add_argument('-rv',", "action = 'store_true', default = False, help = 'Split pdf page horizontally.') ###", ") ### Binding option ### parser.add_argument('-ss', '--saddle-stitch', action = 'store_true', default = False,", "Order option ### # Reverse output 3 parser.add_argument('-rv', '--reverse', action = 'store_true', default", "### Split option ### split_group = parser.add_mutually_exclusive_group(required=False) # Vertical split split_group.add_argument('-sv', '--vertical', action", "'Split pdf page horizontally.') ### Rotate option ### # rotate parser.add_argument('-r', '--rotate', action", "'store', default = 'fix', type = str, help = 'Output pdf file path", "default = False, help = 'First page is located on first page when", "# Input File parser.add_argument('pdf_file_path', action = 'store', type = str, metavar = \"PDF_PATH\",", "### Binding option ### parser.add_argument('-ss', '--saddle-stitch', action = 'store_true', default = False, help", "page is located front_page.add_argument('-ff', 
'--front-first', action = 'store_true', default = False, help =", "parser.add_argument('pdf_file_path', action = 'store', type = str, metavar = \"PDF_PATH\", help = 'Pdf", "'Pdf file path to be fixed.' ) # Output File parser.add_argument('-o', '--output', action", "reversely.') ### Others ### # Verbose mode parser.add_argument('--verbose', action = 'store_true', default =", "'store_true', default = False, help = 'Scanned PDF that is saddle stich binding.'", ") # Version parser.add_argument('-v', '--version', action = 'version', version = '%(prog)s 0.1', help", "'Split pdf page vertically.') # horizontal split split_group.add_argument('-sh', '--horizontal', action = 'store_true', default", ") ### Order option ### # Reverse output 3 parser.add_argument('-rv', '--reverse', action =", "horizontal split split_group.add_argument('-sh', '--horizontal', action = 'store_true', default = False, help = 'Split", "Front page location ### front_page = parser.add_mutually_exclusive_group(required=False) # Front page is located front_page.add_argument('-ff',", "'store_true', default = False, help = 'Run as verbose mode.' ) # Version", "= str, help = 'Output pdf file path .' ) # Output File", "page when using saddle stitch option.' ) front_page.add_argument('-fs', '--front-second', action = 'store_true', default", "'--version', action = 'version', version = '%(prog)s 0.1', help = 'Show version.' )", "Verbose mode parser.add_argument('--verbose', action = 'store_true', default = False, help = 'Run as", ") # Output File suffix before extension parser.add_argument('-s', '--suffix', action = 'store', default", "option.' ) ### Binding option ### parser.add_argument('-ss', '--saddle-stitch', action = 'store_true', default =", "'store', type = str, help = 'Output pdf file path .' 
) #", "True) ### File option ### # Input File parser.add_argument('pdf_file_path', action = 'store', type", "Front page is located front_page.add_argument('-ff', '--front-first', action = 'store_true', default = False, help", "= argparse.ArgumentParser( prog = \"pydifier\", add_help = True) ### File option ### #", "option ### # Reverse output 3 parser.add_argument('-rv', '--reverse', action = 'store_true', default =", "location ### front_page = parser.add_mutually_exclusive_group(required=False) # Front page is located front_page.add_argument('-ff', '--front-first', action", "= 'store_true', default = False, help = 'First page is located on first", "= False, help = 'First page is located on first page when using", "binding.' ) ### Order option ### # Reverse output 3 parser.add_argument('-rv', '--reverse', action", "on first page when using saddle stitch option.' ) front_page.add_argument('-fs', '--front-second', action =", "'Output pdf file path suffix before .pdf extention.' ) ### Split option ###", "'store', type = str, metavar = \"PDF_PATH\", help = 'Pdf file path to", "located on first page when using saddle stitch option.' ) front_page.add_argument('-fs', '--front-second', action", "help = 'Scanned PDF that is saddle stich binding.' ) ### Order option", "PDF that is saddle stich binding.' 
) ### Order option ### # Reverse", "Others ### # Verbose mode parser.add_argument('--verbose', action = 'store_true', default = False, help", "action = 'store', default = 'fix', type = str, help = 'Output pdf", "horizontally.') ### Rotate option ### # rotate parser.add_argument('-r', '--rotate', action = 'store', type", "# Version parser.add_argument('-v', '--version', action = 'version', version = '%(prog)s 0.1', help =", "parser = argparse.ArgumentParser( prog = \"pydifier\", add_help = True) ### File option ###", "### Others ### # Verbose mode parser.add_argument('--verbose', action = 'store_true', default = False,", "= 'First page is located on first page when using saddle stitch option.'", "is located on second page when using saddle stitch option.' ) ### Binding", "int, choices = [90, 180, 270], help = 'Rotate pdf page clockwise specified", "# horizontal split split_group.add_argument('-sh', '--horizontal', action = 'store_true', default = False, help =", "'--front-first', action = 'store_true', default = False, help = 'First page is located", "option.' ) front_page.add_argument('-fs', '--front-second', action = 'store_true', default = True, help = 'First", "### # rotate parser.add_argument('-r', '--rotate', action = 'store', type = int, choices =", "second page when using saddle stitch option.' ) ### Binding option ### parser.add_argument('-ss',", "help = 'Split pdf page vertically.') # horizontal split split_group.add_argument('-sh', '--horizontal', action =", "[90, 180, 270], help = 'Rotate pdf page clockwise specified degree.' ) ###", "# Output File parser.add_argument('-o', '--output', action = 'store', type = str, help =", "= 'store_true', default = False, help = 'Split pdf page vertically.') # horizontal", "help = 'Output pdf pages reversely.') ### Others ### # Verbose mode parser.add_argument('--verbose',", "choices = [90, 180, 270], help = 'Rotate pdf page clockwise specified degree.'", "fixed.' 
) # Output File parser.add_argument('-o', '--output', action = 'store', type = str,", "### # Reverse output 3 parser.add_argument('-rv', '--reverse', action = 'store_true', default = False,", "help = 'Output pdf file path .' ) # Output File suffix before", "path to be fixed.' ) # Output File parser.add_argument('-o', '--output', action = 'store',", "= [90, 180, 270], help = 'Rotate pdf page clockwise specified degree.' )", "located on second page when using saddle stitch option.' ) ### Binding option", "option ### parser.add_argument('-ss', '--saddle-stitch', action = 'store_true', default = False, help = 'Scanned", "help = 'Output pdf file path suffix before .pdf extention.' ) ### Split", "split_group = parser.add_mutually_exclusive_group(required=False) # Vertical split split_group.add_argument('-sv', '--vertical', action = 'store_true', default =", "'--suffix', action = 'store', default = 'fix', type = str, help = 'Output", "pages reversely.') ### Others ### # Verbose mode parser.add_argument('--verbose', action = 'store_true', default", "'--vertical', action = 'store_true', default = False, help = 'Split pdf page vertically.')", "= 'Split pdf page vertically.') # horizontal split split_group.add_argument('-sh', '--horizontal', action = 'store_true',", "False, help = 'Scanned PDF that is saddle stich binding.' ) ### Order", "Vertical split split_group.add_argument('-sv', '--vertical', action = 'store_true', default = False, help = 'Split", "vertically.') # horizontal split split_group.add_argument('-sh', '--horizontal', action = 'store_true', default = False, help", "'store', type = int, choices = [90, 180, 270], help = 'Rotate pdf", "'store_true', default = True, help = 'First page is located on second page", "str, metavar = \"PDF_PATH\", help = 'Pdf file path to be fixed.' )", "= 'Scanned PDF that is saddle stich binding.' ) ### Order option ###", "pdf file path .' 
) # Output File suffix before extension parser.add_argument('-s', '--suffix',", "as verbose mode.' ) # Version parser.add_argument('-v', '--version', action = 'version', version =" ]
[ "= { \"name\": \"DexParser\", \"desc\": \"Parsing Dex file into bytecode\" } def __init__(self,", "<reponame>BiteFoo/androyara<filename>androyara/core/dex_parser.py # coding:utf8 ''' @File : dex_parser.py @Author : Loopher @Version : 1.0", "coding:utf8 ''' @File : dex_parser.py @Author : Loopher @Version : 1.0 @License :", "@Author : Loopher @Version : 1.0 @License : (C)Copyright 2020-2021,Loopher @Desc : Dex文件解析", ": Loopher @Version : 1.0 @License : (C)Copyright 2020-2021,Loopher @Desc : Dex文件解析 '''", "\"\"\" from androyara.dex.dex_vm import DexFileVM class DexParser(object): parser_info = { \"name\": \"DexParser\", \"desc\":", "\"DexParser\", \"desc\": \"Parsing Dex file into bytecode\" } def __init__(self, pkg, buff): self.vm", "Dex file into bytecode\" } def __init__(self, pkg, buff): self.vm = DexFileVM(pkg,buff) self.vm.build_map()", "1.0 @License : (C)Copyright 2020-2021,Loopher @Desc : Dex文件解析 ''' \"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\" from", "{ \"name\": \"DexParser\", \"desc\": \"Parsing Dex file into bytecode\" } def __init__(self, pkg,", "\"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\" from androyara.dex.dex_vm import DexFileVM class DexParser(object): parser_info = { \"name\":", "DexFileVM class DexParser(object): parser_info = { \"name\": \"DexParser\", \"desc\": \"Parsing Dex file into", "\"name\": \"DexParser\", \"desc\": \"Parsing Dex file into bytecode\" } def __init__(self, pkg, buff):", ": dex_parser.py @Author : Loopher @Version : 1.0 @License : (C)Copyright 2020-2021,Loopher @Desc", "''' \"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\" from androyara.dex.dex_vm import DexFileVM class DexParser(object): parser_info = {", "Loopher @Version : 1.0 @License : (C)Copyright 2020-2021,Loopher @Desc : Dex文件解析 ''' \"\"\"", "\"Parsing Dex file into bytecode\" } def __init__(self, pkg, buff): self.vm = DexFileVM(pkg,buff)", "@License : (C)Copyright 2020-2021,Loopher @Desc : Dex文件解析 ''' \"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\" 
from androyara.dex.dex_vm", "parser_info = { \"name\": \"DexParser\", \"desc\": \"Parsing Dex file into bytecode\" } def", "每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\" from androyara.dex.dex_vm import DexFileVM class DexParser(object): parser_info = { \"name\": \"DexParser\",", "class DexParser(object): parser_info = { \"name\": \"DexParser\", \"desc\": \"Parsing Dex file into bytecode\"", "import DexFileVM class DexParser(object): parser_info = { \"name\": \"DexParser\", \"desc\": \"Parsing Dex file", "DexParser(object): parser_info = { \"name\": \"DexParser\", \"desc\": \"Parsing Dex file into bytecode\" }", ": (C)Copyright 2020-2021,Loopher @Desc : Dex文件解析 ''' \"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\" from androyara.dex.dex_vm import", "@Version : 1.0 @License : (C)Copyright 2020-2021,Loopher @Desc : Dex文件解析 ''' \"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较", "dex_parser.py @Author : Loopher @Version : 1.0 @License : (C)Copyright 2020-2021,Loopher @Desc :", "Dex文件解析 ''' \"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\" from androyara.dex.dex_vm import DexFileVM class DexParser(object): parser_info =", "\"desc\": \"Parsing Dex file into bytecode\" } def __init__(self, pkg, buff): self.vm =", "@Desc : Dex文件解析 ''' \"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\" from androyara.dex.dex_vm import DexFileVM class DexParser(object):", "# coding:utf8 ''' @File : dex_parser.py @Author : Loopher @Version : 1.0 @License", "''' @File : dex_parser.py @Author : Loopher @Version : 1.0 @License : (C)Copyright", ": 1.0 @License : (C)Copyright 2020-2021,Loopher @Desc : Dex文件解析 ''' \"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\"", "from androyara.dex.dex_vm import DexFileVM class DexParser(object): parser_info = { \"name\": \"DexParser\", \"desc\": \"Parsing", "2020-2021,Loopher @Desc : Dex文件解析 ''' \"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\" from androyara.dex.dex_vm import DexFileVM class", "@File : dex_parser.py @Author : Loopher @Version : 1.0 @License : 
(C)Copyright 2020-2021,Loopher", ": Dex文件解析 ''' \"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\" from androyara.dex.dex_vm import DexFileVM class DexParser(object): parser_info", "(C)Copyright 2020-2021,Loopher @Desc : Dex文件解析 ''' \"\"\" 每一个dex都会经过这里的解析处理,目的是建立一个映射表能快速索引和比较 \"\"\" from androyara.dex.dex_vm import DexFileVM", "androyara.dex.dex_vm import DexFileVM class DexParser(object): parser_info = { \"name\": \"DexParser\", \"desc\": \"Parsing Dex" ]
[ "The package provides a number of modules to be used in the clustering,", "be used in the clustering, extraction, and evaluation of white matter tractography. \"\"\"", "provides a number of modules to be used in the clustering, extraction, and", "number of modules to be used in the clustering, extraction, and evaluation of", "a number of modules to be used in the clustering, extraction, and evaluation", "modules to be used in the clustering, extraction, and evaluation of white matter", "package provides a number of modules to be used in the clustering, extraction,", "of modules to be used in the clustering, extraction, and evaluation of white", "\"\"\" The package provides a number of modules to be used in the", "to be used in the clustering, extraction, and evaluation of white matter tractography." ]
[ "Address 1 Compressed: ' + caddr) f.write('\\nPublic Address 1 Uncompressed: ' + uaddr)", "', wif, '\\nPrivatekey compressed: ', wifc, '\\nPublic Address 1 Uncompressed: ', uaddr, '\\nPublic", "True: count+=1 total+=5 ran=random.randrange(a,b) HEX = \"%064x\" % ran wallet = Wallet(HEX) uaddr", "\"%064x\" % ran wallet = Wallet(HEX) uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed address caddr", "= wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed address caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed address saddr =", "+ str(a)) print(\"Max range: \" + str(b)) print(\"==========================================================\") print('Total Bitcoin Addresses Loaded and", "' + uaddr) f.write('\\nPublic Address 3 Segwit: ' + saddr) f.write('\\nPublic Address bc1", "Wait min range: \" + str(a)) print(\"Max range: \" + str(b)) print(\"==========================================================\") print('Total", ", ' : Total : ', total, ' : HEX : ', HEX,", "bcaddr) f.write('\\nPublic Address bc1 P2WSH: ' + bc1addr) f.write('\\n =====Made by mizogg.co.uk Donations", "Address bc1 P2WSH: ' + bc1addr) f.write('\\n =====Made by mizogg.co.uk Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB ====='", ": ',str (line_count)) x=int(input(\"'Start range in BITs 0 or Higher(Puzzle StartNumber) -> \"))", "range: \" + str(b)) print(\"==========================================================\") print('Total Bitcoin Addresses Loaded and Checking : ',str", "Uncompressed: ' + wif) f.write('\\nPrivatekey compressed: ' + wifc) f.write('\\nPublic Address 1 Compressed:", "256 Max (Puzzle StopNumber) -> \")) b = 2**y print(\"Starting search... 
Please Wait", "1 with open(filename) as file: add = file.read().split() add = set(add) print('Total Bitcoin", "uaddr) f.write('\\nPublic Address 3 Segwit: ' + saddr) f.write('\\nPublic Address bc1 P2WPKH: '", "', bcaddr, '\\nPublic Address bc1 P2WSH: ', bc1addr) print('Scan : ', count ,", "+= 1 with open(filename) as file: add = file.read().split() add = set(add) print('Total", "\"\\n\" line_count += 1 with open(filename) as file: add = file.read().split() add =", "StartNumber) -> \")) a = 2**x y=int(input(\"Stop range Max in BITs 256 Max", "compressed: ', wifc, '\\nPublic Address 1 Uncompressed: ', uaddr, '\\nPublic Address 1 Compressed:", "uaddr, '\\nPublic Address 1 Compressed: ', caddr, '\\nPublic Address 3 Segwit: ', saddr,", "as file: add = file.read().split() add = set(add) print('Total Bitcoin Addresses Loaded and", "Checking : ',str (line_count)) count=0 total=0 while True: count+=1 total+=5 ran=random.randrange(a,b) HEX =", "Addresses Loaded and Checking : ',str (line_count)) x=int(input(\"'Start range in BITs 0 or", "= wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey (dec): ', ran,'\\nPrivatekey (hex): ', HEX, '\\nPrivatekey Uncompressed: ', wif,", "Uncompressed: ', uaddr, '\\nPublic Address 1 Compressed: ', caddr, '\\nPublic Address 3 Segwit:", "(Puzzle StopNumber) -> \")) b = 2**y print(\"Starting search... 
Please Wait min range:", "uaddr in add or saddr in add or bcaddr in add or bc1addr", "print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count)) count=0 total=0 while True:", "print('\\nMatch Found') f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey (dec): ' + str(ran)) f.write('\\nPrivatekey (hex): ' + HEX)", "2**x y=int(input(\"Stop range Max in BITs 256 Max (Puzzle StopNumber) -> \")) b", "' : HEX : ', HEX, end='\\r') if caddr in add or uaddr", "= wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed address saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr", "bc1addr in add: print('\\nMatch Found') f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey (dec): ' + str(ran)) f.write('\\nPrivatekey (hex):", "1 Uncompressed: ', uaddr, '\\nPublic Address 1 Compressed: ', caddr, '\\nPublic Address 3", "if caddr in add or uaddr in add or saddr in add or", "', ran,'\\nPrivatekey (hex): ', HEX, '\\nPrivatekey Uncompressed: ', wif, '\\nPrivatekey compressed: ', wifc,", "' + str(ran)) f.write('\\nPrivatekey (hex): ' + HEX) f.write('\\nPrivatekey Uncompressed: ' + wif)", "+ str(ran)) f.write('\\nPrivatekey (hex): ' + HEX) f.write('\\nPrivatekey Uncompressed: ' + wif) f.write('\\nPrivatekey", "search... 
Please Wait min range: \" + str(a)) print(\"Max range: \" + str(b))", "Wallet(HEX) uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed address caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed address", "Bitcoin Addresses Loaded and Checking : ',str (line_count)) count=0 total=0 while True: count+=1", "bcaddr, '\\nPublic Address bc1 P2WSH: ', bc1addr) print('Scan : ', count , '", "file: add = file.read().split() add = set(add) print('Total Bitcoin Addresses Loaded and Checking", "as f: line_count = 0 for line in f: line != \"\\n\" line_count", "import Wallet import random filename ='puzzle.txt' with open(filename) as f: line_count = 0", "Total : ', total, ' : HEX : ', HEX, end='\\r') if caddr", "HEX : ', HEX, end='\\r') if caddr in add or uaddr in add", "'\\nPublic Address 3 Segwit: ', saddr, '\\nPublic Address bc1 P2WPKH: ', bcaddr, '\\nPublic", "import random filename ='puzzle.txt' with open(filename) as f: line_count = 0 for line", "= Wallet(HEX) uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed address caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed", "print('Scan : ', count , ' : Total : ', total, ' :", "Address 1 Uncompressed: ', uaddr, '\\nPublic Address 1 Compressed: ', caddr, '\\nPublic Address", "range: \" + str(a)) print(\"Max range: \" + str(b)) print(\"==========================================================\") print('Total Bitcoin Addresses", "wifc) f.write('\\nPublic Address 1 Compressed: ' + caddr) f.write('\\nPublic Address 1 Uncompressed: '", "address saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif =", "total, ' : HEX : ', HEX, end='\\r') if caddr in add or", "add or bcaddr in add or bc1addr in add: print('\\nMatch Found') 
f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey", "Loaded and Checking : ',str (line_count)) count=0 total=0 while True: count+=1 total+=5 ran=random.randrange(a,b)", "#print('\\nPrivatekey (dec): ', ran,'\\nPrivatekey (hex): ', HEX, '\\nPrivatekey Uncompressed: ', wif, '\\nPrivatekey compressed:", "uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed address caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed address saddr", "a = 2**x y=int(input(\"Stop range Max in BITs 256 Max (Puzzle StopNumber) ->", "HEX) f.write('\\nPrivatekey Uncompressed: ' + wif) f.write('\\nPrivatekey compressed: ' + wifc) f.write('\\nPublic Address", "open(filename) as file: add = file.read().split() add = set(add) print('Total Bitcoin Addresses Loaded", "Uncompressed: ', wif, '\\nPrivatekey compressed: ', wifc, '\\nPublic Address 1 Uncompressed: ', uaddr,", "= 2**x y=int(input(\"Stop range Max in BITs 256 Max (Puzzle StopNumber) -> \"))", "and Checking : ',str (line_count)) x=int(input(\"'Start range in BITs 0 or Higher(Puzzle StartNumber)", "wif = wallet.key.__dict__['mainnet'].__dict__['wif'] wifc = wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey (dec): ', ran,'\\nPrivatekey (hex): ', HEX,", "Address 1 Uncompressed: ' + uaddr) f.write('\\nPublic Address 3 Segwit: ' + saddr)", "% ran wallet = Wallet(HEX) uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed address caddr =", "3 Segwit: ' + saddr) f.write('\\nPublic Address bc1 P2WPKH: ' + bcaddr) f.write('\\nPublic", "bc1 P2WSH: ', bc1addr) print('Scan : ', count , ' : Total :", "print(\"Starting search... 
Please Wait min range: \" + str(a)) print(\"Max range: \" +", "' : Total : ', total, ' : HEX : ', HEX, end='\\r')", "str(b)) print(\"==========================================================\") print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count)) count=0 total=0", "wallet = Wallet(HEX) uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed address caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy", "saddr) f.write('\\nPublic Address bc1 P2WPKH: ' + bcaddr) f.write('\\nPublic Address bc1 P2WSH: '", "b = 2**y print(\"Starting search... Please Wait min range: \" + str(a)) print(\"Max", ": ', total, ' : HEX : ', HEX, end='\\r') if caddr in", "bcaddr in add or bc1addr in add: print('\\nMatch Found') f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey (dec): '", "= 2**y print(\"Starting search... Please Wait min range: \" + str(a)) print(\"Max range:", "+ wif) f.write('\\nPrivatekey compressed: ' + wifc) f.write('\\nPublic Address 1 Compressed: ' +", ": ',str (line_count)) count=0 total=0 while True: count+=1 total+=5 ran=random.randrange(a,b) HEX = \"%064x\"", "caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed address saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH']", "bitcoinaddress import Wallet import random filename ='puzzle.txt' with open(filename) as f: line_count =", "set(add) print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count)) x=int(input(\"'Start range in", "',str (line_count)) x=int(input(\"'Start range in BITs 0 or Higher(Puzzle StartNumber) -> \")) a", "P2WPKH: ' + bcaddr) f.write('\\nPublic Address bc1 P2WSH: ' + bc1addr) f.write('\\n =====Made", ": Total : ', total, ' : HEX : ', HEX, end='\\r') if", "', saddr, '\\nPublic Address bc1 P2WPKH: ', bcaddr, '\\nPublic Address bc1 P2WSH: ',", "='puzzle.txt' with open(filename) as f: 
line_count = 0 for line in f: line", "or saddr in add or bcaddr in add or bc1addr in add: print('\\nMatch", "filename ='puzzle.txt' with open(filename) as f: line_count = 0 for line in f:", "caddr) f.write('\\nPublic Address 1 Uncompressed: ' + uaddr) f.write('\\nPublic Address 3 Segwit: '", "(dec): ', ran,'\\nPrivatekey (hex): ', HEX, '\\nPrivatekey Uncompressed: ', wif, '\\nPrivatekey compressed: ',", "#Legacy compressed address saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH']", "<reponame>Mizogg/Fillbit-Bitcoin-Address #Fullbit.py =====Made by <EMAIL> Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB ===== from bitcoinaddress import Wallet import", "add: print('\\nMatch Found') f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey (dec): ' + str(ran)) f.write('\\nPrivatekey (hex): ' +", "HEX = \"%064x\" % ran wallet = Wallet(HEX) uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed", "' + caddr) f.write('\\nPublic Address 1 Uncompressed: ' + uaddr) f.write('\\nPublic Address 3", "wif, '\\nPrivatekey compressed: ', wifc, '\\nPublic Address 1 Uncompressed: ', uaddr, '\\nPublic Address", "1 Uncompressed: ' + uaddr) f.write('\\nPublic Address 3 Segwit: ' + saddr) f.write('\\nPublic", "f: line_count = 0 for line in f: line != \"\\n\" line_count +=", "and Checking : ',str (line_count)) count=0 total=0 while True: count+=1 total+=5 ran=random.randrange(a,b) HEX", "Segwit: ', saddr, '\\nPublic Address bc1 P2WPKH: ', bcaddr, '\\nPublic Address bc1 P2WSH:", "3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB ===== from bitcoinaddress import Wallet import random filename ='puzzle.txt' with open(filename) as", "'\\nPrivatekey compressed: ', wifc, '\\nPublic Address 1 Uncompressed: ', uaddr, '\\nPublic Address 1", "wifc = wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey 
(dec): ', ran,'\\nPrivatekey (hex): ', HEX, '\\nPrivatekey Uncompressed: ',", "open(filename) as f: line_count = 0 for line in f: line != \"\\n\"", "+ bcaddr) f.write('\\nPublic Address bc1 P2WSH: ' + bc1addr) f.write('\\n =====Made by mizogg.co.uk", "-> \")) b = 2**y print(\"Starting search... Please Wait min range: \" +", "wifc, '\\nPublic Address 1 Uncompressed: ', uaddr, '\\nPublic Address 1 Compressed: ', caddr,", "f.write('\\nPrivatekey compressed: ' + wifc) f.write('\\nPublic Address 1 Compressed: ' + caddr) f.write('\\nPublic", "print(\"==========================================================\") print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count)) count=0 total=0 while", "' + HEX) f.write('\\nPrivatekey Uncompressed: ' + wif) f.write('\\nPrivatekey compressed: ' + wifc)", "file.read().split() add = set(add) print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count))", "f.write('\\nPublic Address bc1 P2WSH: ' + bc1addr) f.write('\\n =====Made by mizogg.co.uk Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB", "(hex): ' + HEX) f.write('\\nPrivatekey Uncompressed: ' + wif) f.write('\\nPrivatekey compressed: ' +", "ran wallet = Wallet(HEX) uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed address caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c']", "= wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif = wallet.key.__dict__['mainnet'].__dict__['wif'] wifc = wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey (dec):", "in BITs 0 or Higher(Puzzle StartNumber) -> \")) a = 2**x y=int(input(\"Stop range", ": ', count , ' : Total : ', total, ' : HEX", "or Higher(Puzzle StartNumber) -> \")) a = 2**x y=int(input(\"Stop range Max in BITs", "f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey (dec): ' + str(ran)) f.write('\\nPrivatekey (hex): ' + HEX) f.write('\\nPrivatekey 
Uncompressed:", "uncompressed address caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed address saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address bcaddr", "', wifc, '\\nPublic Address 1 Uncompressed: ', uaddr, '\\nPublic Address 1 Compressed: ',", "= wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif = wallet.key.__dict__['mainnet'].__dict__['wif'] wifc = wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey (dec): ', ran,'\\nPrivatekey (hex):", "', total, ' : HEX : ', HEX, end='\\r') if caddr in add", "or uaddr in add or saddr in add or bcaddr in add or", ": ', HEX, end='\\r') if caddr in add or uaddr in add or", "bc1 P2WSH: ' + bc1addr) f.write('\\n =====Made by mizogg.co.uk Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB =====' )", "in f: line != \"\\n\" line_count += 1 with open(filename) as file: add", "str(a)) print(\"Max range: \" + str(b)) print(\"==========================================================\") print('Total Bitcoin Addresses Loaded and Checking", "Address bc1 P2WPKH: ', bcaddr, '\\nPublic Address bc1 P2WSH: ', bc1addr) print('Scan :", "in add or bc1addr in add: print('\\nMatch Found') f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey (dec): ' +", "HEX, end='\\r') if caddr in add or uaddr in add or saddr in", "or bcaddr in add or bc1addr in add: print('\\nMatch Found') f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey (dec):", "0 for line in f: line != \"\\n\" line_count += 1 with open(filename)", "Loaded and Checking : ',str (line_count)) x=int(input(\"'Start range in BITs 0 or Higher(Puzzle", "',str (line_count)) count=0 total=0 while True: count+=1 total+=5 ran=random.randrange(a,b) HEX = \"%064x\" %", "'\\nPublic Address 1 Uncompressed: ', uaddr, '\\nPublic Address 1 Compressed: ', caddr, '\\nPublic", "', caddr, '\\nPublic Address 3 Segwit: ', saddr, '\\nPublic Address bc1 P2WPKH: ',", "(hex): ', HEX, '\\nPrivatekey Uncompressed: ', wif, 
'\\nPrivatekey compressed: ', wifc, '\\nPublic Address", "'\\nPublic Address bc1 P2WPKH: ', bcaddr, '\\nPublic Address bc1 P2WSH: ', bc1addr) print('Scan", "' + bcaddr) f.write('\\nPublic Address bc1 P2WSH: ' + bc1addr) f.write('\\n =====Made by", "P2WSH: ' + bc1addr) f.write('\\n =====Made by mizogg.co.uk Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB =====' ) f.close()", "wallet.key.__dict__['mainnet'].__dict__['wif'] wifc = wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey (dec): ', ran,'\\nPrivatekey (hex): ', HEX, '\\nPrivatekey Uncompressed:", "Address bc1 P2WSH: ', bc1addr) print('Scan : ', count , ' : Total", "#Legacy uncompressed address caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed address saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address", "(line_count)) x=int(input(\"'Start range in BITs 0 or Higher(Puzzle StartNumber) -> \")) a =", "f.write('\\nPublic Address 1 Uncompressed: ' + uaddr) f.write('\\nPublic Address 3 Segwit: ' +", "add or uaddr in add or saddr in add or bcaddr in add", "str(ran)) f.write('\\nPrivatekey (hex): ' + HEX) f.write('\\nPrivatekey Uncompressed: ' + wif) f.write('\\nPrivatekey compressed:", "compressed address saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif", "Please Wait min range: \" + str(a)) print(\"Max range: \" + str(b)) print(\"==========================================================\")", "Compressed: ', caddr, '\\nPublic Address 3 Segwit: ', saddr, '\\nPublic Address bc1 P2WPKH:", "= wallet.key.__dict__['mainnet'].__dict__['wif'] wifc = wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey (dec): ', ran,'\\nPrivatekey (hex): ', HEX, '\\nPrivatekey", "'\\nPrivatekey Uncompressed: ', wif, '\\nPrivatekey compressed: ', wifc, '\\nPublic 
Address 1 Uncompressed: ',", "P2WPKH: ', bcaddr, '\\nPublic Address bc1 P2WSH: ', bc1addr) print('Scan : ', count", "caddr in add or uaddr in add or saddr in add or bcaddr", "f.write('\\nPublic Address 1 Compressed: ' + caddr) f.write('\\nPublic Address 1 Uncompressed: ' +", "StopNumber) -> \")) b = 2**y print(\"Starting search... Please Wait min range: \"", "!= \"\\n\" line_count += 1 with open(filename) as file: add = file.read().split() add", "Address 3 Segwit: ', saddr, '\\nPublic Address bc1 P2WPKH: ', bcaddr, '\\nPublic Address", "in add: print('\\nMatch Found') f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey (dec): ' + str(ran)) f.write('\\nPrivatekey (hex): '", "Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB ===== from bitcoinaddress import Wallet import random filename ='puzzle.txt' with open(filename)", "bc1 P2WPKH: ', bcaddr, '\\nPublic Address bc1 P2WSH: ', bc1addr) print('Scan : ',", "(line_count)) count=0 total=0 while True: count+=1 total+=5 ran=random.randrange(a,b) HEX = \"%064x\" % ran", "f.write('\\nPrivatekey Uncompressed: ' + wif) f.write('\\nPrivatekey compressed: ' + wifc) f.write('\\nPublic Address 1", "Compressed: ' + caddr) f.write('\\nPublic Address 1 Uncompressed: ' + uaddr) f.write('\\nPublic Address", "from bitcoinaddress import Wallet import random filename ='puzzle.txt' with open(filename) as f: line_count", "= 0 for line in f: line != \"\\n\" line_count += 1 with", "line_count += 1 with open(filename) as file: add = file.read().split() add = set(add)", "total+=5 ran=random.randrange(a,b) HEX = \"%064x\" % ran wallet = Wallet(HEX) uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1']", "1 Compressed: ', caddr, '\\nPublic Address 3 Segwit: ', saddr, '\\nPublic Address bc1", "P2WSH: ', bc1addr) print('Scan : ', count , ' : Total : ',", "with open(filename) as f: line_count = 0 for line in f: line !=", "wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr = 
wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif = wallet.key.__dict__['mainnet'].__dict__['wif'] wifc = wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey (dec): ',", "bc1addr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif = wallet.key.__dict__['mainnet'].__dict__['wif'] wifc = wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey (dec): ', ran,'\\nPrivatekey", "in add or uaddr in add or saddr in add or bcaddr in", "+ HEX) f.write('\\nPrivatekey Uncompressed: ' + wif) f.write('\\nPrivatekey compressed: ' + wifc) f.write('\\nPublic", "random filename ='puzzle.txt' with open(filename) as f: line_count = 0 for line in", "' + wifc) f.write('\\nPublic Address 1 Compressed: ' + caddr) f.write('\\nPublic Address 1", "' + wif) f.write('\\nPrivatekey compressed: ' + wifc) f.write('\\nPublic Address 1 Compressed: '", "Address 3 Segwit: ' + saddr) f.write('\\nPublic Address bc1 P2WPKH: ' + bcaddr)", "while True: count+=1 total+=5 ran=random.randrange(a,b) HEX = \"%064x\" % ran wallet = Wallet(HEX)", "print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count)) x=int(input(\"'Start range in BITs", "wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed address saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr =", "min range: \" + str(a)) print(\"Max range: \" + str(b)) print(\"==========================================================\") print('Total Bitcoin", "in BITs 256 Max (Puzzle StopNumber) -> \")) b = 2**y print(\"Starting search...", "Max in BITs 256 Max (Puzzle StopNumber) -> \")) b = 2**y print(\"Starting", "+ str(b)) print(\"==========================================================\") print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count)) count=0", "Address 1 Compressed: ', caddr, '\\nPublic Address 3 Segwit: ', saddr, 
'\\nPublic Address", "Address bc1 P2WPKH: ' + bcaddr) f.write('\\nPublic Address bc1 P2WSH: ' + bc1addr)", "+ saddr) f.write('\\nPublic Address bc1 P2WPKH: ' + bcaddr) f.write('\\nPublic Address bc1 P2WSH:", "Addresses Loaded and Checking : ',str (line_count)) count=0 total=0 while True: count+=1 total+=5", "BITs 0 or Higher(Puzzle StartNumber) -> \")) a = 2**x y=int(input(\"Stop range Max", "x=int(input(\"'Start range in BITs 0 or Higher(Puzzle StartNumber) -> \")) a = 2**x", "Segwit: ' + saddr) f.write('\\nPublic Address bc1 P2WPKH: ' + bcaddr) f.write('\\nPublic Address", "wif) f.write('\\nPrivatekey compressed: ' + wifc) f.write('\\nPublic Address 1 Compressed: ' + caddr)", "= set(add) print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count)) x=int(input(\"'Start range", "wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif = wallet.key.__dict__['mainnet'].__dict__['wif'] wifc = wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey (dec): ', ran,'\\nPrivatekey (hex): ',", "in add or bcaddr in add or bc1addr in add: print('\\nMatch Found') f=open(\"winner.txt\",\"a\")", "line in f: line != \"\\n\" line_count += 1 with open(filename) as file:", "bc1 P2WPKH: ' + bcaddr) f.write('\\nPublic Address bc1 P2WSH: ' + bc1addr) f.write('\\n", "add = file.read().split() add = set(add) print('Total Bitcoin Addresses Loaded and Checking :", "BITs 256 Max (Puzzle StopNumber) -> \")) b = 2**y print(\"Starting search... 
Please", "', bc1addr) print('Scan : ', count , ' : Total : ', total,", "Wallet import random filename ='puzzle.txt' with open(filename) as f: line_count = 0 for", "ran,'\\nPrivatekey (hex): ', HEX, '\\nPrivatekey Uncompressed: ', wif, '\\nPrivatekey compressed: ', wifc, '\\nPublic", "by <EMAIL> Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB ===== from bitcoinaddress import Wallet import random filename ='puzzle.txt'", "Higher(Puzzle StartNumber) -> \")) a = 2**x y=int(input(\"Stop range Max in BITs 256", "===== from bitcoinaddress import Wallet import random filename ='puzzle.txt' with open(filename) as f:", "count=0 total=0 while True: count+=1 total+=5 ran=random.randrange(a,b) HEX = \"%064x\" % ran wallet", "', uaddr, '\\nPublic Address 1 Compressed: ', caddr, '\\nPublic Address 3 Segwit: ',", "(dec): ' + str(ran)) f.write('\\nPrivatekey (hex): ' + HEX) f.write('\\nPrivatekey Uncompressed: ' +", "wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey (dec): ', ran,'\\nPrivatekey (hex): ', HEX, '\\nPrivatekey Uncompressed: ', wif, '\\nPrivatekey", "wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif = wallet.key.__dict__['mainnet'].__dict__['wif'] wifc =", "bc1addr) print('Scan : ', count , ' : Total : ', total, '", "\" + str(a)) print(\"Max range: \" + str(b)) print(\"==========================================================\") print('Total Bitcoin Addresses Loaded", "'\\nPublic Address 1 Compressed: ', caddr, '\\nPublic Address 3 Segwit: ', saddr, '\\nPublic", "with open(filename) as file: add = file.read().split() add = set(add) print('Total Bitcoin Addresses", "total=0 while True: count+=1 total+=5 ran=random.randrange(a,b) HEX = \"%064x\" % ran wallet =", "saddr in add or bcaddr in add or bc1addr in add: print('\\nMatch Found')", "Uncompressed: ' + uaddr) f.write('\\nPublic 
Address 3 Segwit: ' + saddr) f.write('\\nPublic Address", "Max (Puzzle StopNumber) -> \")) b = 2**y print(\"Starting search... Please Wait min", "HEX, '\\nPrivatekey Uncompressed: ', wif, '\\nPrivatekey compressed: ', wifc, '\\nPublic Address 1 Uncompressed:", "compressed: ' + wifc) f.write('\\nPublic Address 1 Compressed: ' + caddr) f.write('\\nPublic Address", "f.write('\\nPublic Address bc1 P2WPKH: ' + bcaddr) f.write('\\nPublic Address bc1 P2WSH: ' +", "3 Segwit: ', saddr, '\\nPublic Address bc1 P2WPKH: ', bcaddr, '\\nPublic Address bc1", "f: line != \"\\n\" line_count += 1 with open(filename) as file: add =", "-> \")) a = 2**x y=int(input(\"Stop range Max in BITs 256 Max (Puzzle", "count , ' : Total : ', total, ' : HEX : ',", "f.write('\\nPrivatekey (hex): ' + HEX) f.write('\\nPrivatekey Uncompressed: ' + wif) f.write('\\nPrivatekey compressed: '", "add or bc1addr in add: print('\\nMatch Found') f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey (dec): ' + str(ran))", "f.write('\\nPublic Address 3 Segwit: ' + saddr) f.write('\\nPublic Address bc1 P2WPKH: ' +", "saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif = wallet.key.__dict__['mainnet'].__dict__['wif']", "or bc1addr in add: print('\\nMatch Found') f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey (dec): ' + str(ran)) f.write('\\nPrivatekey", "\")) b = 2**y print(\"Starting search... 
Please Wait min range: \" + str(a))", "in add or saddr in add or bcaddr in add or bc1addr in", "+ wifc) f.write('\\nPublic Address 1 Compressed: ' + caddr) f.write('\\nPublic Address 1 Uncompressed:", "'\\nPublic Address bc1 P2WSH: ', bc1addr) print('Scan : ', count , ' :", "+ caddr) f.write('\\nPublic Address 1 Uncompressed: ' + uaddr) f.write('\\nPublic Address 3 Segwit:", "' + saddr) f.write('\\nPublic Address bc1 P2WPKH: ' + bcaddr) f.write('\\nPublic Address bc1", "+ uaddr) f.write('\\nPublic Address 3 Segwit: ' + saddr) f.write('\\nPublic Address bc1 P2WPKH:", "print(\"Max range: \" + str(b)) print(\"==========================================================\") print('Total Bitcoin Addresses Loaded and Checking :", "line_count = 0 for line in f: line != \"\\n\" line_count += 1", ": HEX : ', HEX, end='\\r') if caddr in add or uaddr in", "caddr, '\\nPublic Address 3 Segwit: ', saddr, '\\nPublic Address bc1 P2WPKH: ', bcaddr,", "bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif = wallet.key.__dict__['mainnet'].__dict__['wif'] wifc = wallet.key.__dict__['mainnet'].__dict__['wifc'] #print('\\nPrivatekey", "line != \"\\n\" line_count += 1 with open(filename) as file: add = file.read().split()", "Found') f=open(\"winner.txt\",\"a\") f.write('\\nPrivatekey (dec): ' + str(ran)) f.write('\\nPrivatekey (hex): ' + HEX) f.write('\\nPrivatekey", "count+=1 total+=5 ran=random.randrange(a,b) HEX = \"%064x\" % ran wallet = Wallet(HEX) uaddr =", "1 Compressed: ' + caddr) f.write('\\nPublic Address 1 Uncompressed: ' + uaddr) f.write('\\nPublic", "= file.read().split() add = set(add) print('Total Bitcoin Addresses Loaded and Checking : ',str", "add or saddr in add or bcaddr in add or bc1addr in add:", "wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed address caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed address 
saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3']", "=====Made by <EMAIL> Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB ===== from bitcoinaddress import Wallet import random filename", "\")) a = 2**x y=int(input(\"Stop range Max in BITs 256 Max (Puzzle StopNumber)", "add = set(add) print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count)) x=int(input(\"'Start", "Bitcoin Addresses Loaded and Checking : ',str (line_count)) x=int(input(\"'Start range in BITs 0", "range in BITs 0 or Higher(Puzzle StartNumber) -> \")) a = 2**x y=int(input(\"Stop", "0 or Higher(Puzzle StartNumber) -> \")) a = 2**x y=int(input(\"Stop range Max in", "', HEX, '\\nPrivatekey Uncompressed: ', wif, '\\nPrivatekey compressed: ', wifc, '\\nPublic Address 1", "end='\\r') if caddr in add or uaddr in add or saddr in add", "', count , ' : Total : ', total, ' : HEX :", "<EMAIL> Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB ===== from bitcoinaddress import Wallet import random filename ='puzzle.txt' with", "ran=random.randrange(a,b) HEX = \"%064x\" % ran wallet = Wallet(HEX) uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy", "= \"%064x\" % ran wallet = Wallet(HEX) uaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1'] #Legacy uncompressed address", "\" + str(b)) print(\"==========================================================\") print('Total Bitcoin Addresses Loaded and Checking : ',str (line_count))", "f.write('\\nPrivatekey (dec): ' + str(ran)) f.write('\\nPrivatekey (hex): ' + HEX) f.write('\\nPrivatekey Uncompressed: '", "y=int(input(\"Stop range Max in BITs 256 Max (Puzzle StopNumber) -> \")) b =", "= wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif = wallet.key.__dict__['mainnet'].__dict__['wif'] wifc", "for line in f: line != \"\\n\" line_count += 1 with 
open(filename) as", "address caddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr1c'] #Legacy compressed address saddr = wallet.address.__dict__['mainnet'].__dict__['pubaddr3'] #segwit_address bcaddr =", "#segwit_address bcaddr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WPKH'] bc1addr = wallet.address.__dict__['mainnet'].__dict__['pubaddrbc1_P2WSH'] wif = wallet.key.__dict__['mainnet'].__dict__['wif'] wifc = wallet.key.__dict__['mainnet'].__dict__['wifc']", "', HEX, end='\\r') if caddr in add or uaddr in add or saddr", "Checking : ',str (line_count)) x=int(input(\"'Start range in BITs 0 or Higher(Puzzle StartNumber) ->", "saddr, '\\nPublic Address bc1 P2WPKH: ', bcaddr, '\\nPublic Address bc1 P2WSH: ', bc1addr)", "#Fullbit.py =====Made by <EMAIL> Donations 3P7PZLbwSt2bqUMsHF9xDsaNKhafiGuWDB ===== from bitcoinaddress import Wallet import random", "2**y print(\"Starting search... Please Wait min range: \" + str(a)) print(\"Max range: \"", "range Max in BITs 256 Max (Puzzle StopNumber) -> \")) b = 2**y" ]
[ "debug) if debug: print(\"\\n------ Filtered Training Data ------\") print(\"filterdTraindata length should be 5", "simplicity of the decision surface. # A low C makes the decision surface", "baselineTest = json.load(f) with open(traindataFolder + 'test-volts.json') as f: voltsTest = json.load(f) #", "+ str(len(X))) print(\"y: \" + str(y)) ## 3. Train Model with features #", "+ str(y)) ## Feature Standardization X = preprocessing.scale(X) return X, y def modelAccuracy(y_test,", "length is now 8 (channels): \" + str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0] length is now 250", ": \" + str(len(reshapedData[0]))) for cmd in range(cmdCount): X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000]) y.append(cmd) y.append(cmd)", "of positive tuples are labelled as such? recall = metrics.recall_score(y_test, y_pred) return [accuracy,", "of trainingCmd with open(filepath) as f: data = json.load(f) traindata.append(np.array(data, dtype='f')) # read", "'/data/mind/' # default path if python script runs standalone if (os.path.basename(cwd) == \"pyscripts\"):", "as blf: bl = json.load(blf) baseline = np.array(bl, dtype='f') ## read in test", "8 (channels): \" + str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0] length is now 250 (samples): \" +", "if debug: print(\"predicted y \" + str(y_pred)) [accuracy, precision, recall] = modelAccuracy(y_test, y_pred)", "print(\"Recall: \" + str(recall)) # send success back to node # TODO: implement", "in test data with open(traindataFolder + 'test-baseline.json') as f: baselineTest = json.load(f) with", "str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0] length is now 250 (samples): \" + str(len(filterdTraindata[0][0]))) # # save", "Extract Features ## Reshape Data reshapedData = [] dataFilterdNp = np.array(dataFilterd) trainCmd, nx,", "print(len(X)) X.append(dataDownSample) y.append(cmd) if debug: print(\"\\n-- X and Y Data ---\") print(\"y 
:", "as support vectors. # Find optimal gamma and C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html # TODO:", "filterdTraindata = np.array(filterdTraindata) # baselineDataBP = np.array(baselineDataBP) # outfile = '../../data/mind/model/filterdTraingdata.txt' # json.dump(filterdTraindata.tolist(),", "test dataset if debug: print(\"predicted y \" + str(y_pred)) [accuracy, precision, recall] =", "more samples as support vectors. # Find optimal gamma and C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html", "filterd Data # filterdTraindata = np.array(filterdTraindata) # baselineDataBP = np.array(baselineDataBP) # outfile =", "Accuracy print(\"\\n------ Model Accuracy ------\") y_pred = clf.predict(X_test) # Predict the response for", "def findTrainClassifier(X, y): C_range = np.logspace(-2, 10, 13) gamma_range = np.logspace(-9, 3, 13)", "traindata = [] for cmd in range(cmdCount): filepath = Path(traindataFolder + 'training-' +", "+ str(len(filterdTraindata))) print(\"filterdTraindata[0] length is now 8 (channels): \" + str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0] length", "as such? recall = metrics.recall_score(y_test, y_pred) return [accuracy, precision, recall] def findTrainClassifier(X, y):", "by giving the model freedom to select more samples as support vectors. #", "Reshaped Data ---\") print(\"len(reshapedData) aka 5 cmds: \" + str(len(reshapedData))) print(\"len(reshapedData[0]) channels*samples aka", "of training examples against simplicity of the decision surface. # A low C", "= [] print(len(X)) X.append(dataDownSample) y.append(cmd) if debug: print(\"\\n-- X and Y Data ---\")", "trades off misclassification of training examples against simplicity of the decision surface. #", "‘close’. 
# C: trades off misclassification of training examples against simplicity of the", "X and Y Data ---\") print(\"y : \" + str(y)) ## Feature Standardization", "metrics.precision_score(y_test, y_pred) # Model Recall: what percentage of positive tuples are labelled as", "baseline = np.array(bl, dtype='f') ## read in test data with open(traindataFolder + 'test-baseline.json')", "str(accuracy)) print(\"Precision: \" + str(precision)) print(\"Recall: \" + str(recall)) # send success back", "with stored traingsdata # filepath-example = 'your project path'/data/mind/training-playpause.json' cwd = os.getcwd() traindataFolder", "print(\"traindata[0][0] length should be 8 (channels): \" + str(len(traindata[0][0]))) # 1. Filter and", "np.array(baselineDataBP) # outfile = '../../data/mind/model/filterdTraingdata.txt' # json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True,", "is now 250 (samples): \" + str(len(filterdTraindata[0][0]))) # # save filterd Data #", "if (os.path.basename(cwd) == \"pyscripts\"): traindataFolder = cwd + '/../../data/mind/' traindata = [] for", "and high values meaning ‘close’. # C: trades off misclassification of training examples", "labelled as such? recall = metrics.recall_score(y_test, y_pred) return [accuracy, precision, recall] def findTrainClassifier(X,", "such? precision = metrics.precision_score(y_test, y_pred) # Model Recall: what percentage of positive tuples", "if (debug): print(\"\\n-- Reshaped Data ---\") print(\"len(reshapedData) aka 5 cmds: \" + str(len(reshapedData)))", "classification with traingdata. check SVM algorithm\") print(\"\\n------ Test Data ------\") ## 4. 
Filter", "% (grid.best_params_, grid.best_score_)) return grid.best_params_['C'], grid.best_params_['gamma'] # start process if __name__ == '__main__':", "for SVM training X = [] y = [] # TODO: Extract Features", "dict(gamma=gamma_range, C=C_range) cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) grid = GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv) grid.fit(X,", "13) param_grid = dict(gamma=gamma_range, C=C_range) cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) grid = GridSearchCV(svm.SVC(),", "# ## import json import os import sys import time import pickle import", "'voldown'] cmdCount = len(commands) # nr of commands def main(): # read training", "misclassification of training examples against simplicity of the decision surface. # A low", "sklearn import svm, preprocessing, metrics from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit from pathlib import", "str(len(traindata))) print(\"traindata[0] length should be 1500 (samples): \" + str(len(traindata[0]))) print(\"traindata[0][0] length should", "# Playpause===1 [X_test, y_test] = extractFeature(filterdTestdata, targetCmd) print(\"Anz. Features X_Test: \" + str(len(X_test)))", "# indent=4) ### this saves the array in .json format # outfile =", "cmd in range(cmdCount): filepath = Path(traindataFolder + 'training-' + commands[cmd] + '.json') #", "be 8 (channels): \" + str(len(traindata[0][0]))) # 1. Filter and Downsample Trainingdata and", "array voltsTest = np.array(voltsTest, dtype='f') baselineTest = np.array(baselineTest, dtype='f') if debug: print(\"\\n------ Training", "examples against simplicity of the decision surface. # A low C makes the", "X_Test: \" + str(len(X_test))) print(\"y_Test: \" + str(y_test)) ## 6. 
Check Model Accuracy", "in range(cmdCount): filepath = Path(traindataFolder + 'training-' + commands[cmd] + '.json') # read", "str(y_pred)) [accuracy, precision, recall] = modelAccuracy(y_test, y_pred) print(\"Accuracy: \" + str(accuracy)) print(\"Precision: \"", "low C makes the decision surface smooth, while a high C aims at", "print(\"y_Test: \" + str(y_test)) ## 6. Check Model Accuracy print(\"\\n------ Model Accuracy ------\")", "traindataFolder = cwd + '/data/mind/' # default path if python script runs standalone", "Accuracy: how often is the classifier correct accuracy = metrics.accuracy_score(y_test, y_pred) # Model", ".json format # outfile = '../../data/mind/model/baselineDataBP.txt' # json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'),", "Model Recall: what percentage of positive tuples are labelled as such? recall =", "(beta) # ## import json import os import sys import time import pickle", "y = [] print(len(X)) X.append(dataDownSample) y.append(cmd) if debug: print(\"\\n-- X and Y Data", "str(recall)) # send success back to node # TODO: implement real success boolean", "SVM params [C, gamma] = findTrainClassifier(X, y) clf = svm.SVC(kernel='rbf', gamma=gamma, C=C) clf.fit(X,", "send success back to node # TODO: implement real success boolean return print('true')", "import numpy as np from mindFunctions import filterDownsampleData import codecs, json from scipy.signal", "with open(filepath) as f: data = json.load(f) traindata.append(np.array(data, dtype='f')) # read in baseline", "Feature Standardization X = preprocessing.scale(X) return X, y def modelAccuracy(y_test, y_pred): # Model", "= np.array(dataFilterd) trainCmd, nx, ny = dataFilterdNp.shape reshapedData = dataFilterdNp.reshape((trainCmd, nx * ny))", "str(precision)) print(\"Recall: \" + str(recall)) # send success back to node # TODO:", "------\") ## 4. 
Filter and Downsample Testdata [filterdTestdata] = filterDownsampleData(voltsTest, baselineTest, commands, debug)", "Y data for SVM training X = [] y = [] # TODO:", "makes the decision surface smooth, while a high C aims at classifying all", "sort_keys=True, # indent=4) ### this saves the array in .json format ## 2.", "\" + str(precision)) print(\"Recall: \" + str(recall)) # send success back to node", "if debug: [accuracy, _, _] = modelAccuracy(y, clf.predict(X)) if (accuracy == 1.0): print(\"Correct", "Test Data ------\") ## 4. Filter and Downsample Testdata [filterdTestdata] = filterDownsampleData(voltsTest, baselineTest,", "\" + str(len(reshapedData[0]))) for cmd in range(cmdCount): X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000]) y.append(cmd) y.append(cmd) y.append(cmd)", "= np.logspace(-9, 3, 13) param_grid = dict(gamma=gamma_range, C=C_range) cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42)", "json.load(f) traindata.append(np.array(data, dtype='f')) # read in baseline from file baseline = [] blpath", "node # TODO: implement real success boolean return print('true') def extractFeature(dataFilterd): ## Create", "with open(traindataFolder + 'test-volts.json') as f: voltsTest = json.load(f) # create a numpy", "y.append(cmd) # Feature Standardization X = preprocessing.scale(X) return X, y def extractFeatureTest(dataDownSample, cmd):", "positive tuples are labeled as such? 
precision = metrics.precision_score(y_test, y_pred) # Model Recall:", ": \" + str(y)) ## Feature Standardization X = preprocessing.scale(X) return X, y", "[] for cmd in range(cmdCount): filepath = Path(traindataFolder + 'training-' + commands[cmd] +", "test data with open(traindataFolder + 'test-baseline.json') as f: baselineTest = json.load(f) with open(traindataFolder", "X.append(dataDownSample) y.append(cmd) if debug: print(\"\\n-- X and Y Data ---\") print(\"y : \"", "X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000]) y.append(cmd) y.append(cmd) y.append(cmd) # Feature Standardization X = preprocessing.scale(X) return", "create a numpy array voltsTest = np.array(voltsTest, dtype='f') baselineTest = np.array(baselineTest, dtype='f') if", "codecs, json from scipy.signal import butter, lfilter from sklearn import svm, preprocessing, metrics", "default path with stored traingsdata # filepath-example = 'your project path'/data/mind/training-playpause.json' cwd =", "from scipy.signal import butter, lfilter from sklearn import svm, preprocessing, metrics from sklearn.model_selection", "X and Y data for SVM test X = [] y = []", "default path if python script runs standalone if (os.path.basename(cwd) == \"pyscripts\"): traindataFolder =", "json.load(f) with open(traindataFolder + 'test-volts.json') as f: voltsTest = json.load(f) # create a", "save model with open('../../data/mind/model/svm_model-mind.txt', 'wb') as outfile: pickle.dump(clf, outfile) ## Check if trainingdata", "Data ---\") print(\"len(reshapedData) aka 5 cmds: \" + str(len(reshapedData))) print(\"len(reshapedData[0]) channels*samples aka 8*250=2000", "print(\"Precision: \" + str(precision)) print(\"Recall: \" + str(recall)) # send success back to", "if debug: print(\"\\n------ Filtered Training Data ------\") print(\"filterdTraindata length should be 5 (cmds):", "traingdata. check SVM algorithm\") print(\"\\n------ Test Data ------\") ## 4. 
Filter and Downsample", "\" + str(len(X))) print(\"y: \" + str(y)) ## 3. Train Model with features", "values meaning ‘close’. # C: trades off misclassification of training examples against simplicity", "[filterdTraindata, baselineDataBP] = filterDownsampleData(traindata, baseline, commands, debug) if debug: print(\"\\n------ Filtered Training Data", "[accuracy, precision, recall] def findTrainClassifier(X, y): C_range = np.logspace(-2, 10, 13) gamma_range =", "4. Filter and Downsample Testdata [filterdTestdata] = filterDownsampleData(voltsTest, baselineTest, commands, debug) ## 5.", "pathlib import Path # enable/disable debug Mode debug = False # the 5", "5 commands from player commands = ['volup', 'playpause', 'next', 'prev', 'voldown'] cmdCount =", "5 (cmds): \" + str(len(traindata))) print(\"traindata[0] length should be 1500 (samples): \" +", "implement real success boolean return print('true') def extractFeature(dataFilterd): ## Create X and Y", "## 5. Extract Features from Testdata targetCmd = 1 # Playpause===1 [X_test, y_test]", "script runs standalone if (os.path.basename(cwd) == \"pyscripts\"): traindataFolder = cwd + '/../../data/mind/' traindata", "(only commands) [X, y] = extractFeature(filterdTraindata) if debug: print(\"Anz. Features: \" + str(len(X)))", "and Downsample Testdata [filterdTestdata] = filterDownsampleData(voltsTest, baselineTest, commands, debug) ## 5. Extract Features", "sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit from pathlib import Path # enable/disable debug Mode debug", "data for SVM training X = [] y = [] # TODO: Extract", "such? 
recall = metrics.recall_score(y_test, y_pred) return [accuracy, precision, recall] def findTrainClassifier(X, y): C_range", "param_grid=param_grid, cv=cv) grid.fit(X, y) if debug: print(\"The best parameters are %s with a", "boolean return print('true') def extractFeature(dataFilterd): ## Create X and Y data for SVM", "## Create X and Y data for SVM test X = [] y", "C_range = np.logspace(-2, 10, 13) gamma_range = np.logspace(-9, 3, 13) param_grid = dict(gamma=gamma_range,", "X = [] y = [] # TODO: Extract Features ## Reshape Data", "file of trainingCmd with open(filepath) as f: data = json.load(f) traindata.append(np.array(data, dtype='f')) #", "Trainingdata and Baseline [filterdTraindata, baselineDataBP] = filterDownsampleData(traindata, baseline, commands, debug) if debug: print(\"\\n------", "format ## 2. Extract Features for Trainingdata (only commands) [X, y] = extractFeature(filterdTraindata)", "‘far’ and high values meaning ‘close’. # C: trades off misclassification of training", "\" + str(len(traindata[0][0]))) # 1. Filter and Downsample Trainingdata and Baseline [filterdTraindata, baselineDataBP]", "preprocessing.scale(X) return X, y def extractFeatureTest(dataDownSample, cmd): ## Create X and Y data", "len(commands) # nr of commands def main(): # read training data from files", "y_test] = extractFeature(filterdTestdata, targetCmd) print(\"Anz. 
Features X_Test: \" + str(len(X_test))) print(\"y_Test: \" +", "param_grid = dict(gamma=gamma_range, C=C_range) cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) grid = GridSearchCV(svm.SVC(), param_grid=param_grid,", "%0.2f\" % (grid.best_params_, grid.best_score_)) return grid.best_params_['C'], grid.best_params_['gamma'] # start process if __name__ ==", "cmd): ## Create X and Y data for SVM test X = []", "range(cmdCount): X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000]) y.append(cmd) y.append(cmd) y.append(cmd) # Feature Standardization X = preprocessing.scale(X)", "= StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) grid = GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv) grid.fit(X, y) if debug:", "path'/data/mind/training-playpause.json' cwd = os.getcwd() traindataFolder = cwd + '/data/mind/' # default path if", "commands, debug) ## 5. Extract Features from Testdata targetCmd = 1 # Playpause===1", "trainingdata get 100% accuracy if debug: [accuracy, _, _] = modelAccuracy(y, clf.predict(X)) if", "dtype='f') ## read in test data with open(traindataFolder + 'test-baseline.json') as f: baselineTest", "= np.logspace(-2, 10, 13) gamma_range = np.logspace(-9, 3, 13) param_grid = dict(gamma=gamma_range, C=C_range)", "cmdCount = len(commands) # nr of commands def main(): # read training data", "files # default path with stored traingsdata # filepath-example = 'your project path'/data/mind/training-playpause.json'", "samples as support vectors. 
# Find optimal gamma and C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html #", "mindFunctions import filterDownsampleData import codecs, json from scipy.signal import butter, lfilter from sklearn", "single training example reaches, with low values meaning ‘far’ and high values meaning", "metrics from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit from pathlib import Path # enable/disable debug", "print(\"Anz. Features: \" + str(len(X))) print(\"y: \" + str(y)) ## 3. Train Model", "str(len(X))) print(\"y: \" + str(y)) ## 3. Train Model with features # gamma:", "precision, recall] = modelAccuracy(y_test, y_pred) print(\"Accuracy: \" + str(accuracy)) print(\"Precision: \" + str(precision))", "print(\"y: \" + str(y)) ## 3. Train Model with features # gamma: defines", "print(\"filterdTraindata length should be 5 (cmds): \" + str(len(filterdTraindata))) print(\"filterdTraindata[0] length is now", "for Trainingdata (only commands) [X, y] = extractFeature(filterdTraindata) if debug: print(\"Anz. Features: \"", "Y data for SVM test X = [] y = [] print(len(X)) X.append(dataDownSample)", "classification with traingdata\") else: print(\"Wrong classification with traingdata. check SVM algorithm\") print(\"\\n------ Test", "# default path if python script runs standalone if (os.path.basename(cwd) == \"pyscripts\"): traindataFolder", "str(len(traindata[0][0]))) # 1. Filter and Downsample Trainingdata and Baseline [filterdTraindata, baselineDataBP] = filterDownsampleData(traindata,", "= ['volup', 'playpause', 'next', 'prev', 'voldown'] cmdCount = len(commands) # nr of commands", "length is now 250 (samples): \" + str(len(filterdTraindata[0][0]))) # # save filterd Data", "and Y data for SVM training X = [] y = [] #", "surface. 
# A low C makes the decision surface smooth, while a high", "## read in test data with open(traindataFolder + 'test-baseline.json') as f: baselineTest =", "debug: print(\"\\n------ Training Data ------\") print(\"traindata length should be 5 (cmds): \" +", "y.append(cmd) if debug: print(\"\\n-- X and Y Data ---\") print(\"y : \" +", "= json.load(f) with open(traindataFolder + 'test-volts.json') as f: voltsTest = json.load(f) # create", "length should be 1500 (samples): \" + str(len(traindata[0]))) print(\"traindata[0][0] length should be 8", "success boolean return print('true') def extractFeature(dataFilterd): ## Create X and Y data for", "example reaches, with low values meaning ‘far’ and high values meaning ‘close’. #", "targetCmd = 1 # Playpause===1 [X_test, y_test] = extractFeature(filterdTestdata, targetCmd) print(\"Anz. Features X_Test:", "= svm.SVC(kernel='rbf', gamma=gamma, C=C) clf.fit(X, y) ## save model with open('../../data/mind/model/svm_model-mind.txt', 'wb') as", "percentage of positive tuples are labelled as such? 
recall = metrics.recall_score(y_test, y_pred) return", "import filterDownsampleData import codecs, json from scipy.signal import butter, lfilter from sklearn import", "of baseline with open(blpath) as blf: bl = json.load(blf) baseline = np.array(bl, dtype='f')", "# (beta) # ## import json import os import sys import time import", "Check if trainingdata get 100% accuracy if debug: [accuracy, _, _] = modelAccuracy(y,", "data with open(traindataFolder + 'test-baseline.json') as f: baselineTest = json.load(f) with open(traindataFolder +", "'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4) ### this saves the array in", "'/../../data/mind/' traindata = [] for cmd in range(cmdCount): filepath = Path(traindataFolder + 'training-'", "_, _] = modelAccuracy(y, clf.predict(X)) if (accuracy == 1.0): print(\"Correct classification with traingdata\")", "from files # default path with stored traingsdata # filepath-example = 'your project", "cwd + '/data/mind/' # default path if python script runs standalone if (os.path.basename(cwd)", "should be 1500 (samples): \" + str(len(traindata[0]))) print(\"traindata[0][0] length should be 8 (channels):", "parameters are %s with a score of %0.2f\" % (grid.best_params_, grid.best_score_)) return grid.best_params_['C'],", "import sys import time import pickle import numpy as np from mindFunctions import", "'.json') # read file of trainingCmd with open(filepath) as f: data = json.load(f)", "os import sys import time import pickle import numpy as np from mindFunctions", "TODO: Extract Features ## Reshape Data reshapedData = [] dataFilterdNp = np.array(dataFilterd) trainCmd,", "# nr of commands def main(): # read training data from files #", "+ str(len(X_test))) print(\"y_Test: \" + str(y_test)) ## 6. 
Check Model Accuracy print(\"\\n------ Model", "classifying all training examples correctly by giving the model freedom to select more", "extractFeature(dataFilterd): ## Create X and Y data for SVM training X = []", "sort_keys=True, # indent=4) ### this saves the array in .json format # outfile", "a single training example reaches, with low values meaning ‘far’ and high values", "+ str(len(traindata[0][0]))) # 1. Filter and Downsample Trainingdata and Baseline [filterdTraindata, baselineDataBP] =", "+ str(len(traindata))) print(\"traindata[0] length should be 1500 (samples): \" + str(len(traindata[0]))) print(\"traindata[0][0] length", "# the 5 commands from player commands = ['volup', 'playpause', 'next', 'prev', 'voldown']", "trainingCmd with open(filepath) as f: data = json.load(f) traindata.append(np.array(data, dtype='f')) # read in", "= np.array(filterdTraindata) # baselineDataBP = np.array(baselineDataBP) # outfile = '../../data/mind/model/filterdTraingdata.txt' # json.dump(filterdTraindata.tolist(), codecs.open(outfile,", "meaning ‘close’. # C: trades off misclassification of training examples against simplicity of", "are labelled as such? recall = metrics.recall_score(y_test, y_pred) return [accuracy, precision, recall] def", "str(y_test)) ## 6. 
Check Model Accuracy print(\"\\n------ Model Accuracy ------\") y_pred = clf.predict(X_test)", "svm, preprocessing, metrics from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit from pathlib import Path #", "Path # enable/disable debug Mode debug = False # the 5 commands from", "Train Model with features # gamma: defines how far the influence of a", "# create a numpy array voltsTest = np.array(voltsTest, dtype='f') baselineTest = np.array(baselineTest, dtype='f')", "and Baseline [filterdTraindata, baselineDataBP] = filterDownsampleData(traindata, baseline, commands, debug) if debug: print(\"\\n------ Filtered", "[C, gamma] = findTrainClassifier(X, y) clf = svm.SVC(kernel='rbf', gamma=gamma, C=C) clf.fit(X, y) ##", "# send success back to node # TODO: implement real success boolean return", "be 1500 (samples): \" + str(len(traindata[0]))) print(\"traindata[0][0] length should be 8 (channels): \"", "surface smooth, while a high C aims at classifying all training examples correctly", "SVM test X = [] y = [] print(len(X)) X.append(dataDownSample) y.append(cmd) if debug:", "data from files # default path with stored traingsdata # filepath-example = 'your", "# baselineDataBP = np.array(baselineDataBP) # outfile = '../../data/mind/model/filterdTraingdata.txt' # json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w', encoding='utf-8'),", "# A low C makes the decision surface smooth, while a high C", "cv=cv) grid.fit(X, y) if debug: print(\"The best parameters are %s with a score", "y): C_range = np.logspace(-2, 10, 13) gamma_range = np.logspace(-9, 3, 13) param_grid =", "# json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4) ### this saves", "# indent=4) ### this saves the array in .json format ## 2. 
Extract", "= preprocessing.scale(X) return X, y def extractFeatureTest(dataDownSample, cmd): ## Create X and Y", "= filterDownsampleData(voltsTest, baselineTest, commands, debug) ## 5. Extract Features from Testdata targetCmd =", "y_pred) return [accuracy, precision, recall] def findTrainClassifier(X, y): C_range = np.logspace(-2, 10, 13)", "now 250 (samples): \" + str(len(filterdTraindata[0][0]))) # # save filterd Data # filterdTraindata", "## 2. Extract Features for Trainingdata (only commands) [X, y] = extractFeature(filterdTraindata) if", "the influence of a single training example reaches, with low values meaning ‘far’", "params [C, gamma] = findTrainClassifier(X, y) clf = svm.SVC(kernel='rbf', gamma=gamma, C=C) clf.fit(X, y)", "for SVM test X = [] y = [] print(len(X)) X.append(dataDownSample) y.append(cmd) if", "y \" + str(y_pred)) [accuracy, precision, recall] = modelAccuracy(y_test, y_pred) print(\"Accuracy: \" +", "1 # Playpause===1 [X_test, y_test] = extractFeature(filterdTestdata, targetCmd) print(\"Anz. 
Features X_Test: \" +", "project path'/data/mind/training-playpause.json' cwd = os.getcwd() traindataFolder = cwd + '/data/mind/' # default path", "':'), sort_keys=True, # indent=4) ### this saves the array in .json format #", "+ str(precision)) print(\"Recall: \" + str(recall)) # send success back to node #", "sys import time import pickle import numpy as np from mindFunctions import filterDownsampleData", "reshapedData = [] dataFilterdNp = np.array(dataFilterd) trainCmd, nx, ny = dataFilterdNp.shape reshapedData =", "save filterd Data # filterdTraindata = np.array(filterdTraindata) # baselineDataBP = np.array(baselineDataBP) # outfile", "decision surface smooth, while a high C aims at classifying all training examples", "of %0.2f\" % (grid.best_params_, grid.best_score_)) return grid.best_params_['C'], grid.best_params_['gamma'] # start process if __name__", "Y Data ---\") print(\"y : \" + str(y)) ## Feature Standardization X =", "[] y = [] # TODO: Extract Features ## Reshape Data reshapedData =", "\" + str(len(traindata))) print(\"traindata[0] length should be 1500 (samples): \" + str(len(traindata[0]))) print(\"traindata[0][0]", "import os import sys import time import pickle import numpy as np from", "ny)) if (debug): print(\"\\n-- Reshaped Data ---\") print(\"len(reshapedData) aka 5 cmds: \" +", "a high C aims at classifying all training examples correctly by giving the", "Data # filterdTraindata = np.array(filterdTraindata) # baselineDataBP = np.array(baselineDataBP) # outfile = '../../data/mind/model/filterdTraingdata.txt'", "## 3. Train Model with features # gamma: defines how far the influence", "(os.path.basename(cwd) == \"pyscripts\"): traindataFolder = cwd + '/../../data/mind/' traindata = [] for cmd", "with open(blpath) as blf: bl = json.load(blf) baseline = np.array(bl, dtype='f') ## read", "commands) [X, y] = extractFeature(filterdTraindata) if debug: print(\"Anz. 
Features: \" + str(len(X))) print(\"y:", "success back to node # TODO: implement real success boolean return print('true') def", "as f: voltsTest = json.load(f) # create a numpy array voltsTest = np.array(voltsTest,", "saves the array in .json format # outfile = '../../data/mind/model/baselineDataBP.txt' # json.dump(baselineDataBP.tolist(), codecs.open(outfile,", "test X = [] y = [] print(len(X)) X.append(dataDownSample) y.append(cmd) if debug: print(\"\\n--", "(accuracy == 1.0): print(\"Correct classification with traingdata\") else: print(\"Wrong classification with traingdata. check", "== 1.0): print(\"Correct classification with traingdata\") else: print(\"Wrong classification with traingdata. check SVM", "select more samples as support vectors. # Find optimal gamma and C parameters:", "def extractFeature(dataFilterd): ## Create X and Y data for SVM training X =", "y_pred): # Model Accuracy: how often is the classifier correct accuracy = metrics.accuracy_score(y_test,", "model freedom to select more samples as support vectors. # Find optimal gamma", "Model Accuracy print(\"\\n------ Model Accuracy ------\") y_pred = clf.predict(X_test) # Predict the response", "Set correct SVM params [C, gamma] = findTrainClassifier(X, y) clf = svm.SVC(kernel='rbf', gamma=gamma,", "y.append(cmd) y.append(cmd) # Feature Standardization X = preprocessing.scale(X) return X, y def extractFeatureTest(dataDownSample,", "---\") print(\"len(reshapedData) aka 5 cmds: \" + str(len(reshapedData))) print(\"len(reshapedData[0]) channels*samples aka 8*250=2000 :", "= Path(traindataFolder + 'training-baseline.json') # read file of baseline with open(blpath) as blf:", "= filterDownsampleData(traindata, baseline, commands, debug) if debug: print(\"\\n------ Filtered Training Data ------\") print(\"filterdTraindata", "y_pred) # Model Precision: what percentage of positive tuples are labeled as such?", "vectors. 
# Find optimal gamma and C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html # TODO: Set correct", "filepath-example = 'your project path'/data/mind/training-playpause.json' cwd = os.getcwd() traindataFolder = cwd + '/data/mind/'", "from Testdata targetCmd = 1 # Playpause===1 [X_test, y_test] = extractFeature(filterdTestdata, targetCmd) print(\"Anz.", "# save filterd Data # filterdTraindata = np.array(filterdTraindata) # baselineDataBP = np.array(baselineDataBP) #", "# read file of baseline with open(blpath) as blf: bl = json.load(blf) baseline", "and Y data for SVM test X = [] y = [] print(len(X))", "training data from files # default path with stored traingsdata # filepath-example =", "for test dataset if debug: print(\"predicted y \" + str(y_pred)) [accuracy, precision, recall]", "data for SVM test X = [] y = [] print(len(X)) X.append(dataDownSample) y.append(cmd)", "+ '/../../data/mind/' traindata = [] for cmd in range(cmdCount): filepath = Path(traindataFolder +", "= False # the 5 commands from player commands = ['volup', 'playpause', 'next',", "defines how far the influence of a single training example reaches, with low", "tuples are labelled as such? recall = metrics.recall_score(y_test, y_pred) return [accuracy, precision, recall]", "cwd + '/../../data/mind/' traindata = [] for cmd in range(cmdCount): filepath = Path(traindataFolder", "Playpause===1 [X_test, y_test] = extractFeature(filterdTestdata, targetCmd) print(\"Anz. Features X_Test: \" + str(len(X_test))) print(\"y_Test:", "= json.load(blf) baseline = np.array(bl, dtype='f') ## read in test data with open(traindataFolder", "'../../data/mind/model/baselineDataBP.txt' # json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4) ### this", "aims at classifying all training examples correctly by giving the model freedom to", "traingdata\") else: print(\"Wrong classification with traingdata. 
check SVM algorithm\") print(\"\\n------ Test Data ------\")", "_] = modelAccuracy(y, clf.predict(X)) if (accuracy == 1.0): print(\"Correct classification with traingdata\") else:", "(samples): \" + str(len(traindata[0]))) print(\"traindata[0][0] length should be 8 (channels): \" + str(len(traindata[0][0])))", "Downsample Trainingdata and Baseline [filterdTraindata, baselineDataBP] = filterDownsampleData(traindata, baseline, commands, debug) if debug:", "with open(traindataFolder + 'test-baseline.json') as f: baselineTest = json.load(f) with open(traindataFolder + 'test-volts.json')", "debug: print(\"\\n-- X and Y Data ---\") print(\"y : \" + str(y)) ##", "= metrics.precision_score(y_test, y_pred) # Model Recall: what percentage of positive tuples are labelled", "with a score of %0.2f\" % (grid.best_params_, grid.best_score_)) return grid.best_params_['C'], grid.best_params_['gamma'] # start", "## Check if trainingdata get 100% accuracy if debug: [accuracy, _, _] =", "f: voltsTest = json.load(f) # create a numpy array voltsTest = np.array(voltsTest, dtype='f')", "json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4) ### this saves the", "Filter and Downsample Testdata [filterdTestdata] = filterDownsampleData(voltsTest, baselineTest, commands, debug) ## 5. Extract", "+ str(y)) ## 3. 
Train Model with features # gamma: defines how far", "= cwd + '/../../data/mind/' traindata = [] for cmd in range(cmdCount): filepath =", "Standardization X = preprocessing.scale(X) return X, y def extractFeatureTest(dataDownSample, cmd): ## Create X", "player commands = ['volup', 'playpause', 'next', 'prev', 'voldown'] cmdCount = len(commands) # nr", "array in .json format # outfile = '../../data/mind/model/baselineDataBP.txt' # json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w', encoding='utf-8'),", "= GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv) grid.fit(X, y) if debug: print(\"The best parameters are %s", "json import os import sys import time import pickle import numpy as np", "['volup', 'playpause', 'next', 'prev', 'voldown'] cmdCount = len(commands) # nr of commands def", "= len(commands) # nr of commands def main(): # read training data from", "(samples): \" + str(len(filterdTraindata[0][0]))) # # save filterd Data # filterdTraindata = np.array(filterdTraindata)", "# json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4) ### this saves", "C makes the decision surface smooth, while a high C aims at classifying", "\"pyscripts\"): traindataFolder = cwd + '/../../data/mind/' traindata = [] for cmd in range(cmdCount):", "y = [] # TODO: Extract Features ## Reshape Data reshapedData = []", "train eeg data of mind commands # (beta) # ## import json import", "from player commands = ['volup', 'playpause', 'next', 'prev', 'voldown'] cmdCount = len(commands) #", "Path(traindataFolder + 'training-baseline.json') # read file of baseline with open(blpath) as blf: bl", "1500 (samples): \" + str(len(traindata[0]))) print(\"traindata[0][0] length should be 8 (channels): \" +", "json from scipy.signal import butter, lfilter from sklearn import svm, preprocessing, metrics from", "codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4) ### this 
saves the array", "all training examples correctly by giving the model freedom to select more samples", "## 6. Check Model Accuracy print(\"\\n------ Model Accuracy ------\") y_pred = clf.predict(X_test) #", "+ str(recall)) # send success back to node # TODO: implement real success", "return [accuracy, precision, recall] def findTrainClassifier(X, y): C_range = np.logspace(-2, 10, 13) gamma_range", "# Model Accuracy: how often is the classifier correct accuracy = metrics.accuracy_score(y_test, y_pred)", "# outfile = '../../data/mind/model/baselineDataBP.txt' # json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, #", "print(\"\\n------ Model Accuracy ------\") y_pred = clf.predict(X_test) # Predict the response for test", "this saves the array in .json format # outfile = '../../data/mind/model/baselineDataBP.txt' # json.dump(baselineDataBP.tolist(),", "Testdata targetCmd = 1 # Playpause===1 [X_test, y_test] = extractFeature(filterdTestdata, targetCmd) print(\"Anz. Features", "Data reshapedData = [] dataFilterdNp = np.array(dataFilterd) trainCmd, nx, ny = dataFilterdNp.shape reshapedData", "X = preprocessing.scale(X) return X, y def modelAccuracy(y_test, y_pred): # Model Accuracy: how", "svm.SVC(kernel='rbf', gamma=gamma, C=C) clf.fit(X, y) ## save model with open('../../data/mind/model/svm_model-mind.txt', 'wb') as outfile:", "targetCmd) print(\"Anz. Features X_Test: \" + str(len(X_test))) print(\"y_Test: \" + str(y_test)) ## 6.", "Downsample Testdata [filterdTestdata] = filterDownsampleData(voltsTest, baselineTest, commands, debug) ## 5. 
Extract Features from", "data = json.load(f) traindata.append(np.array(data, dtype='f')) # read in baseline from file baseline =", "if python script runs standalone if (os.path.basename(cwd) == \"pyscripts\"): traindataFolder = cwd +", "[accuracy, _, _] = modelAccuracy(y, clf.predict(X)) if (accuracy == 1.0): print(\"Correct classification with", "often is the classifier correct accuracy = metrics.accuracy_score(y_test, y_pred) # Model Precision: what", "reaches, with low values meaning ‘far’ and high values meaning ‘close’. # C:", "= [] y = [] # TODO: Extract Features ## Reshape Data reshapedData", "Mode debug = False # the 5 commands from player commands = ['volup',", "# TODO: implement real success boolean return print('true') def extractFeature(dataFilterd): ## Create X", "for cmd in range(cmdCount): filepath = Path(traindataFolder + 'training-' + commands[cmd] + '.json')", "gamma and C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html # TODO: Set correct SVM params [C, gamma]", "get 100% accuracy if debug: [accuracy, _, _] = modelAccuracy(y, clf.predict(X)) if (accuracy", "pickle import numpy as np from mindFunctions import filterDownsampleData import codecs, json from", "ny = dataFilterdNp.shape reshapedData = dataFilterdNp.reshape((trainCmd, nx * ny)) if (debug): print(\"\\n-- Reshaped", "y_pred = clf.predict(X_test) # Predict the response for test dataset if debug: print(\"predicted", "in .json format ## 2. 
Extract Features for Trainingdata (only commands) [X, y]", "Training Data ------\") print(\"filterdTraindata length should be 5 (cmds): \" + str(len(filterdTraindata))) print(\"filterdTraindata[0]", "= clf.predict(X_test) # Predict the response for test dataset if debug: print(\"predicted y", "8*250=2000 : \" + str(len(reshapedData[0]))) for cmd in range(cmdCount): X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000]) y.append(cmd)", "\" + str(y)) ## Feature Standardization X = preprocessing.scale(X) return X, y def", ".json format ## 2. Extract Features for Trainingdata (only commands) [X, y] =", "main(): # read training data from files # default path with stored traingsdata", "print(\"\\n-- Reshaped Data ---\") print(\"len(reshapedData) aka 5 cmds: \" + str(len(reshapedData))) print(\"len(reshapedData[0]) channels*samples", "np.array(voltsTest, dtype='f') baselineTest = np.array(baselineTest, dtype='f') if debug: print(\"\\n------ Training Data ------\") print(\"traindata", "else: print(\"Wrong classification with traingdata. 
check SVM algorithm\") print(\"\\n------ Test Data ------\") ##", "'prev', 'voldown'] cmdCount = len(commands) # nr of commands def main(): # read", "stored traingsdata # filepath-example = 'your project path'/data/mind/training-playpause.json' cwd = os.getcwd() traindataFolder =", "= json.load(f) # create a numpy array voltsTest = np.array(voltsTest, dtype='f') baselineTest =", "'test-baseline.json') as f: baselineTest = json.load(f) with open(traindataFolder + 'test-volts.json') as f: voltsTest", "[] # TODO: Extract Features ## Reshape Data reshapedData = [] dataFilterdNp =", "# read file of trainingCmd with open(filepath) as f: data = json.load(f) traindata.append(np.array(data,", "f: baselineTest = json.load(f) with open(traindataFolder + 'test-volts.json') as f: voltsTest = json.load(f)", "Standardization X = preprocessing.scale(X) return X, y def modelAccuracy(y_test, y_pred): # Model Accuracy:", "dtype='f') if debug: print(\"\\n------ Training Data ------\") print(\"traindata length should be 5 (cmds):", "as f: baselineTest = json.load(f) with open(traindataFolder + 'test-volts.json') as f: voltsTest =", "decision surface. # A low C makes the decision surface smooth, while a", "of the decision surface. # A low C makes the decision surface smooth,", "= [] # TODO: Extract Features ## Reshape Data reshapedData = [] dataFilterdNp", "python script runs standalone if (os.path.basename(cwd) == \"pyscripts\"): traindataFolder = cwd + '/../../data/mind/'", "print(\"\\n------ Test Data ------\") ## 4. 
Filter and Downsample Testdata [filterdTestdata] = filterDownsampleData(voltsTest,", "how often is the classifier correct accuracy = metrics.accuracy_score(y_test, y_pred) # Model Precision:", "y_pred) print(\"Accuracy: \" + str(accuracy)) print(\"Precision: \" + str(precision)) print(\"Recall: \" + str(recall))", "traindata.append(np.array(data, dtype='f')) # read in baseline from file baseline = [] blpath =", "nx * ny)) if (debug): print(\"\\n-- Reshaped Data ---\") print(\"len(reshapedData) aka 5 cmds:", "test_size=0.2, random_state=42) grid = GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv) grid.fit(X, y) if debug: print(\"The best", "recall = metrics.recall_score(y_test, y_pred) return [accuracy, precision, recall] def findTrainClassifier(X, y): C_range =", "training X = [] y = [] # TODO: Extract Features ## Reshape", "encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4) ### this saves the array in .json", "with features # gamma: defines how far the influence of a single training", "+ str(len(traindata[0]))) print(\"traindata[0][0] length should be 8 (channels): \" + str(len(traindata[0][0]))) # 1.", "clf.predict(X)) if (accuracy == 1.0): print(\"Correct classification with traingdata\") else: print(\"Wrong classification with", "as such? 
precision = metrics.precision_score(y_test, y_pred) # Model Recall: what percentage of positive", "if debug: print(\"\\n------ Training Data ------\") print(\"traindata length should be 5 (cmds): \"", "= json.load(f) traindata.append(np.array(data, dtype='f')) # read in baseline from file baseline = []", "def modelAccuracy(y_test, y_pred): # Model Accuracy: how often is the classifier correct accuracy", "GridSearchCV, StratifiedShuffleSplit from pathlib import Path # enable/disable debug Mode debug = False", "nr of commands def main(): # read training data from files # default", "outfile: pickle.dump(clf, outfile) ## Check if trainingdata get 100% accuracy if debug: [accuracy,", "print(\"Wrong classification with traingdata. check SVM algorithm\") print(\"\\n------ Test Data ------\") ## 4.", "## Create X and Y data for SVM training X = [] y", "= np.array(bl, dtype='f') ## read in test data with open(traindataFolder + 'test-baseline.json') as", "* ny)) if (debug): print(\"\\n-- Reshaped Data ---\") print(\"len(reshapedData) aka 5 cmds: \"", "against simplicity of the decision surface. # A low C makes the decision", "length should be 8 (channels): \" + str(len(traindata[0][0]))) # 1. 
Filter and Downsample", "dataFilterdNp.shape reshapedData = dataFilterdNp.reshape((trainCmd, nx * ny)) if (debug): print(\"\\n-- Reshaped Data ---\")", "dataset if debug: print(\"predicted y \" + str(y_pred)) [accuracy, precision, recall] = modelAccuracy(y_test,", "open('../../data/mind/model/svm_model-mind.txt', 'wb') as outfile: pickle.dump(clf, outfile) ## Check if trainingdata get 100% accuracy", "13) gamma_range = np.logspace(-9, 3, 13) param_grid = dict(gamma=gamma_range, C=C_range) cv = StratifiedShuffleSplit(n_splits=5,", "gamma: defines how far the influence of a single training example reaches, with", "# Find optimal gamma and C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html # TODO: Set correct SVM", "to select more samples as support vectors. # Find optimal gamma and C", "np.logspace(-9, 3, 13) param_grid = dict(gamma=gamma_range, C=C_range) cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) grid", "this saves the array in .json format ## 2. Extract Features for Trainingdata", "\" + str(len(filterdTraindata))) print(\"filterdTraindata[0] length is now 8 (channels): \" + str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0]", "with low values meaning ‘far’ and high values meaning ‘close’. # C: trades", "aka 8*250=2000 : \" + str(len(reshapedData[0]))) for cmd in range(cmdCount): X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000])", "open(traindataFolder + 'test-volts.json') as f: voltsTest = json.load(f) # create a numpy array", "= findTrainClassifier(X, y) clf = svm.SVC(kernel='rbf', gamma=gamma, C=C) clf.fit(X, y) ## save model", "the array in .json format ## 2. 
Extract Features for Trainingdata (only commands)", "Data ------\") print(\"filterdTraindata length should be 5 (cmds): \" + str(len(filterdTraindata))) print(\"filterdTraindata[0] length", "data of mind commands # (beta) # ## import json import os import", "baseline = [] blpath = Path(traindataFolder + 'training-baseline.json') # read file of baseline", "from sklearn import svm, preprocessing, metrics from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit from pathlib", "if debug: print(\"The best parameters are %s with a score of %0.2f\" %", "from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit from pathlib import Path # enable/disable debug Mode", "str(y)) ## Feature Standardization X = preprocessing.scale(X) return X, y def modelAccuracy(y_test, y_pred):", "3, 13) param_grid = dict(gamma=gamma_range, C=C_range) cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) grid =", "5 cmds: \" + str(len(reshapedData))) print(\"len(reshapedData[0]) channels*samples aka 8*250=2000 : \" + str(len(reshapedData[0])))", "= preprocessing.scale(X) return X, y def modelAccuracy(y_test, y_pred): # Model Accuracy: how often", "read file of trainingCmd with open(filepath) as f: data = json.load(f) traindata.append(np.array(data, dtype='f'))", "(cmds): \" + str(len(traindata))) print(\"traindata[0] length should be 1500 (samples): \" + str(len(traindata[0])))", "import Path # enable/disable debug Mode debug = False # the 5 commands", "return X, y def modelAccuracy(y_test, y_pred): # Model Accuracy: how often is the", "'next', 'prev', 'voldown'] cmdCount = len(commands) # nr of commands def main(): #", "print(\"predicted y \" + str(y_pred)) [accuracy, precision, recall] = modelAccuracy(y_test, y_pred) print(\"Accuracy: \"", "cmds: \" + str(len(reshapedData))) print(\"len(reshapedData[0]) channels*samples aka 8*250=2000 : \" + str(len(reshapedData[0]))) for", "# read in baseline from file baseline = [] blpath = 
Path(traindataFolder +", "read file of baseline with open(blpath) as blf: bl = json.load(blf) baseline =", "length should be 5 (cmds): \" + str(len(filterdTraindata))) print(\"filterdTraindata[0] length is now 8", "\" + str(len(reshapedData))) print(\"len(reshapedData[0]) channels*samples aka 8*250=2000 : \" + str(len(reshapedData[0]))) for cmd", "butter, lfilter from sklearn import svm, preprocessing, metrics from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit", "commands from player commands = ['volup', 'playpause', 'next', 'prev', 'voldown'] cmdCount = len(commands)", "modelAccuracy(y, clf.predict(X)) if (accuracy == 1.0): print(\"Correct classification with traingdata\") else: print(\"Wrong classification", "Reshape Data reshapedData = [] dataFilterdNp = np.array(dataFilterd) trainCmd, nx, ny = dataFilterdNp.shape", "= np.array(baselineDataBP) # outfile = '../../data/mind/model/filterdTraingdata.txt' # json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'),", "8 (channels): \" + str(len(traindata[0][0]))) # 1. Filter and Downsample Trainingdata and Baseline", "Create X and Y data for SVM test X = [] y =", "scipy.signal import butter, lfilter from sklearn import svm, preprocessing, metrics from sklearn.model_selection import", "# default path with stored traingsdata # filepath-example = 'your project path'/data/mind/training-playpause.json' cwd", "= extractFeature(filterdTestdata, targetCmd) print(\"Anz. 
Features X_Test: \" + str(len(X_test))) print(\"y_Test: \" + str(y_test))", "are %s with a score of %0.2f\" % (grid.best_params_, grid.best_score_)) return grid.best_params_['C'], grid.best_params_['gamma']", "grid = GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv) grid.fit(X, y) if debug: print(\"The best parameters are", "pickle.dump(clf, outfile) ## Check if trainingdata get 100% accuracy if debug: [accuracy, _,", "np from mindFunctions import filterDownsampleData import codecs, json from scipy.signal import butter, lfilter", "[X_test, y_test] = extractFeature(filterdTestdata, targetCmd) print(\"Anz. Features X_Test: \" + str(len(X_test))) print(\"y_Test: \"", "+ 'test-baseline.json') as f: baselineTest = json.load(f) with open(traindataFolder + 'test-volts.json') as f:", "1. Filter and Downsample Trainingdata and Baseline [filterdTraindata, baselineDataBP] = filterDownsampleData(traindata, baseline, commands,", "[] blpath = Path(traindataFolder + 'training-baseline.json') # read file of baseline with open(blpath)", "= [] dataFilterdNp = np.array(dataFilterd) trainCmd, nx, ny = dataFilterdNp.shape reshapedData = dataFilterdNp.reshape((trainCmd,", "\" + str(len(traindata[0]))) print(\"traindata[0][0] length should be 8 (channels): \" + str(len(traindata[0][0]))) #", "outfile = '../../data/mind/model/filterdTraingdata.txt' # json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4)", "http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html # TODO: Set correct SVM params [C, gamma] = findTrainClassifier(X, y) clf", "traingsdata # filepath-example = 'your project path'/data/mind/training-playpause.json' cwd = os.getcwd() traindataFolder = cwd", "y def extractFeatureTest(dataDownSample, cmd): ## Create X and Y data for SVM test", "baseline with open(blpath) as blf: bl = json.load(blf) baseline = np.array(bl, dtype='f') ##", "Data ------\") ## 4. 
Filter and Downsample Testdata [filterdTestdata] = filterDownsampleData(voltsTest, baselineTest, commands,", "with traingdata. check SVM algorithm\") print(\"\\n------ Test Data ------\") ## 4. Filter and", "dataFilterdNp = np.array(dataFilterd) trainCmd, nx, ny = dataFilterdNp.shape reshapedData = dataFilterdNp.reshape((trainCmd, nx *", "numpy array voltsTest = np.array(voltsTest, dtype='f') baselineTest = np.array(baselineTest, dtype='f') if debug: print(\"\\n------", "Model Accuracy ------\") y_pred = clf.predict(X_test) # Predict the response for test dataset", "enable/disable debug Mode debug = False # the 5 commands from player commands", "str(len(reshapedData))) print(\"len(reshapedData[0]) channels*samples aka 8*250=2000 : \" + str(len(reshapedData[0]))) for cmd in range(cmdCount):", "debug: print(\"\\n------ Filtered Training Data ------\") print(\"filterdTraindata length should be 5 (cmds): \"", "channels*samples aka 8*250=2000 : \" + str(len(reshapedData[0]))) for cmd in range(cmdCount): X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000])", "False # the 5 commands from player commands = ['volup', 'playpause', 'next', 'prev',", "Testdata [filterdTestdata] = filterDownsampleData(voltsTest, baselineTest, commands, debug) ## 5. 
Extract Features from Testdata", "= modelAccuracy(y_test, y_pred) print(\"Accuracy: \" + str(accuracy)) print(\"Precision: \" + str(precision)) print(\"Recall: \"", "(channels): \" + str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0] length is now 250 (samples): \" + str(len(filterdTraindata[0][0])))", "str(len(reshapedData[0]))) for cmd in range(cmdCount): X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000]) y.append(cmd) y.append(cmd) y.append(cmd) # Feature", "mind commands # (beta) # ## import json import os import sys import", "X = preprocessing.scale(X) return X, y def extractFeatureTest(dataDownSample, cmd): ## Create X and", "features # gamma: defines how far the influence of a single training example", "findTrainClassifier(X, y): C_range = np.logspace(-2, 10, 13) gamma_range = np.logspace(-9, 3, 13) param_grid", "import json import os import sys import time import pickle import numpy as", "np.array(dataFilterd) trainCmd, nx, ny = dataFilterdNp.shape reshapedData = dataFilterdNp.reshape((trainCmd, nx * ny)) if", "C=C_range) cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) grid = GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv) grid.fit(X, y)", "filterDownsampleData import codecs, json from scipy.signal import butter, lfilter from sklearn import svm,", "StratifiedShuffleSplit from pathlib import Path # enable/disable debug Mode debug = False #", "filepath = Path(traindataFolder + 'training-' + commands[cmd] + '.json') # read file of", "json.load(blf) baseline = np.array(bl, dtype='f') ## read in test data with open(traindataFolder +", "+ str(len(reshapedData[0]))) for cmd in range(cmdCount): X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000]) y.append(cmd) y.append(cmd) y.append(cmd) #", "and C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html # TODO: Set correct 
SVM params [C, gamma] =", "lfilter from sklearn import svm, preprocessing, metrics from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit from", "read in baseline from file baseline = [] blpath = Path(traindataFolder + 'training-baseline.json')", "optimal gamma and C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html # TODO: Set correct SVM params [C,", "dtype='f')) # read in baseline from file baseline = [] blpath = Path(traindataFolder", "SVM training X = [] y = [] # TODO: Extract Features ##", "+ str(y_test)) ## 6. Check Model Accuracy print(\"\\n------ Model Accuracy ------\") y_pred =", "dataFilterdNp.reshape((trainCmd, nx * ny)) if (debug): print(\"\\n-- Reshaped Data ---\") print(\"len(reshapedData) aka 5", "of positive tuples are labeled as such? precision = metrics.precision_score(y_test, y_pred) # Model", "# Feature Standardization X = preprocessing.scale(X) return X, y def extractFeatureTest(dataDownSample, cmd): ##", "# C: trades off misclassification of training examples against simplicity of the decision", "the model freedom to select more samples as support vectors. 
# Find optimal", "### this saves the array in .json format # outfile = '../../data/mind/model/baselineDataBP.txt' #", "== \"pyscripts\"): traindataFolder = cwd + '/../../data/mind/' traindata = [] for cmd in", "for cmd in range(cmdCount): X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000]) y.append(cmd) y.append(cmd) y.append(cmd) # Feature Standardization", "from pathlib import Path # enable/disable debug Mode debug = False # the", "parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html # TODO: Set correct SVM params [C, gamma] = findTrainClassifier(X, y)", "------\") y_pred = clf.predict(X_test) # Predict the response for test dataset if debug:", "'your project path'/data/mind/training-playpause.json' cwd = os.getcwd() traindataFolder = cwd + '/data/mind/' # default", "(cmds): \" + str(len(filterdTraindata))) print(\"filterdTraindata[0] length is now 8 (channels): \" + str(len(filterdTraindata[0])))", "[filterdTestdata] = filterDownsampleData(voltsTest, baselineTest, commands, debug) ## 5. Extract Features from Testdata targetCmd", "far the influence of a single training example reaches, with low values meaning", "# gamma: defines how far the influence of a single training example reaches,", "GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv) grid.fit(X, y) if debug: print(\"The best parameters are %s with", "Find optimal gamma and C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html # TODO: Set correct SVM params", "Precision: what percentage of positive tuples are labeled as such? 
precision = metrics.precision_score(y_test,", "real success boolean return print('true') def extractFeature(dataFilterd): ## Create X and Y data", "debug: [accuracy, _, _] = modelAccuracy(y, clf.predict(X)) if (accuracy == 1.0): print(\"Correct classification", "voltsTest = np.array(voltsTest, dtype='f') baselineTest = np.array(baselineTest, dtype='f') if debug: print(\"\\n------ Training Data", "= [] blpath = Path(traindataFolder + 'training-baseline.json') # read file of baseline with", "should be 8 (channels): \" + str(len(traindata[0][0]))) # 1. Filter and Downsample Trainingdata", "and Y Data ---\") print(\"y : \" + str(y)) ## Feature Standardization X", "json.load(f) # create a numpy array voltsTest = np.array(voltsTest, dtype='f') baselineTest = np.array(baselineTest,", "modelAccuracy(y_test, y_pred): # Model Accuracy: how often is the classifier correct accuracy =", "model with open('../../data/mind/model/svm_model-mind.txt', 'wb') as outfile: pickle.dump(clf, outfile) ## Check if trainingdata get", "+ str(len(reshapedData))) print(\"len(reshapedData[0]) channels*samples aka 8*250=2000 : \" + str(len(reshapedData[0]))) for cmd in", "of mind commands # (beta) # ## import json import os import sys", "runs standalone if (os.path.basename(cwd) == \"pyscripts\"): traindataFolder = cwd + '/../../data/mind/' traindata =", "the decision surface smooth, while a high C aims at classifying all training", "cwd = os.getcwd() traindataFolder = cwd + '/data/mind/' # default path if python", "# filterdTraindata = np.array(filterdTraindata) # baselineDataBP = np.array(baselineDataBP) # outfile = '../../data/mind/model/filterdTraingdata.txt' #", "= '../../data/mind/model/filterdTraingdata.txt' # json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4) ###", "## Reshape Data reshapedData = [] dataFilterdNp = np.array(dataFilterd) trainCmd, nx, ny =", "bl = json.load(blf) baseline = np.array(bl, 
dtype='f') ## read in test data with", "= os.getcwd() traindataFolder = cwd + '/data/mind/' # default path if python script", "Data ------\") print(\"traindata length should be 5 (cmds): \" + str(len(traindata))) print(\"traindata[0] length", "'../../data/mind/model/filterdTraingdata.txt' # json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4) ### this", "= 'your project path'/data/mind/training-playpause.json' cwd = os.getcwd() traindataFolder = cwd + '/data/mind/' #", "# read training data from files # default path with stored traingsdata #", "read training data from files # default path with stored traingsdata # filepath-example", "of a single training example reaches, with low values meaning ‘far’ and high", "commands[cmd] + '.json') # read file of trainingCmd with open(filepath) as f: data", "import codecs, json from scipy.signal import butter, lfilter from sklearn import svm, preprocessing,", "modelAccuracy(y_test, y_pred) print(\"Accuracy: \" + str(accuracy)) print(\"Precision: \" + str(precision)) print(\"Recall: \" +", "indent=4) ### this saves the array in .json format ## 2. Extract Features", "check SVM algorithm\") print(\"\\n------ Test Data ------\") ## 4. Filter and Downsample Testdata", "traindataFolder = cwd + '/../../data/mind/' traindata = [] for cmd in range(cmdCount): filepath", "print(\"traindata length should be 5 (cmds): \" + str(len(traindata))) print(\"traindata[0] length should be", "------\") print(\"filterdTraindata length should be 5 (cmds): \" + str(len(filterdTraindata))) print(\"filterdTraindata[0] length is", "+ '.json') # read file of trainingCmd with open(filepath) as f: data =", "print('true') def extractFeature(dataFilterd): ## Create X and Y data for SVM training X", "tuples are labeled as such? 
precision = metrics.precision_score(y_test, y_pred) # Model Recall: what", "# Predict the response for test dataset if debug: print(\"predicted y \" +", "Extract Features from Testdata targetCmd = 1 # Playpause===1 [X_test, y_test] = extractFeature(filterdTestdata,", "random_state=42) grid = GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv) grid.fit(X, y) if debug: print(\"The best parameters", "# 1. Filter and Downsample Trainingdata and Baseline [filterdTraindata, baselineDataBP] = filterDownsampleData(traindata, baseline,", "score of %0.2f\" % (grid.best_params_, grid.best_score_)) return grid.best_params_['C'], grid.best_params_['gamma'] # start process if", "recall] = modelAccuracy(y_test, y_pred) print(\"Accuracy: \" + str(accuracy)) print(\"Precision: \" + str(precision)) print(\"Recall:", "= modelAccuracy(y, clf.predict(X)) if (accuracy == 1.0): print(\"Correct classification with traingdata\") else: print(\"Wrong", "## import json import os import sys import time import pickle import numpy", "as f: data = json.load(f) traindata.append(np.array(data, dtype='f')) # read in baseline from file", "## 4. Filter and Downsample Testdata [filterdTestdata] = filterDownsampleData(voltsTest, baselineTest, commands, debug) ##", "print(\"y : \" + str(y)) ## Feature Standardization X = preprocessing.scale(X) return X,", "be 5 (cmds): \" + str(len(filterdTraindata))) print(\"filterdTraindata[0] length is now 8 (channels): \"", "debug) ## 5. Extract Features from Testdata targetCmd = 1 # Playpause===1 [X_test,", "= metrics.accuracy_score(y_test, y_pred) # Model Precision: what percentage of positive tuples are labeled", "SVM algorithm\") print(\"\\n------ Test Data ------\") ## 4. 
Filter and Downsample Testdata [filterdTestdata]", "+ commands[cmd] + '.json') # read file of trainingCmd with open(filepath) as f:", "be 5 (cmds): \" + str(len(traindata))) print(\"traindata[0] length should be 1500 (samples): \"", "as outfile: pickle.dump(clf, outfile) ## Check if trainingdata get 100% accuracy if debug:", "debug Mode debug = False # the 5 commands from player commands =", "C=C) clf.fit(X, y) ## save model with open('../../data/mind/model/svm_model-mind.txt', 'wb') as outfile: pickle.dump(clf, outfile)", "str(len(filterdTraindata))) print(\"filterdTraindata[0] length is now 8 (channels): \" + str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0] length is", "'test-volts.json') as f: voltsTest = json.load(f) # create a numpy array voltsTest =", "(grid.best_params_, grid.best_score_)) return grid.best_params_['C'], grid.best_params_['gamma'] # start process if __name__ == '__main__': main()", "correct accuracy = metrics.accuracy_score(y_test, y_pred) # Model Precision: what percentage of positive tuples", "cmd in range(cmdCount): X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000]) y.append(cmd) y.append(cmd) y.append(cmd) # Feature Standardization X", "5. Extract Features from Testdata targetCmd = 1 # Playpause===1 [X_test, y_test] =", "Trainingdata (only commands) [X, y] = extractFeature(filterdTraindata) if debug: print(\"Anz. 
Features: \" +", "reshapedData = dataFilterdNp.reshape((trainCmd, nx * ny)) if (debug): print(\"\\n-- Reshaped Data ---\") print(\"len(reshapedData)", "print(\"The best parameters are %s with a score of %0.2f\" % (grid.best_params_, grid.best_score_))", "precision = metrics.precision_score(y_test, y_pred) # Model Recall: what percentage of positive tuples are", "\" + str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0] length is now 250 (samples): \" + str(len(filterdTraindata[0][0]))) #", "+ str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0] length is now 250 (samples): \" + str(len(filterdTraindata[0][0]))) # #", "blf: bl = json.load(blf) baseline = np.array(bl, dtype='f') ## read in test data", "print(\"filterdTraindata[0][0] length is now 250 (samples): \" + str(len(filterdTraindata[0][0]))) # # save filterd", "= dataFilterdNp.reshape((trainCmd, nx * ny)) if (debug): print(\"\\n-- Reshaped Data ---\") print(\"len(reshapedData) aka", "Filter and Downsample Trainingdata and Baseline [filterdTraindata, baselineDataBP] = filterDownsampleData(traindata, baseline, commands, debug)", "Model Precision: what percentage of positive tuples are labeled as such? precision =", "os.getcwd() traindataFolder = cwd + '/data/mind/' # default path if python script runs", "Model with features # gamma: defines how far the influence of a single", "X, y def extractFeatureTest(dataDownSample, cmd): ## Create X and Y data for SVM", "high C aims at classifying all training examples correctly by giving the model", "correct SVM params [C, gamma] = findTrainClassifier(X, y) clf = svm.SVC(kernel='rbf', gamma=gamma, C=C)", "extractFeature(filterdTraindata) if debug: print(\"Anz. 
Features: \" + str(len(X))) print(\"y: \" + str(y)) ##", "import time import pickle import numpy as np from mindFunctions import filterDownsampleData import", "commands = ['volup', 'playpause', 'next', 'prev', 'voldown'] cmdCount = len(commands) # nr of", "Baseline [filterdTraindata, baselineDataBP] = filterDownsampleData(traindata, baseline, commands, debug) if debug: print(\"\\n------ Filtered Training", "Path(traindataFolder + 'training-' + commands[cmd] + '.json') # read file of trainingCmd with", "np.array(filterdTraindata) # baselineDataBP = np.array(baselineDataBP) # outfile = '../../data/mind/model/filterdTraingdata.txt' # json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w',", "dtype='f') baselineTest = np.array(baselineTest, dtype='f') if debug: print(\"\\n------ Training Data ------\") print(\"traindata length", "influence of a single training example reaches, with low values meaning ‘far’ and", "return print('true') def extractFeature(dataFilterd): ## Create X and Y data for SVM training", "StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) grid = GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv) grid.fit(X, y) if debug: print(\"The", "percentage of positive tuples are labeled as such? 
precision = metrics.precision_score(y_test, y_pred) #", "as np from mindFunctions import filterDownsampleData import codecs, json from scipy.signal import butter,", "and Downsample Trainingdata and Baseline [filterdTraindata, baselineDataBP] = filterDownsampleData(traindata, baseline, commands, debug) if", "if trainingdata get 100% accuracy if debug: [accuracy, _, _] = modelAccuracy(y, clf.predict(X))", "import svm, preprocessing, metrics from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit from pathlib import Path", "gamma] = findTrainClassifier(X, y) clf = svm.SVC(kernel='rbf', gamma=gamma, C=C) clf.fit(X, y) ## save", "Features from Testdata targetCmd = 1 # Playpause===1 [X_test, y_test] = extractFeature(filterdTestdata, targetCmd)", "should be 5 (cmds): \" + str(len(traindata))) print(\"traindata[0] length should be 1500 (samples):", "debug = False # the 5 commands from player commands = ['volup', 'playpause',", "= [] y = [] print(len(X)) X.append(dataDownSample) y.append(cmd) if debug: print(\"\\n-- X and", "[] y = [] print(len(X)) X.append(dataDownSample) y.append(cmd) if debug: print(\"\\n-- X and Y", "separators=(',', ':'), sort_keys=True, # indent=4) ### this saves the array in .json format", "## Feature Standardization X = preprocessing.scale(X) return X, y def modelAccuracy(y_test, y_pred): #", "path if python script runs standalone if (os.path.basename(cwd) == \"pyscripts\"): traindataFolder = cwd", "print(\"len(reshapedData) aka 5 cmds: \" + str(len(reshapedData))) print(\"len(reshapedData[0]) channels*samples aka 8*250=2000 : \"", "X and Y data for SVM training X = [] y = []", "a score of %0.2f\" % (grid.best_params_, grid.best_score_)) return grid.best_params_['C'], grid.best_params_['gamma'] # start process", "nx, ny = dataFilterdNp.shape reshapedData = dataFilterdNp.reshape((trainCmd, nx * ny)) if (debug): print(\"\\n--", "Accuracy ------\") y_pred = clf.predict(X_test) # Predict the response for test dataset if", "'playpause', 
'next', 'prev', 'voldown'] cmdCount = len(commands) # nr of commands def main():", "str(len(traindata[0]))) print(\"traindata[0][0] length should be 8 (channels): \" + str(len(traindata[0][0]))) # 1. Filter", "6. Check Model Accuracy print(\"\\n------ Model Accuracy ------\") y_pred = clf.predict(X_test) # Predict", "[] print(len(X)) X.append(dataDownSample) y.append(cmd) if debug: print(\"\\n-- X and Y Data ---\") print(\"y", "= Path(traindataFolder + 'training-' + commands[cmd] + '.json') # read file of trainingCmd", "how far the influence of a single training example reaches, with low values", "+ 'training-' + commands[cmd] + '.json') # read file of trainingCmd with open(filepath)", "+ '/data/mind/' # default path if python script runs standalone if (os.path.basename(cwd) ==", "clf.fit(X, y) ## save model with open('../../data/mind/model/svm_model-mind.txt', 'wb') as outfile: pickle.dump(clf, outfile) ##", "response for test dataset if debug: print(\"predicted y \" + str(y_pred)) [accuracy, precision,", "np.logspace(-2, 10, 13) gamma_range = np.logspace(-9, 3, 13) param_grid = dict(gamma=gamma_range, C=C_range) cv", "saves the array in .json format ## 2. Extract Features for Trainingdata (only", "examples correctly by giving the model freedom to select more samples as support", "print(\"Anz. Features X_Test: \" + str(len(X_test))) print(\"y_Test: \" + str(y_test)) ## 6. Check", "[X, y] = extractFeature(filterdTraindata) if debug: print(\"Anz. Features: \" + str(len(X))) print(\"y: \"", "# enable/disable debug Mode debug = False # the 5 commands from player", "filterDownsampleData(voltsTest, baselineTest, commands, debug) ## 5. 
Extract Features from Testdata targetCmd = 1", "file baseline = [] blpath = Path(traindataFolder + 'training-baseline.json') # read file of", "numpy as np from mindFunctions import filterDownsampleData import codecs, json from scipy.signal import", "print(\"\\n-- X and Y Data ---\") print(\"y : \" + str(y)) ## Feature", "are labeled as such? precision = metrics.precision_score(y_test, y_pred) # Model Recall: what percentage", "+ 'training-baseline.json') # read file of baseline with open(blpath) as blf: bl =", "'training-baseline.json') # read file of baseline with open(blpath) as blf: bl = json.load(blf)", "'training-' + commands[cmd] + '.json') # read file of trainingCmd with open(filepath) as", "the 5 commands from player commands = ['volup', 'playpause', 'next', 'prev', 'voldown'] cmdCount", "\" + str(y_test)) ## 6. Check Model Accuracy print(\"\\n------ Model Accuracy ------\") y_pred", "= dataFilterdNp.shape reshapedData = dataFilterdNp.reshape((trainCmd, nx * ny)) if (debug): print(\"\\n-- Reshaped Data", "eeg data of mind commands # (beta) # ## import json import os", "import pickle import numpy as np from mindFunctions import filterDownsampleData import codecs, json", "extractFeatureTest(dataDownSample, cmd): ## Create X and Y data for SVM test X =", "baseline from file baseline = [] blpath = Path(traindataFolder + 'training-baseline.json') # read", "print(\"Correct classification with traingdata\") else: print(\"Wrong classification with traingdata. 
check SVM algorithm\") print(\"\\n------", "path with stored traingsdata # filepath-example = 'your project path'/data/mind/training-playpause.json' cwd = os.getcwd()", "open(filepath) as f: data = json.load(f) traindata.append(np.array(data, dtype='f')) # read in baseline from", "open(blpath) as blf: bl = json.load(blf) baseline = np.array(bl, dtype='f') ## read in", "blpath = Path(traindataFolder + 'training-baseline.json') # read file of baseline with open(blpath) as", "print(\"traindata[0] length should be 1500 (samples): \" + str(len(traindata[0]))) print(\"traindata[0][0] length should be", "str(len(X_test))) print(\"y_Test: \" + str(y_test)) ## 6. Check Model Accuracy print(\"\\n------ Model Accuracy", "y.append(cmd) y.append(cmd) y.append(cmd) # Feature Standardization X = preprocessing.scale(X) return X, y def", "y) clf = svm.SVC(kernel='rbf', gamma=gamma, C=C) clf.fit(X, y) ## save model with open('../../data/mind/model/svm_model-mind.txt',", "return X, y def extractFeatureTest(dataDownSample, cmd): ## Create X and Y data for", "5 (cmds): \" + str(len(filterdTraindata))) print(\"filterdTraindata[0] length is now 8 (channels): \" +", "print(\"\\n------ Training Data ------\") print(\"traindata length should be 5 (cmds): \" + str(len(traindata)))", "to node # TODO: implement real success boolean return print('true') def extractFeature(dataFilterd): ##", "np.array(baselineTest, dtype='f') if debug: print(\"\\n------ Training Data ------\") print(\"traindata length should be 5", "TODO: Set correct SVM params [C, gamma] = findTrainClassifier(X, y) clf = svm.SVC(kernel='rbf',", "str(len(filterdTraindata[0][0]))) # # save filterd Data # filterdTraindata = np.array(filterdTraindata) # baselineDataBP =", "Features X_Test: \" + str(len(X_test))) print(\"y_Test: \" + str(y_test)) ## 6. Check Model", "values meaning ‘far’ and high values meaning ‘close’. 
# C: trades off misclassification", "------\") print(\"traindata length should be 5 (cmds): \" + str(len(traindata))) print(\"traindata[0] length should", "metrics.accuracy_score(y_test, y_pred) # Model Precision: what percentage of positive tuples are labeled as", "best parameters are %s with a score of %0.2f\" % (grid.best_params_, grid.best_score_)) return", "what percentage of positive tuples are labelled as such? recall = metrics.recall_score(y_test, y_pred)", "Filtered Training Data ------\") print(\"filterdTraindata length should be 5 (cmds): \" + str(len(filterdTraindata)))", "= extractFeature(filterdTraindata) if debug: print(\"Anz. Features: \" + str(len(X))) print(\"y: \" + str(y))", "# TODO: Set correct SVM params [C, gamma] = findTrainClassifier(X, y) clf =", "classifier correct accuracy = metrics.accuracy_score(y_test, y_pred) # Model Precision: what percentage of positive", "support vectors. # Find optimal gamma and C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html # TODO: Set", "= np.array(baselineTest, dtype='f') if debug: print(\"\\n------ Training Data ------\") print(\"traindata length should be", "10, 13) gamma_range = np.logspace(-9, 3, 13) param_grid = dict(gamma=gamma_range, C=C_range) cv =", "standalone if (os.path.basename(cwd) == \"pyscripts\"): traindataFolder = cwd + '/../../data/mind/' traindata = []", "\" + str(len(filterdTraindata[0][0]))) # # save filterd Data # filterdTraindata = np.array(filterdTraindata) #", "def main(): # read training data from files # default path with stored", "## save model with open('../../data/mind/model/svm_model-mind.txt', 'wb') as outfile: pickle.dump(clf, outfile) ## Check if", "## # train eeg data of mind commands # (beta) # ## import", "open(traindataFolder + 'test-baseline.json') as f: baselineTest = json.load(f) with open(traindataFolder + 'test-volts.json') as", "= np.array(voltsTest, dtype='f') baselineTest = np.array(baselineTest, dtype='f') if debug: 
print(\"\\n------ Training Data ------\")", "if debug: print(\"\\n-- X and Y Data ---\") print(\"y : \" + str(y))", "= dict(gamma=gamma_range, C=C_range) cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) grid = GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv)", "cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2, random_state=42) grid = GridSearchCV(svm.SVC(), param_grid=param_grid, cv=cv) grid.fit(X, y) if", "with traingdata\") else: print(\"Wrong classification with traingdata. check SVM algorithm\") print(\"\\n------ Test Data", "+ 'test-volts.json') as f: voltsTest = json.load(f) # create a numpy array voltsTest", "print(\"filterdTraindata[0] length is now 8 (channels): \" + str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0] length is now", "Create X and Y data for SVM training X = [] y =", "= [] for cmd in range(cmdCount): filepath = Path(traindataFolder + 'training-' + commands[cmd]", "debug: print(\"predicted y \" + str(y_pred)) [accuracy, precision, recall] = modelAccuracy(y_test, y_pred) print(\"Accuracy:", "freedom to select more samples as support vectors. # Find optimal gamma and", "preprocessing.scale(X) return X, y def modelAccuracy(y_test, y_pred): # Model Accuracy: how often is", "commands def main(): # read training data from files # default path with", "+ str(len(filterdTraindata[0][0]))) # # save filterd Data # filterdTraindata = np.array(filterdTraindata) # baselineDataBP", "array in .json format ## 2. Extract Features for Trainingdata (only commands) [X,", "print(\"Accuracy: \" + str(accuracy)) print(\"Precision: \" + str(precision)) print(\"Recall: \" + str(recall)) #", "commands, debug) if debug: print(\"\\n------ Filtered Training Data ------\") print(\"filterdTraindata length should be", "from file baseline = [] blpath = Path(traindataFolder + 'training-baseline.json') # read file", "\" + str(y)) ## 3. Train Model with features # gamma: defines how", "(channels): \" + str(len(traindata[0][0]))) # 1. 
Filter and Downsample Trainingdata and Baseline [filterdTraindata,", "X.append(reshapedData[cmd][4000:6000]) y.append(cmd) y.append(cmd) y.append(cmd) # Feature Standardization X = preprocessing.scale(X) return X, y", "\" + str(accuracy)) print(\"Precision: \" + str(precision)) print(\"Recall: \" + str(recall)) # send", "C: trades off misclassification of training examples against simplicity of the decision surface.", "y] = extractFeature(filterdTraindata) if debug: print(\"Anz. Features: \" + str(len(X))) print(\"y: \" +", "X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000]) y.append(cmd) y.append(cmd) y.append(cmd) # Feature Standardization X = preprocessing.scale(X) return X,", "print(\"len(reshapedData[0]) channels*samples aka 8*250=2000 : \" + str(len(reshapedData[0]))) for cmd in range(cmdCount): X.append(reshapedData[cmd][0:2000])", "\" + str(recall)) # send success back to node # TODO: implement real", "X, y def modelAccuracy(y_test, y_pred): # Model Accuracy: how often is the classifier", "baselineDataBP] = filterDownsampleData(traindata, baseline, commands, debug) if debug: print(\"\\n------ Filtered Training Data ------\")", "C parameters: http://scikit-learn.org/stable/auto_examples/svm/plot_rbf_parameters.html # TODO: Set correct SVM params [C, gamma] = findTrainClassifier(X,", "from mindFunctions import filterDownsampleData import codecs, json from scipy.signal import butter, lfilter from", "import GridSearchCV, StratifiedShuffleSplit from pathlib import Path # enable/disable debug Mode debug =", "in baseline from file baseline = [] blpath = Path(traindataFolder + 'training-baseline.json') #", "= metrics.recall_score(y_test, y_pred) return [accuracy, precision, recall] def findTrainClassifier(X, y): C_range = np.logspace(-2,", "Check Model Accuracy print(\"\\n------ Model Accuracy ------\") y_pred = clf.predict(X_test) # Predict the", "time import pickle import numpy as np from mindFunctions import filterDownsampleData import 
codecs,", "clf = svm.SVC(kernel='rbf', gamma=gamma, C=C) clf.fit(X, y) ## save model with open('../../data/mind/model/svm_model-mind.txt', 'wb')", "import butter, lfilter from sklearn import svm, preprocessing, metrics from sklearn.model_selection import GridSearchCV,", "Features ## Reshape Data reshapedData = [] dataFilterdNp = np.array(dataFilterd) trainCmd, nx, ny", "y) if debug: print(\"The best parameters are %s with a score of %0.2f\"", "Features for Trainingdata (only commands) [X, y] = extractFeature(filterdTraindata) if debug: print(\"Anz. Features:", "giving the model freedom to select more samples as support vectors. # Find", "in .json format # outfile = '../../data/mind/model/baselineDataBP.txt' # json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',',", "%s with a score of %0.2f\" % (grid.best_params_, grid.best_score_)) return grid.best_params_['C'], grid.best_params_['gamma'] #", "TODO: implement real success boolean return print('true') def extractFeature(dataFilterd): ## Create X and", "off misclassification of training examples against simplicity of the decision surface. # A", "preprocessing, metrics from sklearn.model_selection import GridSearchCV, StratifiedShuffleSplit from pathlib import Path # enable/disable", "= 1 # Playpause===1 [X_test, y_test] = extractFeature(filterdTestdata, targetCmd) print(\"Anz. Features X_Test: \"", "outfile = '../../data/mind/model/baselineDataBP.txt' # json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4)", "# Model Precision: what percentage of positive tuples are labeled as such? 
precision", "commands # (beta) # ## import json import os import sys import time", "# TODO: Extract Features ## Reshape Data reshapedData = [] dataFilterdNp = np.array(dataFilterd)", "+ str(accuracy)) print(\"Precision: \" + str(precision)) print(\"Recall: \" + str(recall)) # send success", "json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4) ### this saves the", "f: data = json.load(f) traindata.append(np.array(data, dtype='f')) # read in baseline from file baseline", "2. Extract Features for Trainingdata (only commands) [X, y] = extractFeature(filterdTraindata) if debug:", "Features: \" + str(len(X))) print(\"y: \" + str(y)) ## 3. Train Model with", "if (accuracy == 1.0): print(\"Correct classification with traingdata\") else: print(\"Wrong classification with traingdata.", "clf.predict(X_test) # Predict the response for test dataset if debug: print(\"predicted y \"", "is now 8 (channels): \" + str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0] length is now 250 (samples):", "y_pred) # Model Recall: what percentage of positive tuples are labelled as such?", "filterDownsampleData(traindata, baseline, commands, debug) if debug: print(\"\\n------ Filtered Training Data ------\") print(\"filterdTraindata length", "baselineTest = np.array(baselineTest, dtype='f') if debug: print(\"\\n------ Training Data ------\") print(\"traindata length should", "now 8 (channels): \" + str(len(filterdTraindata[0]))) print(\"filterdTraindata[0][0] length is now 250 (samples): \"", "positive tuples are labelled as such? 
recall = metrics.recall_score(y_test, y_pred) return [accuracy, precision,", "grid.fit(X, y) if debug: print(\"The best parameters are %s with a score of", "# outfile = '../../data/mind/model/filterdTraingdata.txt' # json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, #", "file of baseline with open(blpath) as blf: bl = json.load(blf) baseline = np.array(bl,", "= cwd + '/data/mind/' # default path if python script runs standalone if", "at classifying all training examples correctly by giving the model freedom to select", "while a high C aims at classifying all training examples correctly by giving", "baselineTest, commands, debug) ## 5. Extract Features from Testdata targetCmd = 1 #", "training examples correctly by giving the model freedom to select more samples as", "### this saves the array in .json format ## 2. Extract Features for", "[] dataFilterdNp = np.array(dataFilterd) trainCmd, nx, ny = dataFilterdNp.shape reshapedData = dataFilterdNp.reshape((trainCmd, nx", "# # save filterd Data # filterdTraindata = np.array(filterdTraindata) # baselineDataBP = np.array(baselineDataBP)", "outfile) ## Check if trainingdata get 100% accuracy if debug: [accuracy, _, _]", "(debug): print(\"\\n-- Reshaped Data ---\") print(\"len(reshapedData) aka 5 cmds: \" + str(len(reshapedData))) print(\"len(reshapedData[0])", "baseline, commands, debug) if debug: print(\"\\n------ Filtered Training Data ------\") print(\"filterdTraindata length should", "the classifier correct accuracy = metrics.accuracy_score(y_test, y_pred) # Model Precision: what percentage of", "voltsTest = json.load(f) # create a numpy array voltsTest = np.array(voltsTest, dtype='f') baselineTest", "Recall: what percentage of positive tuples are labelled as such? 
recall = metrics.recall_score(y_test,", "back to node # TODO: implement real success boolean return print('true') def extractFeature(dataFilterd):", "y def modelAccuracy(y_test, y_pred): # Model Accuracy: how often is the classifier correct", "should be 5 (cmds): \" + str(len(filterdTraindata))) print(\"filterdTraindata[0] length is now 8 (channels):", "is the classifier correct accuracy = metrics.accuracy_score(y_test, y_pred) # Model Precision: what percentage", "aka 5 cmds: \" + str(len(reshapedData))) print(\"len(reshapedData[0]) channels*samples aka 8*250=2000 : \" +", "Training Data ------\") print(\"traindata length should be 5 (cmds): \" + str(len(traindata))) print(\"traindata[0]", "the decision surface. # A low C makes the decision surface smooth, while", "length should be 5 (cmds): \" + str(len(traindata))) print(\"traindata[0] length should be 1500", "algorithm\") print(\"\\n------ Test Data ------\") ## 4. Filter and Downsample Testdata [filterdTestdata] =", "metrics.recall_score(y_test, y_pred) return [accuracy, precision, recall] def findTrainClassifier(X, y): C_range = np.logspace(-2, 10,", "---\") print(\"y : \" + str(y)) ## Feature Standardization X = preprocessing.scale(X) return", "what percentage of positive tuples are labeled as such? precision = metrics.precision_score(y_test, y_pred)", "range(cmdCount): filepath = Path(traindataFolder + 'training-' + commands[cmd] + '.json') # read file", "high values meaning ‘close’. # C: trades off misclassification of training examples against", "100% accuracy if debug: [accuracy, _, _] = modelAccuracy(y, clf.predict(X)) if (accuracy ==", "if debug: print(\"Anz. 
Features: \" + str(len(X))) print(\"y: \" + str(y)) ## 3.", "C aims at classifying all training examples correctly by giving the model freedom", "Predict the response for test dataset if debug: print(\"predicted y \" + str(y_pred))", "indent=4) ### this saves the array in .json format # outfile = '../../data/mind/model/baselineDataBP.txt'", "Feature Standardization X = preprocessing.scale(X) return X, y def extractFeatureTest(dataDownSample, cmd): ## Create", "'wb') as outfile: pickle.dump(clf, outfile) ## Check if trainingdata get 100% accuracy if", "gamma=gamma, C=C) clf.fit(X, y) ## save model with open('../../data/mind/model/svm_model-mind.txt', 'wb') as outfile: pickle.dump(clf,", "# Model Recall: what percentage of positive tuples are labelled as such? recall", "format # outfile = '../../data/mind/model/baselineDataBP.txt' # json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True,", "with open('../../data/mind/model/svm_model-mind.txt', 'wb') as outfile: pickle.dump(clf, outfile) ## Check if trainingdata get 100%", "findTrainClassifier(X, y) clf = svm.SVC(kernel='rbf', gamma=gamma, C=C) clf.fit(X, y) ## save model with", "y) ## save model with open('../../data/mind/model/svm_model-mind.txt', 'wb') as outfile: pickle.dump(clf, outfile) ## Check", "print(\"\\n------ Filtered Training Data ------\") print(\"filterdTraindata length should be 5 (cmds): \" +", "smooth, while a high C aims at classifying all training examples correctly by", "the response for test dataset if debug: print(\"predicted y \" + str(y_pred)) [accuracy,", "read in test data with open(traindataFolder + 'test-baseline.json') as f: baselineTest = json.load(f)", "# train eeg data of mind commands # (beta) # ## import json", "correctly by giving the model freedom to select more samples as support vectors.", "debug: print(\"The best parameters are %s with a score of %0.2f\" % (grid.best_params_,", "meaning ‘far’ and high values meaning 
‘close’. # C: trades off misclassification of", "low values meaning ‘far’ and high values meaning ‘close’. # C: trades off", "the array in .json format # outfile = '../../data/mind/model/baselineDataBP.txt' # json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w',", "in range(cmdCount): X.append(reshapedData[cmd][0:2000]) X.append(reshapedData[cmd][2000:4000]) X.append(reshapedData[cmd][4000:6000]) y.append(cmd) y.append(cmd) y.append(cmd) # Feature Standardization X =", "accuracy = metrics.accuracy_score(y_test, y_pred) # Model Precision: what percentage of positive tuples are", "Data ---\") print(\"y : \" + str(y)) ## Feature Standardization X = preprocessing.scale(X)", "1.0): print(\"Correct classification with traingdata\") else: print(\"Wrong classification with traingdata. check SVM algorithm\")", "np.array(bl, dtype='f') ## read in test data with open(traindataFolder + 'test-baseline.json') as f:", "A low C makes the decision surface smooth, while a high C aims", "# filepath-example = 'your project path'/data/mind/training-playpause.json' cwd = os.getcwd() traindataFolder = cwd +", "3. Train Model with features # gamma: defines how far the influence of", "baselineDataBP = np.array(baselineDataBP) # outfile = '../../data/mind/model/filterdTraingdata.txt' # json.dump(filterdTraindata.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',',", "250 (samples): \" + str(len(filterdTraindata[0][0]))) # # save filterd Data # filterdTraindata =", "def extractFeatureTest(dataDownSample, cmd): ## Create X and Y data for SVM test X", "gamma_range = np.logspace(-9, 3, 13) param_grid = dict(gamma=gamma_range, C=C_range) cv = StratifiedShuffleSplit(n_splits=5, test_size=0.2,", "Model Accuracy: how often is the classifier correct accuracy = metrics.accuracy_score(y_test, y_pred) #", "str(y)) ## 3. 
Train Model with features # gamma: defines how far the", "':'), sort_keys=True, # indent=4) ### this saves the array in .json format ##", "a numpy array voltsTest = np.array(voltsTest, dtype='f') baselineTest = np.array(baselineTest, dtype='f') if debug:", "= '../../data/mind/model/baselineDataBP.txt' # json.dump(baselineDataBP.tolist(), codecs.open(outfile, 'w', encoding='utf-8'), separators=(',', ':'), sort_keys=True, # indent=4) ###", "\" + str(len(X_test))) print(\"y_Test: \" + str(y_test)) ## 6. Check Model Accuracy print(\"\\n------", "recall] def findTrainClassifier(X, y): C_range = np.logspace(-2, 10, 13) gamma_range = np.logspace(-9, 3,", "training example reaches, with low values meaning ‘far’ and high values meaning ‘close’.", "of commands def main(): # read training data from files # default path", "extractFeature(filterdTestdata, targetCmd) print(\"Anz. Features X_Test: \" + str(len(X_test))) print(\"y_Test: \" + str(y_test)) ##", "precision, recall] def findTrainClassifier(X, y): C_range = np.logspace(-2, 10, 13) gamma_range = np.logspace(-9,", "[accuracy, precision, recall] = modelAccuracy(y_test, y_pred) print(\"Accuracy: \" + str(accuracy)) print(\"Precision: \" +", "trainCmd, nx, ny = dataFilterdNp.shape reshapedData = dataFilterdNp.reshape((trainCmd, nx * ny)) if (debug):", "training examples against simplicity of the decision surface. # A low C makes", "accuracy if debug: [accuracy, _, _] = modelAccuracy(y, clf.predict(X)) if (accuracy == 1.0):", "\" + str(y_pred)) [accuracy, precision, recall] = modelAccuracy(y_test, y_pred) print(\"Accuracy: \" + str(accuracy))", "labeled as such? precision = metrics.precision_score(y_test, y_pred) # Model Recall: what percentage of", "debug: print(\"Anz. Features: \" + str(len(X))) print(\"y: \" + str(y)) ## 3. 
Train", "Extract Features for Trainingdata (only commands) [X, y] = extractFeature(filterdTraindata) if debug: print(\"Anz.", "X = [] y = [] print(len(X)) X.append(dataDownSample) y.append(cmd) if debug: print(\"\\n-- X", "+ str(y_pred)) [accuracy, precision, recall] = modelAccuracy(y_test, y_pred) print(\"Accuracy: \" + str(accuracy)) print(\"Precision:" ]
[ "output=input(game.move_description) while output not in [str(game.map_move_to_input[i]) for i in game.get_move_effective()]: #intput verification output=input(game.move_description)", "while output not in [str(game.map_move_to_input[i]) for i in game.get_move_effective()]: #intput verification output=input(game.move_description) return(game.map_input_to_move[output])", "ai's output corresponds to the human input output=input(game.move_description) while output not in [str(game.map_move_to_input[i])", "<gh_stars>1-10 from src.games.games import Game def ai_output(board, game): #the ai's output corresponds to", "from src.games.games import Game def ai_output(board, game): #the ai's output corresponds to the", "import Game def ai_output(board, game): #the ai's output corresponds to the human input", "ai_output(board, game): #the ai's output corresponds to the human input output=input(game.move_description) while output", "game): #the ai's output corresponds to the human input output=input(game.move_description) while output not", "output corresponds to the human input output=input(game.move_description) while output not in [str(game.map_move_to_input[i]) for", "corresponds to the human input output=input(game.move_description) while output not in [str(game.map_move_to_input[i]) for i", "to the human input output=input(game.move_description) while output not in [str(game.map_move_to_input[i]) for i in", "#the ai's output corresponds to the human input output=input(game.move_description) while output not in", "def ai_output(board, game): #the ai's output corresponds to the human input output=input(game.move_description) while", "input output=input(game.move_description) while output not in [str(game.map_move_to_input[i]) for i in game.get_move_effective()]: #intput verification", "src.games.games import Game def ai_output(board, game): #the ai's output corresponds to the human", "human input output=input(game.move_description) while output not in 
[str(game.map_move_to_input[i]) for i in game.get_move_effective()]: #intput", "the human input output=input(game.move_description) while output not in [str(game.map_move_to_input[i]) for i in game.get_move_effective()]:", "Game def ai_output(board, game): #the ai's output corresponds to the human input output=input(game.move_description)" ]
[ "lookup of given query in CMS data-services. cmssh find command lookup given query", "fname = arg.replace('file=', '') if arg and os.path.isfile(fname): mtype = mimetypes.guess_type(arg) if mtype[0]:", "name.find('.') == -1: archs.append(name) if archs: print '\\nInstalled architectures:' for item in archs:", "source pkg environment\" pkg_dir = '%s/%s/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir) cmd =", "def lookup(arg): \"\"\" Perform lookup of given query in CMS data-services. \"\"\" arg", "'%s/DataFormats/FWLite' % idir os.symlink(link, dst) for lib in ['external', 'lib']: link = '%s/%s/%s'", "= os.path.join(reldir, name) if name.find('edm') == 0 and os.path.isfile(fname): # we use Magic(cmd).execute", "for pkg in ['FWCore', 'DataFormats']: pdir = '%s/%s' % (idir, pkg) if os.path.exists(pdir):", "msg = \\ 'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) # check if release version and", "os.path.realpath('%s/CMSSW' % root) path = '%s/%s/cms/cmssw/%s' % (base, rel_arch, rel) os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir,", "\\ + ' command to install one' print msg def cms_read(arg): \"\"\" cmssh", "cmd def cms_root(arg): \"\"\" cmssh command to run ROOT within cmssh Examples: cmssh>", "release.' 
print msg return # set release architecture os.environ['SCRAM_ARCH'] = rel_arch # setup", "dbs_instance cmssh> dbs_instance cms_dbs_prod_global \"\"\" arg = arg.strip() if arg: if validate_dbs_instance(arg): os.environ['DBS_INSTANCE']", "msg += '\\nAvailable GRID commands: <cmd> either grid or voms\\n' msg += msg_green('vomsinit", "not arg: print_error(\"Usage: cp <options> source_file target_{file,directory}\") pat = pat_se orig = src.split('", "output def check_release_arch(rel): \"Check release/architecture\" # check if given release name is installed", "to: %s\" % val print msg def dbs_instance(arg=None): \"\"\" cmssh command to show", "+= 'Use ' + msg_green('install %s' % rel) msg += ' command to", "1) if len(split) == 1: cmd = item args = '' else: cmd", "installed on user system rel_dir = '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)):", "pat.match(rel): msg = 'Fail to validate release name \"%s\"' % rel print_error(msg) msg", "debug, 'json') RESMGR.assign([res]) pprint.pprint(res) def cms_vomsinit(_arg=None): \"\"\" cmssh command which executes voms-proxy-init on", "to install new release, ' msg += 'since cmssh was installed with system", "information for given meta-data entity, e.g. dataset, block, file, run. 
Examples: cmssh> info", "Once done print ' msg += msg_blue('EOF') + ' and hit ' +", "rel.lower().find('patch') != -1: print \"Installing cms+cmssw-patch+%s ...\" % rel cmd = 'source %s;", "lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh> lumi run=190704 cmssh> lumi {190704:[1,2,3,4], 201706:[1,2,3,67]} \"\"\"", "msg_green('vomsinfo ') \\ + ' show your proxy info (aka voms-proxy-info)\\n' msg +=", "= release_info(release=None, rfilter=arg) RESMGR.assign(res) releases = [str(r) for r in res] releases =", "cmssh> rmdir foo cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug =", "limit = 0 debug = 0 res = das_client(host, query, idx, limit, debug,", "arg: print_error(\"Usage: mkdir <options> dir\") if arg.find(':') == -1: # not a SE:dir", "def options(arg): \"\"\"Extract options from given arg string\"\"\" opts = [] for par", "'Type your problem, attach traceback, etc. 
Once done print ' msg += msg_blue('EOF')", "msg_blue('cmshelp <command>') msg += '\\nInstall python software: ' + \\ msg_blue('pip <search|(un)install> <package>')", "arg.replace('release=', '') res = release_info(arg, debug) elif startswith: msg = 'No pattern is", "os.symlink(link, dst) # switch to given release os.environ['CMSSW_VERSION'] = rel os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir,", "# not a SE:dir pattern run(\"mkdir %s\" % arg) else: try: status =", "lumi, site, user \"\"\" lookup(arg) def cms_du(arg): \"\"\" cmssh disk utility cmssh command.", "ipython.prompt_manager.in_template = '%s|\\#> ' % prompt return # check if given release name", "print_warning(stderr) rootsys = stdout.replace('\\n', '').replace('ROOTSYS=', '') dst = '%s/install/lib/release_root' % root if os.path.exists(dst):", "] cmd_list += ['cp %s file.root' % lfn, 'ls', 'cp file.root %s' %", "command copies local files/dirs to/from local files/dirs or CMS storate elements. Examples: cmssh>", "to install given release.' 
print msg return # set release architecture os.environ['SCRAM_ARCH'] =", "elif arg == 'dashboard': userdn = os.environ.get('USER_DN', None) if userdn: user = get_dashboardname(userdn)", "(root, lib) if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(link, dst) # switch to given", "(query DBS/Phedex/SiteDB)\\n' msg += msg_green('dbs_instance') \\ + ' show/set DBS instance, default is", "Please note: to enable access to RunSummary service please ensure that your usercert.pem", "cmssh jobs command lists local job queue or provides information about jobs at", "def cmsrel(rel): \"\"\" cmssh release setup command, it setups CMSSW environment and creates", "= 0 das_client(host, query, idx, limit, debug, 'plain') def cms_das_json(query): \"\"\" cmssh command", "debug = get_ipython().debug except: debug = 0 arg = arg.replace('dataset=', '').replace('file=', '').replace('block=', '')", "to create one' if user_input(msg, default='N'): with open('crab.cfg', 'w') as config: config.write(crabconfig()) msg", "-voms cms:/cms -key <userkey.pem> -cert <usercert.pem> \"\"\" cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem') with working_pem(PEMMGR.pem)", "commands:\\n' msg += msg_green('find ') \\ + ' search CMS meta-data (query DBS/Phedex/SiteDB)\\n'", "command and ' msg += 'CMS release environment will be set for you'", "cmsRun command. Requires cmsrel to setup CMSSW environment. \"\"\" cmd = 'cmsRun %s'", "cms_config(arg): \"\"\" Return configuration object for given dataset Examples: cmssh> config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\"", "given release.' 
print msg return # set release architecture os.environ['SCRAM_ARCH'] = rel_arch #", "results(): \"\"\"Return results from recent query\"\"\" return RESMGR def cms_commands(_arg=None): \"\"\" cmssh command", "' + \\ msg_blue('pip <search|(un)install> <package>') return msg def cms_help(arg=None): \"\"\" cmshelp command", "releases: rel_arch = item[0] status = item[1] if check_os(rel_arch): output.append((rel_arch, status)) return output", "environment cmd = 'eval `scramv1 runtime -sh`; env | grep ^ROOTSYS=' stdout, stderr", "list available CMSSW releases, accepts <list|all> args\\n' msg += msg_green('install ') \\ +", "def cms_das_json(query): \"\"\" cmssh command which queries DAS data-service with provided query and", "return cmd = 'source $CRAB_ROOT/crab.sh; crab %s' % arg cmsexe(cmd) def cmsrun(arg): \"\"\"", "to validate release name \"%s\"' % rel print_error(msg) msg = 'Please check the", "%s' % arg cmsexe(cmd) def cms_pager(arg=None): \"\"\" cmssh command to show or set", "arg) else: fname = os.environ.get('CMS_JSON') print_info('CMS JSON: %s' % fname) try: debug =", "dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run = 160915 sename = 'T3_US_Cornell:/store/user/valya' cmd_list = ['pager 0',", "debug = get_ipython().debug args = arg.split('|') if len(args) == 1: # no filter", "= '%s/%s' % (idir, pkg) if os.path.exists(pdir): shutil.rmtree(pdir) os.mkdir(pdir) touch(os.path.join(pdir, '__init__.py')) pkgs =", "%s\" % fname cmd = fname ipython.register_magic_function(Magic(cmd).execute, 'line', name) # Set cmssh prompt", "<list|all> args\\n' msg += msg_green('install ') \\ + ' install CMSSW release, e.g.", "dst.replace('&', '').strip() if dst == '.': dst = os.getcwd() # check if src", "+= 'apt-get update; ' msg = 'Initialize %s apt repository ...' 
% arch", "cmd = 'source %s; apt-get install cms+cmssw-patch+%s' % (script, rel) else: print \"Installing", "/a/b/c cmssh> info run=160915 cmssh> info local_file.root Please note: to enable access to", "cmssh.iprint import print_warning, print_error, print_status, print_info from cmssh.filemover import copy_lfn, rm_lfn, mkdir, rmdir,", "fname else: cmd = '-e -f %s' % fname ipython.run_line_magic('edmFileUtil', cmd) if debug:", "'cmssh/DEMO') with open(path, 'r') as demo_file: print demo_file.read() def results(): \"\"\"Return results from", "cmssh. \"\"\" # system modules import os import re import sys import time", "[] for par in arg.split(): if len(par) > 0 and par[0] == '-':", "a list of releases installed on a system\" _osname, osarch = osparameters() releases", "arg: if access2file(arg): os.environ['CMS_JSON'] = arg print_info('CMS_JSON: %s' % arg) else: fname =", "= CMSMGR.lookup(args[0].strip()) for flt in args[1:]: res = apply_filter(flt.strip(), gen) RESMGR.assign(res) list_results(res, debug)", "if not rel_arch: msg = 'Release ' + msg_red(rel) msg += ' is", "that your usercert.pem is mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\" if not arg: return try:", "user_input from cmssh.utils import execmd, touch, platform, fix_so from cmssh.cmsfs import dataset_info, block_info,", "from cmssh.filemover import copy_lfn, rm_lfn, mkdir, rmdir, list_se, dqueue from cmssh.utils import list_results,", "'%s root -l %s' % (pkgs_init, arg.strip()) run(cmd) def cms_xrdcp(arg): \"\"\" cmssh command", "debug) elif pat_release.match(arg): arg = arg.replace('release=', '') res = release_info(arg, debug) elif startswith:", "cmssh> rm local_file cmssh> rm -rf local_dir cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\" arg =", "if archs: print '\\nInstalled architectures:' for item in archs: print item elif arg", "apt command' raise Exception(msg) run(cmd) def cms_das(query): \"\"\" cmssh command which queries DAS", 
"cms_commands(_arg=None): \"\"\" cmssh command which lists all registered cmssh commands in current shell.", "from cmssh.auth_utils import PEMMGR, working_pem from cmssh.cmssw_utils import crab_submit_remotely, crabconfig from cmssh.cern_html import", "rel_arch) dst = '%s/install/lib/release_%s' % (root, lib) if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(link,", "+ ' status of job queue or CMS jobs\\n' msg += msg_green('read ')", "Examples: cmssh> xrdcp /a/b/c.root file:////tmp.file.root \"\"\" dyld_path = os.environ.get('DYLD_LIBRARY_PATH', None) root_path = os.environ['DEFAULT_ROOT']", "from cmssh.url_utils import get_data, send_email from cmssh.regex import pat_release, pat_site, pat_dataset, pat_block from", "debug) elif startswith: msg = 'No pattern is allowed for %s look-up' %", "or not work_area: msg = 'In order to run crab command you must", "if os.path.exists(pdir): shutil.rmtree(pdir) os.mkdir(pdir) touch(os.path.join(pdir, '__init__.py')) pkgs = ['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger', 'MessageService',", "entities: if arg.startswith(item + '='): startswith = item if os.path.isfile(orig_arg) or os.path.isdir(orig_arg): cmd", "'Your crab.cfg has been created, please edit it ' msg += 'appropriately and", "+= ['releases list', 'arch list', 'jobs', 'ls'] cmd_list += ['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] mgr =", "'') res = release_info(arg, debug) elif startswith: msg = 'No pattern is allowed", "os.listdir(os.environ['VO_CMS_SW_DIR']): if check_os(name) and name.find('.') == -1: archs.append(name) if archs: print '\\nInstalled architectures:'", "= mkdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_ls(arg): \"\"\" cmssh ls command lists", "= orig_arg.split('>', 1) out = out.strip() arg = arg.strip() else: out = None", "arg.strip() else: out = None if arg: arg = arg.strip() read(arg, out, debug)", "+ ' show/set DBS instance, default is DBS global 
instance\\n' msg += msg_green('mkdir/rmdir", "to run ROOT xrdcp via cmssh shell Examples: cmssh> xrdcp /a/b/c.root file:////tmp.file.root \"\"\"", "work_area = os.environ.get('CMSSW_WORKAREA', None) if not rel or not work_area: msg = 'In", "'since cmssh was installed with system CMSSW install area' print msg return #", "' \\ + 'e.g. info run=160915\\n' msg += msg_green('das ') + ' query", "tests for cmssh\" for fname in ['file1.root', 'file2.root']: if os.path.isfile(fname): os.remove(fname) lfn =", "in current shell environment\" cmd = '%s %s' % (self.cmd, args.strip()) run(cmd) def", "of CMSSW architectures (aka SCRAM_ARCH)\" archs = [a for a in tc_architectures(arch_type)] return", "pat = pat_se orig = src.split(' ')[-1] if os.path.exists(orig) and not pat.match(dst): if", "GRID commands: <cmd> either grid or voms\\n' msg += msg_green('vomsinit ') \\ +", "DBS\" % arg print_error(msg) elif pat_run.match(arg): arg = arg.replace('run=', '') res = run_info(arg,", "catch it run(cmd, sdir, 'bootstrap.log', msg, debug, shell=True, call=True) cmd = 'source `find", "% site) res = jobsummary({'site': site}) elif pat_user.match(arg): user = arg.replace('user=', '') print_info('Dashboard", "') if not email: msg = \"You did your email address\" print_error(msg) return", "list available CMSSW releases on given platform cmssh> releases all # show all", "in cms_architectures(): rel_dir = '%s/cms/cmssw/%s' % (arch, rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): rel_arch =", "options[0] == '-f': overwrite = True else: overwrite = False except: traceback.print_exc() return", "% arg else: msg = 'Not supported apt command' raise Exception(msg) run(cmd) def", "host = 'https://cmsweb.cern.ch' idx = 0 limit = 0 debug = 0 res", "access2file(fname): with open(fname, 'r') as cms_json: print cms_json.read() def integration_tests(_arg): \"Run series of", "'\\nAvailable CMSSW commands (once you install any CMSSW release):\\n' msg += msg_green('releases ')", "not 
rel_arch: msg = 'Release ' + msg_red(rel) msg += ' is not", "Examples: cmssh> info dataset=/a/b/c cmssh> info /a/b/c cmssh> info run=160915 cmssh> info local_file.root", "cmssh\" for fname in ['file1.root', 'file2.root']: if os.path.isfile(fname): os.remove(fname) lfn = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root'", "import list_results, check_os, unsupported_linux, access2file from cmssh.utils import osparameters, check_voms_proxy, run, user_input from", "(os.environ['SCRAM_ARCH'], arch) print_warning(msg) msg = '\\n%s/%s is not installed within cmssh, proceed' \\", "check if src still has options and user asked for -f options =", "It holds given command and provide a method to execute it in a", "https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh> read config.txt \"\"\" try: debug = get_ipython().debug except:", "'%s %s %s' % (pcre_init, gcc_init, root_init) cmd = '%s root -l %s'", "to list available releases.\\n' msg += 'Use ' + msg_green('install %s' % rel)", "cms_ls(arg) def cms_cp(arg): \"\"\" cmssh cp command copies local files/dirs to/from local files/dirs", "rel run(cmd) os.chdir(os.path.join(rel, 'src')) # get ROOT from run-time environment cmd = 'eval", "import pprint import mimetypes import traceback import subprocess # cmssh modules from cmssh.iprint", "res = apply_filter(flt.strip(), gen) RESMGR.assign(res) list_results(res, debug) def verbose(arg): \"\"\" Set/get verbosity level", "' command to install given release.' 
print msg return # set release architecture", "file content\\n' msg += msg_green('root ') + ' invoke ROOT\\n' msg += msg_green('du", "= '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return 'ok' output = []", "traceback.print_exc() def cms_mkdir(arg): \"\"\" cmssh mkdir command creates directory on local filesystem or", "arg.strip() res = [] try: debug = get_ipython().debug except: debug = 0 orig_arg", "\"\"\" arg = arg.strip() if arg: if validate_dbs_instance(arg): os.environ['DBS_INSTANCE'] = arg print \"Switch", "'-cache' in arg or '-get' in arg: cmd = 'apt%s' % arg else:", "== '/': cmd = '-e -f file:///%s' % fname else: cmd = '-e", "given CMSSW architecture, accept <list|all> args\\n' msg += msg_green('scram ') + ' CMSSW", "glob import shutil import base64 import pprint import mimetypes import traceback import subprocess", "releases = [str(r) for r in res] releases = list(set(releases)) releases.sort() for rel", "'\\nInstalled architectures:' for item in archs: print item elif arg == 'all' or", "jobs - site, which lists jobs at given site - dashboard, which lists", "def pkg_init(pkg_dir): \"Create CMS command to source pkg environment\" pkg_dir = '%s/%s/%s' \\", "% (os.environ['SCRAM_ARCH'], rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return 'ok' output = [] for arch,", "\"\"\" cmd = 'cmsRun %s' % arg cmsexe(cmd) def cms_pager(arg=None): \"\"\" cmssh command", "for r in res] releases = list(set(releases)) releases.sort() for rel in releases: print", "% arg) # DEBUG.set(arg) # else: # print_info(\"Debug level is %s\" % DEBUG.level)", "serate process, therefore # subprocess.Popen will not catch it run(cmd, sdir, 'bootstrap.log', msg,", "%s platform\" % platform() res = release_info(release=None, rfilter=arg) RESMGR.assign(res) releases = [str(r) for", "&' % lfn2, 'ls'] cmd_list += ['find user=oliver', 'jobs list', 'jobs user=AikenOliver'] cmd_list", 
"os.environ['CMSSW_VERSION'] = rel os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel) if os.path.isdir(os.path.join(cmssw_dir, rel + '/src')): os.chdir(os.path.join(cmssw_dir,", "archs = [a for a in tc_architectures(arch_type)] return archs def cms_arch(arg=None): \"\"\" Show", "\"Set CMSSH pager to %s\" % arg else: val = os.environ.get('CMSSH_PAGER', None) msg", "if mtype[0]: print \"Mime type:\", mtype[0] ipython = get_ipython() magic = ipython.find_line_magic('edmFileUtil') if", "# set release architecture os.environ['SCRAM_ARCH'] = rel_arch # setup environment cmssw_dir = os.environ.get('CMSSW_RELEASES',", "e.g. ls local.file or ls /store/user/file.root\\n' msg += msg_green('rm ') + ' remove", "Email : ') if not email: msg = \"You did your email address\"", "crab_dir print_warning(msg) msg = 'Would you like to create one' if user_input(msg, default='N'):", "Optional parameters either <all> or <list> Examples: cmssh> arch # show current and", "output dumps via pager) Examples: cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh> read", "\"\"\" Retrieve information about cmssh tickets, e.g. Examples: cmssh> tickets # list all", "+= msg_green('rm ') + ' remove file/LFN, ' \\ + 'e.g. rm local.file", "UNIX commands, e.g. ls, cp, supported in cmssh. \"\"\" # system modules import", "(se, site, dataset, block, run, release, file). 
Examples: cmssh> ls # UNIX command", "archs: print '\\nInstalled architectures:' for item in archs: print item elif arg ==", "+ msg_blue('commands') msg += '\\ncmssh command help : ' + msg_blue('cmshelp <command>') msg", "format\\n' msg += msg_green('jobs ') \\ + ' status of job queue or", "CMSSW architecture, accept <list|all> args\\n' msg += msg_green('scram ') + ' CMSSW scram", "run=160915\\n' msg += msg_green('das ') + ' query DAS service\\n' msg += msg_green('das_json", "in original shell environment\" cmd = '%s %s' % (self.cmd, args.strip()) subprocess.call(cmd, shell=True)", "to/from local files/dirs or CMS storate elements. Examples: cmssh> cp file1 file2 cmssh>", "'%s/FWCore/%s' % (idir, pkg) os.symlink(link, dst) link = '%s/src/DataFormats/FWLite/python' % path dst =", "[] for name in os.listdir(os.environ['VO_CMS_SW_DIR']): if check_os(name) and name.find('.') == -1: archs.append(name) if", "name in cms_architectures('all'): if arg == 'all': print name else: if check_os(name): print", "reqmgr from cmssh.cms_objects import get_dashboardname def options(arg): \"\"\"Extract options from given arg string\"\"\"", "stdout, stderr = execmd(cmd) os.environ['USER_DN'] = stdout.replace('\\n', '') def github_issues(arg=None): \"\"\" Retrieve information", "sename, 'ls %s' % sename, 'rm %s/file.root' % sename, 'ls %s' % sename,", "% res['html_url'] if isinstance(res, dict): ticket = pprint.pformat(res) else: ticket = res to_user", "execute method will run in current shell environment # old command for reference:", "['cp %s file.root' % lfn, 'ls', 'cp file.root %s' % sename, 'ls %s'", "rel or rel in ['reset', 'clear', 'clean']: path = os.environ['CMSSH_ROOT'] for idir in", "startswith = item if os.path.isfile(orig_arg) or os.path.isdir(orig_arg): cmd = 'ls ' + orig_arg", "item[0] status = item[1] if check_os(rel_arch): output.append((rel_arch, status)) return output def check_release_arch(rel): \"Check", "Set/get verbosity level \"\"\" arg = arg.strip() ipth = 
get_ipython() if arg ==", "msg += ' find dataset=/*Zee*\\n' msg += ' for r in results(): print", "command and provide a method to execute it in a shell \"\"\" def", "arg = arg.strip() read(arg, out, debug) def cms_releases(arg=None): \"\"\" List available CMS releases.", "% sename, ] cmd_list += ['cp %s file.root' % lfn, 'ls', 'cp file.root", "'json') RESMGR.assign([res]) pprint.pprint(res) def cms_vomsinit(_arg=None): \"\"\" cmssh command which executes voms-proxy-init on behalf", "in DBS\" % arg print_error(msg) elif pat_run.match(arg): arg = arg.replace('run=', '') res =", "' e.g. CMSSW_X_Y_Z<_patchN>' print msg return # check if we have stand-alone installation", "% sename, 'rm %s/file.root' % sename, 'ls %s' % sename, 'rm file.root', 'cp", "if not os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch) return 'ok' else: msg = '%s/%s rejected", "'source `find %s -name init.sh | tail -1`;' % pkg_dir if not os.path.isdir(pkg_dir):", "dbs_instances(): print inst def cms_help_msg(): \"\"\"cmsHelp message\"\"\" msg = 'Available cmssh commands:\\n' msg", "\"\"\" res = None try: debug = get_ipython().debug except: debug = 0 orig_arg", "debug = get_ipython().debug except: debug = 0 orig_arg = arg if orig_arg.find('>') !=", "be posted as anonymous gist ticket' print_info(msg) if not user_input('Proceed', default='N'): return email", "in releases: print rel else: msg = \"\\nYou don't have yet CMSSW release", "print rel installed_releases() def pkg_init(pkg_dir): \"Create CMS command to source pkg environment\" pkg_dir", "disk usage for given site, e.g. du T3_US_Cornell\\n' msg += '\\nAvailable CMSSW commands", "options and user asked for -f options = src.split(' ') if len(options) >", "parameters either <all> or <list> Examples: cmssh> arch # show current and installed", "dataset, block, file, run. 
Examples: cmssh> info dataset=/a/b/c cmssh> info /a/b/c cmssh> info", "= 'Your SCRAM_ARCH=%s, while found arch=%s' \\ % (os.environ['SCRAM_ARCH'], arch) print_warning(msg) msg =", "== 'list': print_info('Local data transfer') dqueue(arg) elif arg == 'dashboard': userdn = os.environ.get('USER_DN',", "run=190704 cmssh> lumi {190704:[1,2,3,4], 201706:[1,2,3,67]} \"\"\" try: debug = get_ipython().debug except: debug =", "print_info('Dashboard information, user=%s' % user) res = jobsummary({'user': user}) elif pat_site.match(arg): site =", "os.environ['SCRAM_ARCH']: msg = 'Your SCRAM_ARCH=%s, while found arch=%s' \\ % (os.environ['SCRAM_ARCH'], arch) print_warning(msg)", "configuration object for given dataset Examples: cmssh> config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\" if arg: arg", "run(cmd, shell=True) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_jobs(arg=None): \"\"\" cmssh jobs", "if dst == '.': dst = os.getcwd() # check if src still has", "= arg.split('|') if len(args) == 1: # no filter res = CMSMGR.lookup(arg) else:", "this ticket', default='N'): print_info('Aborting your action') return key = '<KEY>' % time.strftime(\"%Y-%m-%d %H:%M:%S\",", "idx, limit, debug, 'json') RESMGR.assign([res]) pprint.pprint(res) def cms_vomsinit(_arg=None): \"\"\" cmssh command which executes", "% lfn, 'cp %s file2.root &' % lfn2, 'ls'] cmd_list += ['find user=oliver',", "True dst = dst.replace('&', '').strip() if dst == '.': dst = os.getcwd() #", "(pcre_init, gcc_init, root_init) cmd = '%s root -l %s' % (pkgs_init, arg.strip()) run(cmd)", "')[-1] if os.path.exists(orig) and not pat.match(dst): if background: cmd = 'cp %s' %", "%s\" % val print msg def dbs_instance(arg=None): \"\"\" cmssh command to show or", "try: debug = get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: mkdir", "= get_ipython().debug except: debug = 0 fname = arg.replace('file=', '') if arg and", "msg = msg_red(msg) msg += 
msg_blue('cmsrel <rel>\\n') releases = os.listdir(os.environ['CMSSW_RELEASES']) msg += '\\nInstalled", "get_ipython().debug except: debug = 0 arg = arg.replace('dataset=', '').replace('file=', '').replace('block=', '') arg =", "cwd: %s\" % (rel, os.getcwd()) def cmsexe(cmd): \"\"\" Execute given command within CMSSW", "RunSummary service please ensure that your usercert.pem is mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\" if", "= arg print \"Switch to %s DBS instance\" % arg else: print \"Invalid", "# or post it at https://github.com/vkuznet/cmssh/issues/new \"\"\" if arg == 'new': msg =", "+= ' find dataset=/*Zee*\\n' msg += ' for r in results(): print r,", "msg return cmd = \"eval `scramv1 runtime -sh`; %s\" % cmd run(cmd, shell=True,", "def cms_find(arg): \"\"\" Perform lookup of given query in CMS data-services. cmssh find", "= execmd(cmd) os.environ['USER_DN'] = stdout.replace('\\n', '') def github_issues(arg=None): \"\"\" Retrieve information about cmssh", "executing cmd=%s\" % cmd print_warning(stderr) rootsys = stdout.replace('\\n', '').replace('ROOTSYS=', '') dst = '%s/install/lib/release_root'", "ticket = pprint.pformat(res) else: ticket = res to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') send_email(to_user, email, title,", "len(par) > 0 and par[0] == '-': opts.append(par) return opts class Magic(object): \"\"\"", "cmssh> cmsrel CMSSW_5_2_4 \"\"\" ipython = get_ipython() rel = rel.strip() if not rel", "for given CMSSW release\" args = {'release': rel} releases = get_data(tc_url('py_getReleaseArchitectures'), args) output", "on given platform cmssh> releases all # show all known CMS releases, including", "base64 import pprint import mimetypes import traceback import subprocess # cmssh modules from", "shell=True) def get_release_arch(rel): \"Return architecture for given CMSSW release\" args = {'release': rel}", "msg += 'https://github.com/vkuznet/cmssh/issues/new\\n' msg += 'otherwise it will be posted as anonymous 
gist", "debug) def verbose(arg): \"\"\" Set/get verbosity level \"\"\" arg = arg.strip() ipth =", "val print msg def dbs_instance(arg=None): \"\"\" cmssh command to show or set DBS", "fname) try: debug = get_ipython().debug except: debug = 0 if debug and access2file(fname):", "cmssh command which queries DAS data-service with provided query. Examples: cmssh> das dataset=/ZMM*", "= 0 res = das_client(host, query, idx, limit, debug, 'json') RESMGR.assign([res]) pprint.pprint(res) def", "import read from cmssh.dashboard import jobsummary from cmssh.reqmgr import reqmgr from cmssh.cms_objects import", "fname = os.environ.get('CMS_JSON') print_info('CMS JSON: %s' % fname) try: debug = get_ipython().debug except:", "print \"\\nInstalled releases:\" for rel in releases: print rel else: msg = \"\\nYou", "arch) dirs = os.listdir(apt_dir) dirs.sort() name = 'etc/profile.d/init.sh' script = os.path.join(os.path.join(apt_dir, dirs[-1]), name)", "if os.path.isdir(rdir): for rel in os.listdir(rdir): releases.append('%s/%s' % (rel, idir)) if releases: releases.sort()", "arch) print_warning(msg) if arch != os.environ['SCRAM_ARCH']: msg = 'Your SCRAM_ARCH=%s, while found arch=%s'", "cmssh> xrdcp /a/b/c.root file:////tmp.file.root \"\"\" dyld_path = os.environ.get('DYLD_LIBRARY_PATH', None) root_path = os.environ['DEFAULT_ROOT'] if", "= 'No crab.cfg file found in %s' % crab_dir print_warning(msg) msg = 'Would", "cmssh rmdir command removes directory from local file system or CMS storage element.", "res = jobsummary({'user': user}) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_config(arg): \"\"\"", "import time import json import glob import shutil import base64 import pprint import", "# check existence of crab.cfg crab_dir = os.path.join(work_area, 'crab') crab_cfg = os.path.join(crab_dir, 'crab.cfg')", "user = arg.replace('user=', '') print_info('Dashboard information, user=%s' % user) res = jobsummary({'user': user})", "to identify CMSSW environment, 
please run first: ' msg = msg_red(msg) msg +=", "env | grep ^ROOTSYS=' stdout, stderr = execmd(cmd) if stderr: print \"While executing", "item args = '' else: cmd = split[0] args = split[-1] mgr.run_line_magic(cmd, args)", "cmssh cp command copies local files/dirs to/from local files/dirs or CMS storate elements.", "print ' msg += msg_blue('EOF') + ' and hit ' + msg_blue('Enter') +", "print \"%s is ready, cwd: %s\" % (rel, os.getcwd()) def cmsexe(cmd): \"\"\" Execute", "site, user \"\"\" lookup(arg) def cms_du(arg): \"\"\" cmssh disk utility cmssh command. Examples:", "Examples: cmssh> du # UNIX command cmssh> du T3_US_Cornell \"\"\" arg = arg.strip()", "commands def bootstrap(arch): \"Bootstrap new architecture\" swdir = os.environ['VO_CMS_SW_DIR'] arch = os.environ['SCRAM_ARCH'] cmd", "cmsenv\\n' msg += 'please use ' + msg_green('cmsrel') + ' command and '", "get_ipython() rel = rel.strip() if not rel or rel in ['reset', 'clear', 'clean']:", "traceback.print_exc() def cms_ls(arg): \"\"\" cmssh ls command lists local files/dirs/CMS storate elements or", "%s' % sename, 'mkdir %s/foo' % sename, 'ls %s' % sename, 'rmdir %s/foo'", "'apt%s' % arg else: msg = 'Not supported apt command' raise Exception(msg) run(cmd)", "arg == 'all': print 'CMSSW architectures:' else: print 'CMSSW architectures for %s:' \\", "cmssh command which lists all registered cmssh commands in current shell. 
Examples: cmssh>", "pprint.pprint(res) def cms_vomsinit(_arg=None): \"\"\" cmssh command which executes voms-proxy-init on behalf of the", "ipython = get_ipython() if arg[0] == '(' and arg[-1] == ')': arg =", "find file dataset=/Cosmics/CRUZET3-v1/RAW csmsh> find site dataset=/Cosmics/CRUZET3-v1/RAW cmssh> find config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh> find", "os.environ.get('DYLD_LIBRARY_PATH', None) root_path = os.environ['DEFAULT_ROOT'] if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path, 'lib') cmd =", "= 0 if not arg: print_error(\"Usage: rmdir <options> dir\") if os.path.exists(arg): run(\"rmdir %s\"", "releases = list(set(releases)) releases.sort() for rel in releases: print rel installed_releases() def pkg_init(pkg_dir):", "' command and ' msg += 'CMS release environment will be set for", "commands \"\"\" mdict = get_ipython().magics_manager.lsmagic() cmds = [k for k, v in mdict['line'].items()", "list_results(res, debug) def verbose(arg): \"\"\" Set/get verbosity level \"\"\" arg = arg.strip() ipth", "root -l %s' % (pkgs_init, arg.strip()) run(cmd) def cms_xrdcp(arg): \"\"\" cmssh command to", "debug = get_ipython().debug except: debug = 0 fname = arg.replace('file=', '') if arg", "or in DBS\" % arg print_error(msg) elif pat_run.match(arg): arg = arg.replace('run=', '') res", "raise Exception(msg) print \"Switch to SCRAM_ARCH=%s\" % arg os.environ['SCRAM_ARCH'] = arg def cms_apt(arg=''):", "query in CMS data-services. 
Examples: cmssh> find dataset=/ZMM* cmssh> find file dataset=/Cosmics/CRUZET3-v1/RAW csmsh>", "match' def get_apt_init(arch): \"Return proper apt init.sh for given architecture\" apt_dir = os.path.join(\\", "cms_help_msg() print doc def cms_rm(arg): \"\"\" CMS rm command works with local files/dirs", "def cms_lumi(arg): \"\"\" Return lumi info for a given dataset/file/block/lfn/run Examples: cmssh> lumi", "orig_arg.split('|', 1) arg = arg.strip() else: flt = None startswith = None entities", "process, therefore # subprocess.Popen will not catch it run(cmd, sdir, 'bootstrap.log', msg, debug,", "status print msg return print \"Searching for %s\" % rel script = get_apt_init(os.environ['SCRAM_ARCH'])", "on a system\" _osname, osarch = osparameters() releases = [] for idir in", "True arg = arg.replace('&', '').strip() src, dst = arg.rsplit(' ', 1) if dst.find('&')", "architectures cmssh> arch list # show all CMSSW architectures for given platform \"\"\"", "os.environ.get('CMSSW_RELEASES', os.getcwd()) if not os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir) root = os.environ['CMSSH_ROOT'] idir = os.environ['CMSSH_INSTALL_DIR'] base", "entity, ' \\ + 'e.g. 
info run=160915\\n' msg += msg_green('das ') + '", "+= 'otherwise it will be posted as anonymous gist ticket' print_info(msg) if not", "pager Examples: cmssh> pager # shows current setting cmssh> pager None # set", "arch) output.append(msg) if output: return ', '.join(output) osname, osarch = osparameters() if osname", "mkdir <options> dir\") if arg.find(':') == -1: # not a SE:dir pattern run(\"mkdir", "orig_arg.find('|') != -1: arg, flt = orig_arg.split('|', 1) arg = arg.strip() else: flt", "'ls run=%s' % run, 'ls file=%s' % lfn] cmd_list += ['ls %s' %", "archs: print item elif arg == 'all' or arg == 'list': if arg", "msg_green, msg_blue from cmssh.iprint import print_warning, print_error, print_status, print_info from cmssh.filemover import copy_lfn,", "msg += msg_green('releases ') \\ + ' list available CMSSW releases, accepts <list|all>", "config: config.write(crabconfig()) msg = 'Your crab.cfg has been created, please edit it '", "or len(glob.glob(dst)): cmd = \"rm %s\" % arg run(cmd) else: if pat_lfn.match(arg.split(':')[-1]): status", "def installed_releases(): \"Print a list of releases installed on a system\" _osname, osarch", "within cmssh Examples: cmssh> root -l \"\"\" pcre_init = pkg_init('external/pcre') gcc_init = pkg_init('external/gcc')", "cmssh> jobs user=my_cms_user_name \"\"\" res = None try: debug = get_ipython().debug except: debug", "' show or switch to given CMSSW architecture, accept <list|all> args\\n' msg +=", "crabconfig from cmssh.cern_html import read from cmssh.dashboard import jobsummary from cmssh.reqmgr import reqmgr", "lookup of given query in CMS data-services. 
\"\"\" arg = arg.strip() debug =", "either <all> or <list> Examples: cmssh> arch # show current and installed architecture(s)", "background = True dst = dst.replace('&', '').strip() if dst == '.': dst =", "external+fakesystem+1.0; ' cmd += 'apt-get update; ' msg = 'Initialize %s apt repository", "can be spawned into serate process, therefore # subprocess.Popen will not catch it", "to install one' print msg def cms_read(arg): \"\"\" cmssh command to read provided", "msg += '\\nAvailable CMSSW commands (once you install any CMSSW release):\\n' msg +=", "CMSSW_5_0_0\\n' msg += msg_green('cmsrel ') \\ + ' switch to given CMSSW release", "pattern run(\"mkdir %s\" % arg) else: try: status = mkdir(arg, verbose=debug) print_status(status) except:", "rdir) for name in os.listdir(reldir): fname = os.path.join(reldir, name) if name.find('edm') == 0", "msg = \"\\nYou don't have yet CMSSW release installed on your system.\" msg", "command\" # in CMS cmsenv is an alias to: eval `scramv1 runtime -sh`'", "cmssh disk utility cmssh command. Examples: cmssh> du # UNIX command cmssh> du", "def cms_config(arg): \"\"\" Return configuration object for given dataset Examples: cmssh> config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM", "if not userdn: cmd = \"voms-proxy-info -identity\" stdout, stderr = execmd(cmd) os.environ['USER_DN'] =", "'Not supported apt command' raise Exception(msg) run(cmd) def cms_das(query): \"\"\" cmssh command which", "'') def github_issues(arg=None): \"\"\" Retrieve information about cmssh tickets, e.g. Examples: cmssh> tickets", "user_input('Send this ticket', default='N'): print_info('Aborting your action') return key = '<KEY>' % time.strftime(\"%Y-%m-%d", "command to install given release.' 
print msg return # set release architecture os.environ['SCRAM_ARCH']", "os.path.isfile(fname): os.remove(fname) lfn = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2 = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO'", "tc_url from cmssh.das import das_client from cmssh.url_utils import get_data, send_email from cmssh.regex import", "cmssh.cmssw_utils import crab_submit_remotely, crabconfig from cmssh.cern_html import read from cmssh.dashboard import jobsummary from", "else: res = get_tickets(arg) RESMGR.assign(res) pprint.pprint(res) def demo(_arg=None): \"Show cmssh demo file\" root", "'/src')): os.chdir(os.path.join(cmssw_dir, rel + '/src')) else: os.chdir(cmssw_dir) cmd = \"scramv1 project CMSSW %s\"", "% lfn, 'ls', 'cp file.root %s' % sename, 'ls %s' % sename, 'rm", "if platform() == 'osx': idir = '%s/%s/cms/cmssw/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel) fix_so(idir)", "as config: config.write(crabconfig()) msg = 'Your crab.cfg has been created, please edit it", "debug = 0 if not arg: print_error(\"Usage: rmdir <options> dir\") if os.path.exists(arg): run(\"rmdir", "List available CMS releases. 
Optional parameters either <list> or <all> Examples: cmssh> releases", "<cmd> either grid or voms\\n' msg += msg_green('vomsinit ') \\ + ' setup", "find lumi run=190704 cmssh> find user=oliver List of supported entities: dataset, block, file,", "dst = os.getcwd() # check if src still has options and user asked", "'%s/install/lib/release_%s' % (root, lib) if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(link, dst) # switch", "site dataset=/Cosmics/CRUZET3-v1/RAW cmssh> find config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh> find run=160915 cmssh> find lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD", "environment \"\"\" vdir = os.environ.get('VO_CMS_SW_DIR', None) arch = os.environ.get('SCRAM_ARCH', None) if not vdir", "as key: run(\"voms-proxy-destroy\") cmd = \"voms-proxy-init -rfc -voms cms:/cms -key %s -cert %s\"", "installed_releases() def pkg_init(pkg_dir): \"Create CMS command to source pkg environment\" pkg_dir = '%s/%s/%s'", "'find lumi {\"190704\":[1,2,3]}', 'find lumi {190704:[1,2,3]}'] cmd_list += ['find config dataset=%s' % dataset2]", "msg += ' is not yet installed on your system.\\n' msg += 'Use", "or set DBS instance Examples: cmssh> dbs_instance cmssh> dbs_instance cms_dbs_prod_global \"\"\" arg =", "command to show or set internal pager Examples: cmssh> pager # shows current", "if os.path.isdir(os.path.join(cmssw_dir, rel + '/src')): os.chdir(os.path.join(cmssw_dir, rel + '/src')) else: os.chdir(cmssw_dir) cmd =", "\\ + ' show your proxy info (aka voms-proxy-info)\\n' msg += '\\nQuery results", "or provides information about jobs at give site or for given user. 
It", "-l', 'rmdir ttt', 'ls'] cmd_list += ['ls dataset=%s' % dataset, 'ls run=%s' %", "= split[-1] mgr.run_line_magic(cmd, args) def cms_info(arg): \"\"\" cmssh info command provides information for", "\"\"\" if arg: print \"CMSSW releases for %s platform\" % platform() res =", "dst.find('&') != -1: background = True dst = dst.replace('&', '').strip() if dst ==", "meta-data (query DBS/Phedex/SiteDB)\\n' msg += msg_green('dbs_instance') \\ + ' show/set DBS instance, default", "if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = dyld_path #def debug(arg): # \"\"\" # debug shell command", "arg and os.path.isfile(fname): mtype = mimetypes.guess_type(arg) if mtype[0]: print \"Mime type:\", mtype[0] ipython", "os.environ['SCRAM_ARCH'] archs = [] for name in os.listdir(os.environ['VO_CMS_SW_DIR']): if check_os(name) and name.find('.') ==", "root_path = os.environ['DEFAULT_ROOT'] if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path, 'lib') cmd = '%s/xrdcp %s'", "% run, 'ls file=%s' % lfn] cmd_list += ['ls %s' % dataset, 'info", "or switch to given CMSSW architecture, accept <list|all> args\\n' msg += msg_green('scram ')", "%H:%M:%S\", time.gmtime(time.time())) files = {key: {'content': desc}} res = post_ticket(key, files) if res.has_key('html_url'):", "'OSX/ia32 is not supported in CMSSW' return 'no match' def get_apt_init(arch): \"Return proper", "a SE:dir pattern run(\"mkdir %s\" % arg) else: try: status = mkdir(arg, verbose=debug)", "print_info('Local data transfer') dqueue(arg) elif arg == 'dashboard': userdn = os.environ.get('USER_DN', None) if", "import architectures as tc_architectures from cmssh.results import RESMGR from cmssh.auth_utils import PEMMGR, working_pem", "in args[1:]: res = apply_filter(flt.strip(), gen) RESMGR.assign(res) list_results(res, debug) def verbose(arg): \"\"\" Set/get", "' switch to given CMSSW release and setup its environment\\n' msg += msg_green('arch", "'lib') cmd = '%s/xrdcp %s' % (os.path.join(root_path, 'bin'), 
arg.strip()) run(cmd) if dyld_path: os.environ['DYLD_LIBRARY_PATH']", "or set internal pager Examples: cmssh> pager # shows current setting cmssh> pager", "arg.rsplit(' ', 1) if dst.find('&') != -1: background = True dst = dst.replace('&',", "root = os.environ['CMSSH_ROOT'] idir = os.environ['CMSSH_INSTALL_DIR'] base = os.path.realpath('%s/CMSSW' % root) path =", "['du T3_US_Cornell', 'ls T3_US_Cornell'] cmd_list += ['ls %s' % sename, 'mkdir %s/foo' %", "get_ipython() if arg == '': print_info(\"Verbose level is %s\" % ipth.debug) else: if", "print \"While executing cmd=%s\" % cmd print_warning(stderr) rootsys = stdout.replace('\\n', '').replace('ROOTSYS=', '') dst", "run(cmd, sdir, 'bootstrap.log', msg, debug, shell=True, call=True) cmd = 'source `find %s/%s/external/apt -name", "local_dir cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\" arg = arg.strip() try: debug = get_ipython().debug except:", "releases, including online, tests, etc. \"\"\" if arg: print \"CMSSW releases for %s", "we don't need # to add scramv1 command in front of edm one,", "source_file\") dst = arg.split()[-1] if os.path.exists(dst) or len(glob.glob(dst)): cmd = \"rm %s\" %", "for reference: # cmd = \"eval `scramv1 runtime -sh`; %s\" % fname cmd", "+= ['find user=oliver', 'jobs list', 'jobs user=AikenOliver'] cmd_list += ['releases list', 'arch list',", "\"\"\" if arg == 'new': msg = 'You can post new ticket via", "or cp /store/user/file.root .\\n' msg += msg_green('info ') \\ + ' provides detailed", "query\"\"\" return RESMGR def cms_commands(_arg=None): \"\"\" cmssh command which lists all registered cmssh", "type:\", mtype[0] ipython = get_ipython() magic = ipython.find_line_magic('edmFileUtil') if magic: if arg[0] ==", "given CMSSW release\" args = {'release': rel} releases = get_data(tc_url('py_getReleaseArchitectures'), args) output =", "cmd = \"rm %s\" % arg run(cmd) else: if pat_lfn.match(arg.split(':')[-1]): status = rm_lfn(arg,", "'release', 'file'] for 
item in entities: if arg.startswith(item + '='): startswith = item", "arg: if arg not in ['0', '1']: print_error('Please provide 0/1 for debug_http command')", "return # check if we have stand-alone installation if os.environ.get('CMSSH_CMSSW', None): msg =", "= '%s release is not officially supported under %s' \\ % (rel, arch)", "= [a for a in tc_architectures(arch_type)] return archs def cms_arch(arg=None): \"\"\" Show or", "ticket', default='N'): print_info('Aborting your action') return key = '<KEY>' % time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime(time.time()))", "e.g. Examples: cmssh> tickets # list all cmssh tickets cmssh> ticket 14 #", "'source %s; apt-cache search %s | grep -v -i fwlite' % (script, rel)", "pkg) os.symlink(link, dst) link = '%s/src/DataFormats/FWLite/python' % path dst = '%s/DataFormats/FWLite' % idir", "cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh> lumi run=190704 cmssh> lumi {190704:[1,2,3,4], 201706:[1,2,3,67]}", "cms_archs: msg = 'Wrong architecture, please choose from the following list\\n' msg +=", "= [] for arch, status in get_release_arch(rel): if not status: msg = '%s", "pat = pat_release if not pat.match(rel): msg = 'Fail to validate release name", "and installed architecture(s) cmssh> arch all # show all known CMSSW architectures cmssh>", "did your email address\" print_error(msg) return desc = '' msg = 'Type your", "magic functions. It holds given command and provide a method to execute it", "lookup given query in CMS data-services. 
Examples: cmssh> find dataset=/ZMM* cmssh> find file", "question\\n' msg += '\\nAvailable GRID commands: <cmd> either grid or voms\\n' msg +=", "block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh> lumi run=190704 cmssh> lumi {190704:[1,2,3,4], 201706:[1,2,3,67]} \"\"\" try:", "# Set cmssh prompt prompt = 'cms-sh' ipython.prompt_manager.in_template = '%s|\\#> ' % prompt", "'%s/%s' % (idir, pkg) if os.path.exists(pdir): shutil.rmtree(pdir) os.mkdir(pdir) touch(os.path.join(pdir, '__init__.py')) pkgs = ['Framework',", "UNIX command cmssh> ls -l local_file cmssh> ls T3_US_Cornell:/store/user/valya cmssh> ls run=160915 \"\"\"", "import mimetypes import traceback import subprocess # cmssh modules from cmssh.iprint import msg_red,", "= arg.split()[-1] if os.path.exists(dst) or len(glob.glob(dst)): cmd = \"rm %s\" % arg run(cmd)", "-1: rdir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/cms/cmssw' % idir) if os.path.isdir(rdir): for rel in", "0 arg = arg.replace('dataset=', '').replace('file=', '').replace('block=', '') arg = arg.replace('lfn=', '').replace('run=', '') res", "import dataset_info, block_info, file_info, site_info, run_info from cmssh.cmsfs import CMSMGR, apply_filter, validate_dbs_instance from", "msg = '%s release is not officially supported under %s' \\ % (rel,", "as tc_architectures from cmssh.results import RESMGR from cmssh.auth_utils import PEMMGR, working_pem from cmssh.cmssw_utils", "name else: if check_os(name): print name else: cms_archs = cms_architectures('all') if arg not", "(rel, arch) if user_input(msg, default='N'): os.environ['SCRAM_ARCH'] = arch if not os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)):", "\"\\nYou don't have yet CMSSW release installed on your system.\" msg += \"\\nPlease", "= os.path.join(work_area, 'crab') crab_cfg = os.path.join(crab_dir, 'crab.cfg') if not 
os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir) if", "file/LFN, e.g. cp local.file or cp /store/user/file.root .\\n' msg += msg_green('info ') \\", "user_input('Proceed', default='N'): return email = raw_input('Your Email : ') if not email: msg", "'%s/xrdcp %s' % (os.path.join(root_path, 'bin'), arg.strip()) run(cmd) if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = dyld_path #def", "your proxy info (aka voms-proxy-info)\\n' msg += '\\nQuery results are accessible via %s", "path = '%s/%s/cms/cmssw/%s' % (base, rel_arch, rel) os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel) os.environ['CMSSW_RELEASE_BASE'] =", "run_info from cmssh.cmsfs import CMSMGR, apply_filter, validate_dbs_instance from cmssh.cmsfs import release_info, run_lumi_info from", "e.g. ls, cp, supported in cmssh. \"\"\" # system modules import os import", "rm T3_US_Cornell:/store/user/file.root\\n' msg += msg_green('cp ') \\ + ' copy file/LFN, e.g. cp", "if pat_dataset.match(arg): reqmgr(arg.replace('dataset=', '')) def cms_lumi(arg): \"\"\" Return lumi info for a given", "path dst = '%s/DataFormats/FWLite' % idir os.symlink(link, dst) for lib in ['external', 'lib']:", "else: print 'CMSSW architectures for %s:' \\ % os.uname()[0].replace('Darwin', 'OSX') for name in", "pat_user.match(arg): user = arg.replace('user=', '') print_info('Dashboard information, user=%s' % user) res = jobsummary({'user':", "msg += msg_green('root ') + ' invoke ROOT\\n' msg += msg_green('du ') \\", "gist %s' % res['html_url'] if isinstance(res, dict): ticket = pprint.pformat(res) else: ticket =", "given command within CMSSW environment \"\"\" vdir = os.environ.get('VO_CMS_SW_DIR', None) arch = os.environ.get('SCRAM_ARCH',", "'rmdir %s/foo' % sename, 'ls %s' % sename, ] cmd_list += ['cp %s", "lists all registered cmssh commands in current shell. 
Examples: cmssh> cmshelp commands \"\"\"", "Execute CRAB command, help is available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\" msg = \\ 'CRAB", "with system CMSSW install area' print msg return # check if given release/architecture", "supported entities: dataset, block, file, run, lumi, site, user \"\"\" lookup(arg) def cms_du(arg):", "') \\ + ' list file/LFN, e.g. ls local.file or ls /store/user/file.root\\n' msg", "cmssh> cp file.root T3_US_Cornell:/store/user/name cmssh> cp /store/mc/file.root T3_US_Cornell:/store/user/name cmssh> cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha \"\"\"", "entities (se, site, dataset, block, run, release, file). Examples: cmssh> ls # UNIX", "time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime(time.time())) files = {key: {'content': desc}} res = post_ticket(key, files) if", "user based directory structure. Examples: cmssh> cmsrel # reset CMSSW environment to cmssh", "\"\"\" msg = \\ 'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) # check if release version", "is %s\" % os.environ.get('HTTPDEBUG', 0)) def cms_find(arg): \"\"\" Perform lookup of given query", "or arg == '0': ipth.debug = False else: ipth.debug = True # CMSSW", "command in subprocess.call since it invokes # wget/curl and it can be spawned", "rmdir T3_US_Cornell:/store/user/foo\\n' msg += msg_green('ls ') \\ + ' list file/LFN, e.g. ls", "os.environ['SCRAM_ARCH'], rel) fix_so(idir) print \"Create user area for %s release ...\" % rel", "msg += msg_green('du ') \\ + ' display disk usage for given site,", "')': arg = arg[1:-1] for case in [arg, 'cms_'+arg, 'cms'+arg]: func = ipython.find_magic(case)", "'debug_http 0'] cmd_list += ['ls', 'mkdir ttt', 'ls -l', 'rmdir ttt', 'ls'] cmd_list", "T3_US_Cornell', 'ls T3_US_Cornell'] cmd_list += ['ls %s' % sename, 'mkdir %s/foo' % sename,", "files/dirs to/from local files/dirs or CMS storate elements. 
Examples: cmssh> cp file1 file2", "command to install given CMSSW release. Examples: cmssh> install CMSSW_5_2_4 \"\"\" rel =", "return 'no match' def get_apt_init(arch): \"Return proper apt init.sh for given architecture\" apt_dir", "following options -rfc -voms cms:/cms -key <userkey.pem> -cert <usercert.pem> \"\"\" cert = os.path.join(os.environ['HOME'],", "break if not rel_arch: msg = 'Release ' + msg_red(rel) msg += '", "interactive feature if platform() == 'osx': idir = '%s/%s/cms/cmssw/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'],", "rmdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_mkdir(arg): \"\"\" cmssh mkdir command creates directory", "'ok': msg = '\\nCheck release architecture status: %s' % status print msg return", "%s | grep -v -i fwlite' % (script, rel) run(cmd) if rel.lower().find('patch') !=", "information, user=%s' % user) res = jobsummary({'user': user}) if res: RESMGR.assign(res) list_results(res, debug=True,", "# check if release version and work area are set (should be set", "-l %s' % (pkgs_init, arg.strip()) run(cmd) def cms_xrdcp(arg): \"\"\" cmssh command to run", "files = {key: {'content': desc}} res = post_ticket(key, files) if res.has_key('html_url'): print_status('New gist", "given CMS entity, ' \\ + 'e.g. info run=160915\\n' msg += msg_green('das ')", "= 0 msg = 'Bootstrap %s ...' 
% arch # run bootstrap command", "'lfn', 'dataset', 'block', 'run', 'release', 'file'] for item in entities: if arg.startswith(item +", "'Would you like to create one' if user_input(msg, default='N'): with open('crab.cfg', 'w') as", "from cmssh.utils import list_results, check_os, unsupported_linux, access2file from cmssh.utils import osparameters, check_voms_proxy, run,", "CMSSW_5_2_4 \"\"\" ipython = get_ipython() rel = rel.strip() if not rel or rel", "cmshelp commands \"\"\" mdict = get_ipython().magics_manager.lsmagic() cmds = [k for k, v in", "installed within cmssh, proceed' \\ % (rel, arch) if user_input(msg, default='N'): os.environ['SCRAM_ARCH'] =", "os.makedirs(pdir) # Set cmssh prompt prompt = 'cms-sh' ipython.prompt_manager.in_template = '%s|\\#> ' %", "<all> Examples: cmssh> releases # show installed CMSSW releases cmssh> releases list #", "'please use ' + msg_green('cmsrel') + ' command and ' msg += 'CMS", "= '%s/install/lib/release_%s' % (root, lib) if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(link, dst) #", "copy_lfn(orig, dst, debug, background, overwrite) print_status(status) except: traceback.print_exc() def cms_architectures(arch_type=None): \"Return list of", "mdict = get_ipython().magics_manager.lsmagic() cmds = [k for k, v in mdict['line'].items() if v.func_name.find('cms_')!=-1]", "cmssh command to run ROOT xrdcp via cmssh shell Examples: cmssh> xrdcp /a/b/c.root", "arch) print_warning(msg) msg = '\\n%s/%s is not installed within cmssh, proceed' \\ %", "'') dst = '%s/install/lib/release_root' % root if os.path.exists(dst): if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst)", "(arch, rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): rel_arch = arch break if not rel_arch: msg", "status = rmdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_mkdir(arg): \"\"\" cmssh mkdir command", "provide a method to execute it in a shell \"\"\" def 
__init__(self, cmd):", "rel_dir = '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return 'ok' output =", "+= ' command to list available releases.\\n' msg += 'Use ' + msg_green('install", "# show installed CMSSW releases cmssh> releases list # list available CMSSW releases", "DBS instance Examples: cmssh> dbs_instance cmssh> dbs_instance cms_dbs_prod_global \"\"\" arg = arg.strip() if", "arg = arg.replace('lfn=', '').replace('run=', '') res = run_lumi_info(arg, debug) def cms_json(arg): \"Print or", "= 'Release ' + msg_red(rel) msg += ' is not yet installed on", "== '0': ipth.debug = False else: ipth.debug = True # CMSSW commands def", "CMS storage element. Examples: cmssh> rmdir foo cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg =", "does not exists' % dst) else: raise Exception('Not implemented yet') def cms_rmdir(arg): \"\"\"", "of crab.cfg crab_dir = os.path.join(work_area, 'crab') crab_cfg = os.path.join(crab_dir, 'crab.cfg') if not os.path.isdir(crab_dir):", "transfer jobs - site, which lists jobs at given site - dashboard, which", "res = [] try: debug = get_ipython().debug except: debug = 0 orig_arg =", "pager) Examples: cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh> read config.txt \"\"\" try:", "# reset CMSSW environment to cmssh one cmssh> cmsrel CMSSW_5_2_4 \"\"\" ipython =", "elements. 
Examples: cmssh> cp file1 file2 cmssh> cp file.root T3_US_Cornell:/store/user/name cmssh> cp /store/mc/file.root", "shell=True) def installed_releases(): \"Print a list of releases installed on a system\" _osname,", "check_release_arch(rel) if status != 'ok': msg = '\\nCheck release architecture status: %s' %", "file\" root = os.environ.get('CMSSH_ROOT') path = os.path.join(root, 'cmssh/DEMO') with open(path, 'r') as demo_file:", "= [k for k, v in mdict['line'].items() if v.func_name.find('cms_')!=-1] cmds.sort() for key in", "arg: # print_info(\"Set debug level to %s\" % arg) # DEBUG.set(arg) # else:", "0 if not arg: print_error(\"Usage: rmdir <options> dir\") if os.path.exists(arg): run(\"rmdir %s\" %", "# check if given release/architecture is in place status = check_release_arch(rel) if status", "else: print \"Invalid DBS instance\" else: msg = \"DBS instance is set to:", "%s' % fname) try: debug = get_ipython().debug except: debug = 0 if debug", "call=True) cmd = 'source `find %s/%s/external/apt -name init.sh | tail -1`; ' \\", "if status != 'ok': msg = '\\nCheck release architecture status: %s' % status", "args.strip()) run(cmd) def subprocess(self, args=''): \"Execute given command in original shell environment\" cmd", "csmsh> find site dataset=/Cosmics/CRUZET3-v1/RAW cmssh> find config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh> find run=160915 cmssh> find", "CMSSW release\" args = {'release': rel} releases = get_data(tc_url('py_getReleaseArchitectures'), args) output = []", "gcc_init, root_init) cmd = '%s root -l %s' % (pkgs_init, arg.strip()) run(cmd) def", "% rel cmd = 'source %s; apt-get install cms+cmssw+%s' % (script, rel) subprocess.call(cmd,", "rel) if os.path.isdir(os.path.join(cmssw_dir, rel + '/src')): os.chdir(os.path.join(cmssw_dir, rel + '/src')) else: os.chdir(cmssw_dir) cmd", "for pkg in pkgs: link = '%s/src/FWCore/%s/python' % (path, pkg) dst = '%s/FWCore/%s'", "+ \\ msg_blue('pip <search|(un)install> 
<package>') return msg def cms_help(arg=None): \"\"\" cmshelp command Examples:", "not a SE:dir pattern run(\"mkdir %s\" % arg) else: try: status = mkdir(arg,", "% rel cmd = 'source %s; apt-get install cms+cmssw-patch+%s' % (script, rel) else:", "cmd = split[0] args = split[-1] mgr.run_line_magic(cmd, args) def cms_info(arg): \"\"\" cmssh info", "status = copy_lfn(orig, dst, debug, background, overwrite) print_status(status) except: traceback.print_exc() def cms_architectures(arch_type=None): \"Return", "idir.find(osarch) != -1: rdir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/cms/cmssw' % idir) if os.path.isdir(rdir): for", "def execute(self, args=''): \"Execute given command in current shell environment\" cmd = '%s", "commands in current shell. Examples: cmssh> cmshelp commands \"\"\" mdict = get_ipython().magics_manager.lsmagic() cmds", "!= -1: arg, out = orig_arg.split('>', 1) out = out.strip() arg = arg.strip()", "try: status = copy_lfn(orig, dst, debug, background, overwrite) print_status(status) except: traceback.print_exc() def cms_architectures(arch_type=None):", "gen) RESMGR.assign(res) list_results(res, debug) def verbose(arg): \"\"\" Set/get verbosity level \"\"\" arg =", "local.file or rm T3_US_Cornell:/store/user/file.root\\n' msg += msg_green('cp ') \\ + ' copy file/LFN,", "res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_jobs(arg=None): \"\"\" cmssh jobs command lists local", "if not os.path.exists(dst): print_error('File %s does not exists' % dst) else: raise Exception('Not", "cms_json.read() def integration_tests(_arg): \"Run series of integration tests for cmssh\" for fname in", "[a for a in tc_architectures(arch_type)] return archs def cms_arch(arg=None): \"\"\" Show or set", "output: return ', '.join(output) osname, osarch = osparameters() if osname == 'osx' and", "= \"eval `scramv1 runtime -sh`; %s\" % fname cmd = fname ipython.register_magic_function(Magic(cmd).execute, 'line',", "List of supported entities: 
dataset, block, file, run, lumi, site, user \"\"\" lookup(arg)", "since it invokes # wget/curl and it can be spawned into serate process,", "def cms_das(query): \"\"\" cmssh command which queries DAS data-service with provided query. Examples:", "not exist on local filesystem or in DBS\" % arg print_error(msg) elif pat_run.match(arg):", "cmssh.github import get_tickets, post_ticket from cmssh.cms_urls import dbs_instances, tc_url from cmssh.das import das_client", "e.g. dataset, block, file, run. Examples: cmssh> info dataset=/a/b/c cmssh> info /a/b/c cmssh>", "supported under %s' \\ % (rel, arch) print_warning(msg) if arch != os.environ['SCRAM_ARCH']: msg", "print \"Switch to SCRAM_ARCH=%s\" % arg os.environ['SCRAM_ARCH'] = arg def cms_apt(arg=''): \"Execute apt", "') \\ + ' show or switch to given CMSSW architecture, accept <list|all>", "if not user_input('Proceed', default='N'): return email = raw_input('Your Email : ') if not", "/path/foo or rmdir T3_US_Cornell:/store/user/foo\\n' msg += msg_green('ls ') \\ + ' list file/LFN,", "\"\"\" check_voms_proxy() background = False orig_arg = arg arg = arg.strip() try: last_arg", "% rel # final message print \"%s is ready, cwd: %s\" % (rel,", "msg def cms_read(arg): \"\"\" cmssh command to read provided HTML page (by default", "+= msg_green('mkdir/rmdir ') + ' mkdir/rmdir command, ' \\ + 'e.g. mkdir /path/foo", "\"\"\" if arg: if arg.strip() == 'commands': cms_commands() return ipython = get_ipython() if", "uinput.strip() == 'EOF': break desc += uinput + '\\n' except KeyboardInterrupt: break if", "\\ + ' query DAS and return data in JSON format\\n' msg +=", "res = run_info(arg, debug) elif pat_release.match(arg): arg = arg.replace('release=', '') res = release_info(arg,", "provides information about jobs at give site or for given user. 
It accepts", "not arg: print \"Current architecture: %s\" % os.environ['SCRAM_ARCH'] archs = [] for name", "cmssh> releases list # list available CMSSW releases on given platform cmssh> releases", "cmssh> find user=oliver List of supported entities: dataset, block, file, run, lumi, site,", "DBS global instance\\n' msg += msg_green('mkdir/rmdir ') + ' mkdir/rmdir command, ' \\", "pat_release if not pat.match(rel): msg = 'Fail to validate release name \"%s\"' %", "+= msg_green('das_json ') \\ + ' query DAS and return data in JSON", "'' else: cmd = split[0] args = split[-1] mgr.run_line_magic(cmd, args) def cms_info(arg): \"\"\"", "print msg while True: try: uinput = raw_input() if uinput.strip() == 'EOF': break", "to apt-get interactive feature if platform() == 'osx': idir = '%s/%s/cms/cmssw/%s' \\ %", "grid or voms\\n' msg += msg_green('vomsinit ') \\ + ' setup your proxy", "via cmssh shell Examples: cmssh> xrdcp /a/b/c.root file:////tmp.file.root \"\"\" dyld_path = os.environ.get('DYLD_LIBRARY_PATH', None)", "# UNIX command cmssh> ls -l local_file cmssh> ls T3_US_Cornell:/store/user/valya cmssh> ls run=160915", "`find %s -name init.sh | tail -1`;' % pkg_dir if not os.path.isdir(pkg_dir): cmd", "'%s/external/apt' % arch) dirs = os.listdir(apt_dir) dirs.sort() name = 'etc/profile.d/init.sh' script = os.path.join(os.path.join(apt_dir,", "arg) else: try: status = mkdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_ls(arg): \"\"\"", "== '': print_info(\"Verbose level is %s\" % ipth.debug) else: if arg == 0", "scram command\\n' msg += msg_green('cmsRun ') \\ + ' cmsRun command for release", "print_error(msg) return desc = '' msg = 'Type your problem, attach traceback, etc.", "in ['external', 'lib']: link = '%s/%s/%s' % (path, lib, rel_arch) dst = '%s/install/lib/release_%s'", "release_info(arg, debug) elif startswith: msg = 'No pattern is allowed for %s look-up'", "' and hit ' + msg_blue('Enter') + '\\n' print msg while True: try:", 
"cmssh> arch list # show all CMSSW architectures for given platform \"\"\" if", "= release_info(arg, debug) elif startswith: msg = 'No pattern is allowed for %s", "else: msg = \"\\nYou don't have yet CMSSW release installed on your system.\"", "arg == 'dashboard': userdn = os.environ.get('USER_DN', None) if userdn: user = get_dashboardname(userdn) print_info('Dashboard", "item if os.path.isfile(orig_arg) or os.path.isdir(orig_arg): cmd = 'ls ' + orig_arg run(cmd, shell=True)", "arg.replace('run=', '') res = run_info(arg, debug) elif pat_release.match(arg): arg = arg.replace('release=', '') res", "cmssh> jobs dashboard cmssh> jobs user=my_cms_user_name \"\"\" res = None try: debug =", "else: if check_os(name): print name else: cms_archs = cms_architectures('all') if arg not in", "res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_config(arg): \"\"\" Return configuration object for given", "= \\ 'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) # check if release version and work", "use cmsenv\\n' msg += 'please use ' + msg_green('cmsrel') + ' command and", "cms_cp(arg): \"\"\" cmssh cp command copies local files/dirs to/from local files/dirs or CMS", "shell environment\" cmd = '%s %s' % (self.cmd, args.strip()) subprocess.call(cmd, shell=True) def installed_releases():", "'').strip() src, dst = arg.rsplit(' ', 1) if dst.find('&') != -1: background =", "'otherwise it will be posted as anonymous gist ticket' print_info(msg) if not user_input('Proceed',", "% rel) msg += ' command to install given release.' 
print msg return", "jobs cmssh> jobs list cmssh> jobs site=T2_US_UCSD cmssh> jobs dashboard cmssh> jobs user=my_cms_user_name", "+= '\\nInstalled releases: ' + msg_green(', '.join(releases)) print msg return cmd = \"eval", "os.environ.get('CMSSW_WORKAREA', None) if not rel or not work_area: msg = 'In order to", "arg == 'all' or arg == 'list': if arg == 'all': print 'CMSSW", "cmssh> ticket new # post new ticket from cmssh # or post it", "cmssh> root -l \"\"\" pcre_init = pkg_init('external/pcre') gcc_init = pkg_init('external/gcc') root_init = pkg_init('lcg/root')", "print \"Set CMSSH pager to %s\" % arg else: val = os.environ.get('CMSSH_PAGER', None)", "if not arg: print_error(\"Usage: mkdir <options> dir\") if arg.find(':') == -1: # not", "mtype[0] ipython = get_ipython() magic = ipython.find_line_magic('edmFileUtil') if magic: if arg[0] == '/':", "list cmssh> jobs site=T2_US_UCSD cmssh> jobs dashboard cmssh> jobs user=my_cms_user_name \"\"\" res =", "to setup CMSSW environment. \"\"\" cmd = 'cmsRun %s' % arg cmsexe(cmd) def", "# print_info(\"Set debug level to %s\" % arg) # DEBUG.set(arg) # else: #", "+= '\\ncmssh command help : ' + msg_blue('cmshelp <command>') msg += '\\nInstall python", "voms-proxy-init on behalf of the user Examples: cmssh> vomsinit By default it applies", "print_info('Dashboard information, user=%s' % user) res = jobsummary({'user': user}) if res: RESMGR.assign(res) list_results(res,", "file:////tmp.file.root \"\"\" dyld_path = os.environ.get('DYLD_LIBRARY_PATH', None) root_path = os.environ['DEFAULT_ROOT'] if dyld_path: os.environ['DYLD_LIBRARY_PATH'] =", "command to show or set DBS instance Examples: cmssh> dbs_instance cmssh> dbs_instance cms_dbs_prod_global", "= '' return cmd def cms_root(arg): \"\"\" cmssh command to run ROOT within", "pkg_init('lcg/root') pkgs_init = '%s %s %s' % (pcre_init, gcc_init, root_init) cmd = '%s", "run=160915 cmssh> find lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> find lumi run=190704 cmssh> find 
user=oliver List", "+= ['ls', 'mkdir ttt', 'ls -l', 'rmdir ttt', 'ls'] cmd_list += ['ls dataset=%s'", "list of CMSSW architectures (aka SCRAM_ARCH)\" archs = [a for a in tc_architectures(arch_type)]", "tc_architectures(arch_type)] return archs def cms_arch(arg=None): \"\"\" Show or set CMSSW architecture. Optional parameters", "os.getcwd() # check if src still has options and user asked for -f", "ticket new # post new ticket from cmssh # or post it at", "= site_info(arg, debug) elif pat_lfn.match(arg): arg = arg.replace('file=', '') arg = arg.replace('lfn=', '')", "\"Execute given command in current shell environment\" cmd = '%s %s' % (self.cmd,", "cmd = \"eval `scramv1 runtime -sh`; %s\" % cmd run(cmd, shell=True, call=True) def", "= '%s/%s/cms/cmssw/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel) fix_so(idir) print \"Create user area for", "| grep ^ROOTSYS=' stdout, stderr = execmd(cmd) if stderr: print \"While executing cmd=%s\"", "if orig_arg.find('>') != -1: arg, out = orig_arg.split('>', 1) out = out.strip() arg", "arg.replace('user=', '') print_info('Dashboard information, user=%s' % user) res = jobsummary({'user': user}) if res:", "%s' % (pkgs_init, arg.strip()) run(cmd) def cms_xrdcp(arg): \"\"\" cmssh command to run ROOT", "+= 'appropriately and re-run crab command' print_info(msg) print \"cwd:\", os.getcwd() return if os.uname()[0]", "'w') as config: config.write(crabconfig()) msg = 'Your crab.cfg has been created, please edit", "+ 'e.g. rm local.file or rm T3_US_Cornell:/store/user/file.root\\n' msg += msg_green('cp ') \\ +", "\"\"\" cmssh disk utility cmssh command. 
Examples: cmssh> du # UNIX command cmssh>", "len(glob.glob(dst)): cmd = \"rm %s\" % arg run(cmd) else: if pat_lfn.match(arg.split(':')[-1]): status =", "cmssh.cern_html import read from cmssh.dashboard import jobsummary from cmssh.reqmgr import reqmgr from cmssh.cms_objects", "% (rel, idir)) if releases: releases.sort() print \"\\nInstalled releases:\" for rel in releases:", "coding: ISO-8859-1 -*- #pylint: disable-msg=W0702 \"\"\" Set of UNIX commands, e.g. ls, cp,", "% arg) else: fname = os.environ.get('CMS_JSON') print_info('CMS JSON: %s' % fname) try: debug", "cms_rmdir(arg): \"\"\" cmssh rmdir command removes directory from local file system or CMS", "msg return # check if we have stand-alone installation if os.environ.get('CMSSH_CMSSW', None): msg", "show or set DBS instance Examples: cmssh> dbs_instance cmssh> dbs_instance cms_dbs_prod_global \"\"\" arg", "arg == 'None' or arg == 'False': if os.environ.has_key('CMSSH_PAGER'): del os.environ['CMSSH_PAGER'] else: os.environ['CMSSH_PAGER']", "site = arg.replace('site=', '') print_info('Dashboard information, site=%s' % site) res = jobsummary({'site': site})", "= 'Initialize %s apt repository ...' 
% arch run(cmd, sdir, msg=msg, debug=debug, shell=True)", "def __init__(self, cmd): self.cmd = cmd def execute(self, args=''): \"Execute given command in", "it ' msg += 'appropriately and re-run crab command' print_info(msg) print \"cwd:\", os.getcwd()", "return 'OSX/ia32 is not supported in CMSSW' return 'no match' def get_apt_init(arch): \"Return", "os.environ['HTTPDEBUG'] = arg else: print_info(\"HTTP debug level is %s\" % os.environ.get('HTTPDEBUG', 0)) def", "'%s release is not officially supported under %s' \\ % (rel, arch) print_warning(msg)", "'file2.root']: if os.path.isfile(fname): os.remove(fname) lfn = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2 = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset", "name,' msg += ' e.g. CMSSW_X_Y_Z<_patchN>' print msg return # check if we", "%s file.root' % lfn, 'ls', 'cp file.root %s' % sename, 'ls %s' %", "cms_info(arg): \"\"\" cmssh info command provides information for given meta-data entity, e.g. dataset,", "current and installed architecture(s) cmssh> arch all # show all known CMSSW architectures", "CMSSW architectures cmssh> arch list # show all CMSSW architectures for given platform", "JSON: %s' % fname) try: debug = get_ipython().debug except: debug = 0 if", "0 if debug and access2file(fname): with open(fname, 'r') as cms_json: print cms_json.read() def", "\"Current architecture: %s\" % os.environ['SCRAM_ARCH'] archs = [] for name in os.listdir(os.environ['VO_CMS_SW_DIR']): if", "query. Examples: cmssh> das dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx = 0 limit", "break desc += uinput + '\\n' except KeyboardInterrupt: break if not desc: msg", "= item.split(' ', 1) if len(split) == 1: cmd = item args =", "command which lists all registered cmssh commands in current shell. 
Examples: cmssh> cmshelp", "CMSSW environment \"\"\" vdir = os.environ.get('VO_CMS_SW_DIR', None) arch = os.environ.get('SCRAM_ARCH', None) if not", "of job queue or CMS jobs\\n' msg += msg_green('read ') \\ + '", "local job queue or provides information about jobs at give site or for", "title, ticket) else: res = get_tickets(arg) RESMGR.assign(res) pprint.pprint(res) def demo(_arg=None): \"Show cmssh demo", "traceback.print_exc() def cms_architectures(arch_type=None): \"Return list of CMSSW architectures (aka SCRAM_ARCH)\" archs = [a", "# system modules import os import re import sys import time import json", "'%s/%s/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir) cmd = 'source `find %s -name init.sh", "msg += msg_green('find ') \\ + ' search CMS meta-data (query DBS/Phedex/SiteDB)\\n' msg", "run in current shell environment # old command for reference: # cmd =", "= os.environ.get('CMSSW_RELEASES', os.getcwd()) if not os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir) root = os.environ['CMSSH_ROOT'] idir = os.environ['CMSSH_INSTALL_DIR']", "= False else: ipth.debug = True # CMSSW commands def bootstrap(arch): \"Bootstrap new", "(os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel) fix_so(idir) print \"Create user area for %s release ...\" %", "+= msg_green('du ') \\ + ' display disk usage for given site, e.g.", "it setups CMSSW environment and creates user based directory structure. 
Examples: cmssh> cmsrel", "cmssh> ls -l local_file cmssh> ls T3_US_Cornell:/store/user/valya cmssh> ls run=160915 \"\"\" arg =", "\"\"\" arg = arg.strip() res = [] try: debug = get_ipython().debug except: debug", "= arg.replace('file=', '') arg = arg.replace('lfn=', '') res = file_info(arg, debug) elif pat_block.match(arg):", "sename, 'ls %s' % sename, 'rmdir %s/foo' % sename, 'ls %s' % sename,", "cmd_list += ['releases list', 'arch list', 'jobs', 'ls'] cmd_list += ['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] mgr", "'CMSSW architectures for %s:' \\ % os.uname()[0].replace('Darwin', 'OSX') for name in cms_architectures('all'): if", "given query in CMS data-services. \"\"\" arg = arg.strip() debug = get_ipython().debug args", "instance is set to: %s\" \\ % os.environ.get('DBS_INSTANCE', 'global') print msg print '\\nAvailable", "command which queries DAS data-service with provided query. Examples: cmssh> das dataset=/ZMM* \"\"\"", "msg += 'please use ' + msg_green('cmsrel') + ' command and ' msg", "RESMGR.assign([res]) pprint.pprint(res) def cms_vomsinit(_arg=None): \"\"\" cmssh command which executes voms-proxy-init on behalf of", "which lists jobs of current user - user, which lists jobs of given", "as cms_json: print cms_json.read() def integration_tests(_arg): \"Run series of integration tests for cmssh\"", "the following list\\n' msg += ', '.join(cms_archs) raise Exception(msg) print \"Switch to SCRAM_ARCH=%s\"", "CMSMGR.lookup(arg) else: gen = CMSMGR.lookup(args[0].strip()) for flt in args[1:]: res = apply_filter(flt.strip(), gen)", "= split[0] args = split[-1] mgr.run_line_magic(cmd, args) def cms_info(arg): \"\"\" cmssh info command", "\"Installing cms+cmssw-patch+%s ...\" % rel cmd = 'source %s; apt-get install cms+cmssw-patch+%s' %", "return 'ok' else: msg = '%s/%s rejected by user' % (rel, arch) output.append(msg)", "cmssh.iprint import msg_red, msg_green, msg_blue from cmssh.iprint import print_warning, print_error, print_status, 
print_info from", "args[1:]: res = apply_filter(flt.strip(), gen) RESMGR.assign(res) list_results(res, debug) def verbose(arg): \"\"\" Set/get verbosity", "(aka voms-proxy-info)\\n' msg += '\\nQuery results are accessible via %s function, e.g.\\n' \\", "of given query in CMS data-services. cmssh find command lookup given query in", "ipython = get_ipython() rel = rel.strip() if not rel or rel in ['reset',", "pat.match(dst): if background: cmd = 'cp %s' % orig_arg subprocess.call(cmd, shell=True) else: run(\"cp", "'line', name) # Set cmssh prompt ipython.prompt_manager.in_template = '%s|\\#> ' % rel #", "%s\" % arg else: val = os.environ.get('CMSSH_PAGER', None) msg = \"cmssh pager is", "+= msg_green('vomsinit ') \\ + ' setup your proxy (aka voms-proxy-init)\\n' msg +=", "else: if arg == 0 or arg == '0': ipth.debug = False else:", "item in entities: if arg.startswith(item + '='): startswith = item if os.path.isfile(orig_arg) or", "\"Run series of integration tests for cmssh\" for fname in ['file1.root', 'file2.root']: if", "if not arg: print_error(\"Usage: cp <options> source_file target_{file,directory}\") pat = pat_se orig =", "if arg: if arg.strip() == 'commands': cms_commands() return ipython = get_ipython() if arg[0]", "inst def cms_help_msg(): \"\"\"cmsHelp message\"\"\" msg = 'Available cmssh commands:\\n' msg += msg_green('find", "rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\" arg = arg.strip() try: debug = get_ipython().debug except: debug =", "cmd = \"voms-proxy-init -rfc -voms cms:/cms -key %s -cert %s\" % (key, cert)", "os.path.join(cmssw_dir, rel) if os.path.isdir(os.path.join(cmssw_dir, rel + '/src')): os.chdir(os.path.join(cmssw_dir, rel + '/src')) else: os.chdir(cmssw_dir)", "is not required to use cmsenv\\n' msg += 'please use ' + msg_green('cmsrel')", "cmssh> find file dataset=/Cosmics/CRUZET3-v1/RAW csmsh> find site dataset=/Cosmics/CRUZET3-v1/RAW cmssh> find config 
dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh>", "os.path.exists(pdir): shutil.rmtree(pdir) os.mkdir(pdir) touch(os.path.join(pdir, '__init__.py')) pkgs = ['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger', 'MessageService', 'Modules',", "os.listdir(reldir): fname = os.path.join(reldir, name) if name.find('edm') == 0 and os.path.isfile(fname): # we", "') \\ + ' switch to given CMSSW release and setup its environment\\n'", "list', 'jobs user=AikenOliver'] cmd_list += ['releases list', 'arch list', 'jobs', 'ls'] cmd_list +=", "') \\ + ' display disk usage for given site, e.g. du T3_US_Cornell\\n'", "from the following list\\n' msg += ', '.join(cms_archs) raise Exception(msg) print \"Switch to", "invoke ROOT\\n' msg += msg_green('du ') \\ + ' display disk usage for", "os.path.join(root_path, 'lib') cmd = '%s/xrdcp %s' % (os.path.join(root_path, 'bin'), arg.strip()) run(cmd) if dyld_path:", "file=%s' % lfn] cmd_list += ['ls %s' % dataset, 'info %s' % dataset]", "0 limit = 0 debug = 0 das_client(host, query, idx, limit, debug, 'plain')", "release in question\\n' msg += '\\nAvailable GRID commands: <cmd> either grid or voms\\n'", "'Use ' + msg_green('install %s' % rel) msg += ' command to install", "will run in current shell environment # old command for reference: # cmd", "rm <options> source_file\") dst = arg.split()[-1] if os.path.exists(dst) or len(glob.glob(dst)): cmd = \"rm", "\\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir) cmd = 'source `find %s -name init.sh |", "+ ' display disk usage for given site, e.g. 
du T3_US_Cornell\\n' msg +=", "'In order to run crab command you must ' msg += 'run '", "path = os.path.join(root, 'cmssh/DEMO') with open(path, 'r') as demo_file: print demo_file.read() def results():", "cmssh> cmshelp commands cmssh> cmshelp ls \"\"\" if arg: if arg.strip() == 'commands':", "CMSSW commands def bootstrap(arch): \"Bootstrap new architecture\" swdir = os.environ['VO_CMS_SW_DIR'] arch = os.environ['SCRAM_ARCH']", "utils for given release ipython = get_ipython() rdir = '%s/bin/%s' % (rel_dir, rel_arch)", "for given release ipython = get_ipython() rdir = '%s/bin/%s' % (rel_dir, rel_arch) reldir", "all registered cmssh commands in current shell. Examples: cmssh> cmshelp commands \"\"\" mdict", "to given release os.environ['CMSSW_VERSION'] = rel os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel) if os.path.isdir(os.path.join(cmssw_dir, rel", "%s' % (self.cmd, args.strip()) subprocess.call(cmd, shell=True) def installed_releases(): \"Print a list of releases", "= 'eval `scramv1 runtime -sh`; env | grep ^ROOTSYS=' stdout, stderr = execmd(cmd)", "arg.replace('site=', '') res = list_se(arg, debug) elif pat_site.match(arg): arg = arg.replace('site=', '') res", "= os.environ.get('CMSSH_PAGER', None) msg = \"cmssh pager is set to: %s\" % val", "in ['FWCore', 'DataFormats']: pdir = '%s/%s' % (idir, pkg) if os.path.exists(pdir): shutil.rmtree(pdir) os.mkdir(pdir)", "set to: %s\" \\ % os.environ.get('DBS_INSTANCE', 'global') print msg print '\\nAvailable DBS instances:'", "print r, type(r)\\n' msg += '\\nList cmssh commands : ' + msg_blue('commands') msg", "correct release name,' msg += ' e.g. CMSSW_X_Y_Z<_patchN>' print msg return # check", "CMS entity, ' \\ + 'e.g. 
info run=160915\\n' msg += msg_green('das ') +", "+ msg_green('releases') msg += ' command to list available releases.\\n' msg += 'Use", "not status: msg = '%s release is not officially supported under %s' \\", "install CMSSW_5_2_4 \"\"\" rel = rel.strip() pat = pat_release if not pat.match(rel): msg", "arch if not os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch) return 'ok' else: msg = '%s/%s", "'Modules', 'ParameterSet', 'PythonUtilities', 'Services', 'Utilities'] for pkg in pkgs: link = '%s/src/FWCore/%s/python' %", "command in current shell environment\" cmd = '%s %s' % (self.cmd, args.strip()) run(cmd)", "setup its environment\\n' msg += msg_green('arch ') \\ + ' show or switch", "from cmssh.utils import execmd, touch, platform, fix_so from cmssh.cmsfs import dataset_info, block_info, file_info,", "https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] mgr = get_ipython() for item in cmd_list: print_info(\"Execute %s\" % item) split", "'rmdir ttt', 'ls'] cmd_list += ['ls dataset=%s' % dataset, 'ls run=%s' % run,", "magic: if arg[0] == '/': cmd = '-e -f file:///%s' % fname else:", "arg = arg.replace('file=', '') arg = arg.replace('lfn=', '') res = file_info(arg, debug) elif", "cms_archs = cms_architectures('all') if arg not in cms_archs: msg = 'Wrong architecture, please", "post it at https://github.com/vkuznet/cmssh/issues/new \"\"\" if arg == 'new': msg = 'You can", "ipython.run_line_magic('edmFileUtil', cmd) if debug: if ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent', fname) else: cms_ls(arg) def cms_cp(arg): \"\"\"", "flt = None startswith = None entities = \\ ['se', 'site', 'lfn', 'dataset',", "+ ' switch to given CMSSW release and setup its environment\\n' msg +=", "name) if name.find('edm') == 0 and os.path.isfile(fname): # we use Magic(cmd).execute we don't", "in CMS data-services. cmssh find command lookup given query in CMS data-services. 
Examples:", "user asked for -f options = src.split(' ') if len(options) > 1 and", "service please ensure that your usercert.pem is mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\" if not", "and returns results in JSON data format Examples: cmssh> das_json dataset=/ZMM* \"\"\" host", "execmd, touch, platform, fix_so from cmssh.cmsfs import dataset_info, block_info, file_info, site_info, run_info from", "arg == 'all': print name else: if check_os(name): print name else: cms_archs =", "holds given command and provide a method to execute it in a shell", "are accessible via %s function, e.g.\\n' \\ % msg_blue('results()') msg += ' find", "return # set release architecture os.environ['SCRAM_ARCH'] = rel_arch # setup environment cmssw_dir =", "Examples: cmssh> find dataset=/ZMM* cmssh> find file dataset=/Cosmics/CRUZET3-v1/RAW csmsh> find site dataset=/Cosmics/CRUZET3-v1/RAW cmssh>", "\"\"\" Execute CRAB command, help is available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\" msg = \\", "run, 'ls file=%s' % lfn] cmd_list += ['ls %s' % dataset, 'info %s'", "show all CMSSW architectures for given platform \"\"\" if not arg: print \"Current", "'Release ' + msg_red(rel) msg += ' is not yet installed on your", "in os.listdir(rdir): releases.append('%s/%s' % (rel, idir)) if releases: releases.sort() print \"\\nInstalled releases:\" for", "+ msg_blue('cmsrel') + ' command' print_error(msg) return # check existence of crab.cfg crab_dir", "to %s DBS instance\" % arg else: print \"Invalid DBS instance\" else: msg", "%s\" % arg) # DEBUG.set(arg) # else: # print_info(\"Debug level is %s\" %", "target_{file,directory}\") pat = pat_se orig = src.split(' ')[-1] if os.path.exists(orig) and not pat.match(dst):", "\"voms-proxy-info -identity\" stdout, stderr = execmd(cmd) os.environ['USER_DN'] = stdout.replace('\\n', '') def github_issues(arg=None): \"\"\"", "pprint import mimetypes import traceback import subprocess # cmssh modules from 
cmssh.iprint import", "%s' % dataset, 'info %s' % dataset] cmd_list += ['find dataset=/ZMM*', 'das dataset=/ZMM*',", "\"\"\" Show or set HTTP debug flag. Default is 0. \"\"\" arg =", "check if given release name is installed on user system rel_dir = '%s/cms/cmssw/%s'", "'\\nAvailable DBS instances:' for inst in dbs_instances(): print inst def cms_help_msg(): \"\"\"cmsHelp message\"\"\"", "user) res = jobsummary({'user': user}) elif pat_site.match(arg): site = arg.replace('site=', '') print_info('Dashboard information,", "local file system or CMS storage element. Examples: cmssh> rmdir foo cmssh> rmdir", "= arg.replace('file=', '') if arg and os.path.isfile(fname): mtype = mimetypes.guess_type(arg) if mtype[0]: print", "= [] for name in os.listdir(os.environ['VO_CMS_SW_DIR']): if check_os(name) and name.find('.') == -1: archs.append(name)", "= 'Available cmssh commands:\\n' msg += msg_green('find ') \\ + ' search CMS", "> 0 and par[0] == '-': opts.append(par) return opts class Magic(object): \"\"\" Class", "cms_architectures('all'): if arg == 'all': print name else: if check_os(name): print name else:", "== 'new': msg = 'You can post new ticket via web interface at\\n'", "mkdir command creates directory on local filesystem or remote CMS storage element. Examples:", "query, idx, limit, debug, 'plain') def cms_das_json(query): \"\"\" cmssh command which queries DAS", "<list|all> args\\n' msg += msg_green('scram ') + ' CMSSW scram command\\n' msg +=", "dataset_info, block_info, file_info, site_info, run_info from cmssh.cmsfs import CMSMGR, apply_filter, validate_dbs_instance from cmssh.cmsfs", "cmssh command which queries DAS data-service with provided query and returns results in", "disk utility cmssh command. 
Examples: cmssh> du # UNIX command cmssh> du T3_US_Cornell", "arg.strip() else: flt = None if arg: arg = arg.strip() if not arg", "msg += msg_green('das ') + ' query DAS service\\n' msg += msg_green('das_json ')", "platform() res = release_info(release=None, rfilter=arg) RESMGR.assign(res) releases = [str(r) for r in res]", "'.join(cms_archs) raise Exception(msg) print \"Switch to SCRAM_ARCH=%s\" % arg os.environ['SCRAM_ARCH'] = arg def", "it applies the following options -rfc -voms cms:/cms -key <userkey.pem> -cert <usercert.pem> \"\"\"", "try: debug = get_ipython().debug except: debug = 0 arg = arg.replace('dataset=', '').replace('file=', '').replace('block=',", "= 0 if not arg: print_error(\"Usage: cp <options> source_file target_{file,directory}\") pat = pat_se", "if arg not in cms_archs: msg = 'Wrong architecture, please choose from the", "% (self.cmd, args.strip()) run(cmd) def subprocess(self, args=''): \"Execute given command in original shell", "environment\\n' msg += msg_green('arch ') \\ + ' show or switch to given", "if not arg: print_error(\"Usage: rmdir <options> dir\") if os.path.exists(arg): run(\"rmdir %s\" % arg)", "if arg: if validate_dbs_instance(arg): os.environ['DBS_INSTANCE'] = arg print \"Switch to %s DBS instance\"", "os.path.isdir(orig_arg): cmd = 'ls ' + orig_arg run(cmd, shell=True) elif pat_se.match(arg): arg =", "local_file cmssh> rm -rf local_dir cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\" arg = arg.strip() try:", "'lib']: link = '%s/%s/%s' % (path, lib, rel_arch) dst = '%s/install/lib/release_%s' % (root,", "% fname) try: debug = get_ipython().debug except: debug = 0 if debug and", "functions. 
It holds given command and provide a method to execute it in", "`scramv1 runtime -sh`' msg = 'Within cmssh it is not required to use", "else: shutil.rmtree(dst) os.symlink(link, dst) # switch to given release os.environ['CMSSW_VERSION'] = rel os.environ['CMSSW_WORKAREA']", "== '-submit': crab_submit_remotely(rel, work_area) return cmd = 'source $CRAB_ROOT/crab.sh; crab %s' % arg", "dqueue(arg) elif arg == 'dashboard': userdn = os.environ.get('USER_DN', None) if userdn: user =", "is set to: %s\" \\ % os.environ.get('DBS_INSTANCE', 'global') print msg print '\\nAvailable DBS", "= out.strip() arg = arg.strip() else: out = None if arg: arg =", "instance, default is DBS global instance\\n' msg += msg_green('mkdir/rmdir ') + ' mkdir/rmdir", "get_ipython().debug except: debug = 0 orig_arg = arg if orig_arg.find('|') != -1: arg,", "subprocess.call(cmd, shell=True) # use subprocess due to apt-get interactive feature if platform() ==", "set location of CMS JSON file\" if arg: if access2file(arg): os.environ['CMS_JSON'] = arg", "\"Print or set location of CMS JSON file\" if arg: if access2file(arg): os.environ['CMS_JSON']", "cms_das(query): \"\"\" cmssh command which queries DAS data-service with provided query. 
Examples: cmssh>", "= arg.strip() try: last_arg = arg.split(' ')[-1].strip() if last_arg == '&': background =", "given release os.environ['CMSSW_VERSION'] = rel os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel) if os.path.isdir(os.path.join(cmssw_dir, rel +", "print \"Invalid DBS instance\" else: msg = \"DBS instance is set to: %s\"", "cmshelp cmssh> cmshelp commands cmssh> cmshelp ls \"\"\" if arg: if arg.strip() ==", "in os.listdir(os.environ['VO_CMS_SW_DIR']): if idir.find(osarch) != -1: rdir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/cms/cmssw' % idir)", "%s:' \\ % os.uname()[0].replace('Darwin', 'OSX') for name in cms_architectures('all'): if arg == 'all':", "jobsummary({'user': user}) elif pat_site.match(arg): site = arg.replace('site=', '') print_info('Dashboard information, site=%s' % site)", "arg or arg == 'list': print_info('Local data transfer') dqueue(arg) elif arg == 'dashboard':", "['releases list', 'arch list', 'jobs', 'ls'] cmd_list += ['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] mgr = get_ipython()", "'' return cmd def cms_root(arg): \"\"\" cmssh command to run ROOT within cmssh", "cmssh> releases all # show all known CMS releases, including online, tests, etc.", "add scramv1 command in front of edm one, since # execute method will", "print cms_json.read() def integration_tests(_arg): \"Run series of integration tests for cmssh\" for fname", "= dst.replace('&', '').strip() if dst == '.': dst = os.getcwd() # check if", "service\\n' msg += msg_green('das_json ') \\ + ' query DAS and return data", "= '%s/DataFormats/FWLite' % idir os.symlink(link, dst) for lib in ['external', 'lib']: link =", "with open(fname, 'r') as cms_json: print cms_json.read() def integration_tests(_arg): \"Run series of integration", "'%s/cms/cmssw/%s' % (arch, rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): rel_arch = arch break if not", "+ ' cmsRun command for release in question\\n' msg += '\\nAvailable 
GRID commands:", "\"Invalid DBS instance\" else: msg = \"DBS instance is set to: %s\" \\", "= jobsummary({'user': user}) elif pat_site.match(arg): site = arg.replace('site=', '') print_info('Dashboard information, site=%s' %", "dataset/file/block/lfn/run Examples: cmssh> lumi run=190704 cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh> lumi", "Examples: cmssh> root -l \"\"\" pcre_init = pkg_init('external/pcre') gcc_init = pkg_init('external/gcc') root_init =", "data-service with provided query and returns results in JSON data format Examples: cmssh>", "if '-cache' in arg or '-get' in arg: cmd = 'apt%s' % arg", "import subprocess # cmssh modules from cmssh.iprint import msg_red, msg_green, msg_blue from cmssh.iprint", "return ipython = get_ipython() if arg[0] == '(' and arg[-1] == ')': arg", "if arg[0] == '(' and arg[-1] == ')': arg = arg[1:-1] for case", "+= ['ls %s' % dataset, 'info %s' % dataset] cmd_list += ['find dataset=/ZMM*',", "pprint.pformat(res) else: ticket = res to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') send_email(to_user, email, title, ticket) else:", "rel) msg += ' command to install given release.' print msg return #", "provided query and returns results in JSON data format Examples: cmssh> das_json dataset=/ZMM*", "\\ msg_blue('pip <search|(un)install> <package>') return msg def cms_help(arg=None): \"\"\" cmshelp command Examples: cmssh>", "available CMSSW releases, accepts <list|all> args\\n' msg += msg_green('install ') \\ + '", "command to execute CMSSW cmsRun command. Requires cmsrel to setup CMSSW environment. 
\"\"\"", "cmssh modules from cmssh.iprint import msg_red, msg_green, msg_blue from cmssh.iprint import print_warning, print_error,", "= arg print_info('CMS_JSON: %s' % arg) else: fname = os.environ.get('CMS_JSON') print_info('CMS JSON: %s'", "cmssh> find lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> find lumi run=190704 cmssh> find user=oliver List of", "== -1: archs.append(name) if archs: print '\\nInstalled architectures:' for item in archs: print", "your action') return key = '<KEY>' % time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime(time.time())) files = {key:", "cmshelp commands cmssh> cmshelp ls \"\"\" if arg: if arg.strip() == 'commands': cms_commands()", "+= ['ls dataset=%s' % dataset, 'ls run=%s' % run, 'ls file=%s' % lfn]", "args = split[-1] mgr.run_line_magic(cmd, args) def cms_info(arg): \"\"\" cmssh info command provides information", "user) res = jobsummary({'user': user}) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_config(arg):", "-sh`; %s\" % fname cmd = fname ipython.register_magic_function(Magic(cmd).execute, 'line', name) # Set cmssh", "find lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> find lumi run=190704 cmssh> find user=oliver List of supported", "platform \"\"\" if not arg: print \"Current architecture: %s\" % os.environ['SCRAM_ARCH'] archs =", "os.path.join(path, 'install/lib/release_%s' % idir) if os.path.islink(pdir): os.remove(pdir) if os.path.isdir(pdir): shutil.rmtree(pdir) os.makedirs(pdir) # Set", "if not email: msg = \"You did your email address\" print_error(msg) return desc", "with open('crab.cfg', 'w') as config: config.write(crabconfig()) msg = 'Your crab.cfg has been created,", "+= msg_green('ls ') \\ + ' list file/LFN, e.g. ls local.file or ls", "arg) else: try: status = rmdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_mkdir(arg): \"\"\"", "is 0. 
\"\"\" arg = arg.strip() if arg: if arg not in ['0',", "= '%s/xrdcp %s' % (os.path.join(root_path, 'bin'), arg.strip()) run(cmd) if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = dyld_path", "block, file, run, lumi, site, user \"\"\" lookup(arg) def cms_du(arg): \"\"\" cmssh disk", "os.path.isdir(rdir): for rel in os.listdir(rdir): releases.append('%s/%s' % (rel, idir)) if releases: releases.sort() print", "demo file\" root = os.environ.get('CMSSH_ROOT') path = os.path.join(root, 'cmssh/DEMO') with open(path, 'r') as", "\"\"\" pcre_init = pkg_init('external/pcre') gcc_init = pkg_init('external/gcc') root_init = pkg_init('lcg/root') pkgs_init = '%s", "arch all # show all known CMSSW architectures cmssh> arch list # show", "'CMSSW architectures:' else: print 'CMSSW architectures for %s:' \\ % os.uname()[0].replace('Darwin', 'OSX') for", "%s function, e.g.\\n' \\ % msg_blue('results()') msg += ' find dataset=/*Zee*\\n' msg +=", "not user_input('Send this ticket', default='N'): print_info('Aborting your action') return key = '<KEY>' %", "get_release_arch(rel): \"Return architecture for given CMSSW release\" args = {'release': rel} releases =", "...\" % rel cmsrel(rel) def cmsenv(_arg): \"cmsenv command\" # in CMS cmsenv is", "+= msg_green('info ') \\ + ' provides detailed info about given CMS entity,", "ttt', 'ls -l', 'rmdir ttt', 'ls'] cmd_list += ['ls dataset=%s' % dataset, 'ls", "raise Exception(msg) run(cmd) def cms_das(query): \"\"\" cmssh command which queries DAS data-service with", "reference: # cmd = \"eval `scramv1 runtime -sh`; %s\" % fname cmd =", "given command in original shell environment\" cmd = '%s %s' % (self.cmd, args.strip())", "os.environ['CMS_JSON'] = arg print_info('CMS_JSON: %s' % arg) else: fname = os.environ.get('CMS_JSON') print_info('CMS JSON:", "msg = 'Within cmssh it is not required to use cmsenv\\n' msg +=", "use Magic(cmd).execute we don't need # to add scramv1 command in front of", "-name init.sh | tail -1`;' % pkg_dir if not 
os.path.isdir(pkg_dir): cmd = ''", "%s\" % (rel, os.getcwd()) def cmsexe(cmd): \"\"\" Execute given command within CMSSW environment", "item elif arg == 'all' or arg == 'list': if arg == 'all':", "def cms_commands(_arg=None): \"\"\" cmssh command which lists all registered cmssh commands in current", "%s %s\" % (src, dst)) else: try: status = copy_lfn(orig, dst, debug, background,", "% arg os.environ['SCRAM_ARCH'] = arg def cms_apt(arg=''): \"Execute apt commands\" if '-cache' in", "By default it applies the following options -rfc -voms cms:/cms -key <userkey.pem> -cert", "return msg def cms_help(arg=None): \"\"\" cmshelp command Examples: cmssh> cmshelp cmssh> cmshelp commands", "or arg == 'False': if os.environ.has_key('CMSSH_PAGER'): del os.environ['CMSSH_PAGER'] else: os.environ['CMSSH_PAGER'] = arg print", "= arg.replace('dataset=', '').replace('file=', '').replace('block=', '') arg = arg.replace('lfn=', '').replace('run=', '') res = run_lumi_info(arg,", "arch # run bootstrap command in subprocess.call since it invokes # wget/curl and", "from cmssh.iprint import msg_red, msg_green, msg_blue from cmssh.iprint import print_warning, print_error, print_status, print_info", "given release name is installed on user system rel_arch = None for arch", "<all> or <list> Examples: cmssh> arch # show current and installed architecture(s) cmssh>", "queries DAS data-service with provided query and returns results in JSON data format", "or '-get' in arg: cmd = 'apt%s' % arg else: msg = 'Not", "import das_client from cmssh.url_utils import get_data, send_email from cmssh.regex import pat_release, pat_site, pat_dataset,", "from cmssh.regex import pat_release, pat_site, pat_dataset, pat_block from cmssh.regex import pat_lfn, pat_run, pat_se,", "= os.path.join(cmssw_dir, rel) os.environ['CMSSW_RELEASE_BASE'] = path for pkg in ['FWCore', 'DataFormats']: pdir =", "None for arch in cms_architectures(): rel_dir = '%s/cms/cmssw/%s' % (arch, rel) if 
os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'],", "else: gen = CMSMGR.lookup(args[0].strip()) for flt in args[1:]: res = apply_filter(flt.strip(), gen) RESMGR.assign(res)", "= [] for idir in os.listdir(os.environ['VO_CMS_SW_DIR']): if idir.find(osarch) != -1: rdir = os.path.join(\\", "arg.strip()) run(cmd) if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = dyld_path #def debug(arg): # \"\"\" # debug", "accepts <list|all> args\\n' msg += msg_green('install ') \\ + ' install CMSSW release,", "= pat_se orig = src.split(' ')[-1] if os.path.exists(orig) and not pat.match(dst): if background:", "open(path, 'r') as demo_file: print demo_file.read() def results(): \"\"\"Return results from recent query\"\"\"", "display disk usage for given site, e.g. du T3_US_Cornell\\n' msg += '\\nAvailable CMSSW", "arg os.environ['SCRAM_ARCH'] = arg def cms_apt(arg=''): \"Execute apt commands\" if '-cache' in arg", "check_os(name) and name.find('.') == -1: archs.append(name) if archs: print '\\nInstalled architectures:' for item", "ls -l local_file cmssh> ls T3_US_Cornell:/store/user/valya cmssh> ls run=160915 \"\"\" arg = arg.strip()", "release version and work area are set (should be set at cmsrel) rel", "else: # print_info(\"Debug level is %s\" % DEBUG.level) def debug_http(arg): \"\"\" Show or", "print_status(status) except: traceback.print_exc() def cms_architectures(arch_type=None): \"Return list of CMSSW architectures (aka SCRAM_ARCH)\" archs", "Set cmssh prompt prompt = 'cms-sh' ipython.prompt_manager.in_template = '%s|\\#> ' % prompt return", "dst = arg.rsplit(' ', 1) if dst.find('&') != -1: background = True dst", "not os.path.isdir(pkg_dir): cmd = '' return cmd def cms_root(arg): \"\"\" cmssh command to", "except: debug = 0 fname = arg.replace('file=', '') if arg and os.path.isfile(fname): mtype", "\"Create user area for %s release ...\" % rel cmsrel(rel) def cmsenv(_arg): \"cmsenv", "cmd print_warning(stderr) rootsys = stdout.replace('\\n', '').replace('ROOTSYS=', '') 
dst = '%s/install/lib/release_root' % root if", "'Wrong architecture, please choose from the following list\\n' msg += ', '.join(cms_archs) raise", "Examples: cmssh> das_json dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx = 0 limit =", "'T3_US_Cornell:/store/user/valya' cmd_list = ['pager 0', 'debug_http 0'] cmd_list += ['ls', 'mkdir ttt', 'ls", "1: # no filter res = CMSMGR.lookup(arg) else: gen = CMSMGR.lookup(args[0].strip()) for flt", "(should be set at cmsrel) rel = os.environ.get('CMSSW_VERSION', None) work_area = os.environ.get('CMSSW_WORKAREA', None)", "-1: print \"Installing cms+cmssw-patch+%s ...\" % rel cmd = 'source %s; apt-get install", "list all cmssh tickets cmssh> ticket 14 # get details for given ticket", "project CMSSW %s\" % rel run(cmd) os.chdir(os.path.join(rel, 'src')) # get ROOT from run-time", "'Documentation is not available' else: doc = cms_help_msg() print doc def cms_rm(arg): \"\"\"", "msg_green('das ') + ' query DAS service\\n' msg += msg_green('das_json ') \\ +", "+ ' show your proxy info (aka voms-proxy-info)\\n' msg += '\\nQuery results are", "cms_rm(arg): \"\"\" CMS rm command works with local files/dirs and CMS storate elements.", "lfn2, 'ls'] cmd_list += ['find user=oliver', 'jobs list', 'jobs user=AikenOliver'] cmd_list += ['releases", "get_ipython() for item in cmd_list: print_info(\"Execute %s\" % item) split = item.split(' ',", "fix_so(idir) print \"Create user area for %s release ...\" % rel cmsrel(rel) def", "cp /store/mc/file.root T3_US_Cornell:/store/user/name cmssh> cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha \"\"\" check_voms_proxy() background = False orig_arg", "= src.split(' ')[-1] if os.path.exists(orig) and not pat.match(dst): if background: cmd = 'cp", "Examples: cmssh> config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\" if arg: arg = arg.strip() if pat_dataset.match(arg): reqmgr(arg.replace('dataset=',", "rmdir, list_se, dqueue from cmssh.utils import 
list_results, check_os, unsupported_linux, access2file from cmssh.utils import", "# no filter res = CMSMGR.lookup(arg) else: gen = CMSMGR.lookup(args[0].strip()) for flt in", "cmssh prompt prompt = 'cms-sh' ipython.prompt_manager.in_template = '%s|\\#> ' % prompt return #", "item in cmd_list: print_info(\"Execute %s\" % item) split = item.split(' ', 1) if", "os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch) return 'ok' else: msg = '%s/%s rejected by user'", "cmd_list: print_info(\"Execute %s\" % item) split = item.split(' ', 1) if len(split) ==", "rel_arch = item[0] status = item[1] if check_os(rel_arch): output.append((rel_arch, status)) return output def", "pat_se orig = src.split(' ')[-1] if os.path.exists(orig) and not pat.match(dst): if background: cmd", "debug = 0 das_client(host, query, idx, limit, debug, 'plain') def cms_das_json(query): \"\"\" cmssh", "if not status: msg = '%s release is not officially supported under %s'", "message print \"%s is ready, cwd: %s\" % (rel, os.getcwd()) def cmsexe(cmd): \"\"\"", "res = dataset_info(arg, debug) except IndexError: msg = \"Given pattern '%s' does not", "fname) else: cms_ls(arg) def cms_cp(arg): \"\"\" cmssh cp command copies local files/dirs to/from", "cmssh> cmsrel # reset CMSSW environment to cmssh one cmssh> cmsrel CMSSW_5_2_4 \"\"\"", "ls, cp, supported in cmssh. \"\"\" # system modules import os import re", "architecture: %s\" % os.environ['SCRAM_ARCH'] archs = [] for name in os.listdir(os.environ['VO_CMS_SW_DIR']): if check_os(name)", "get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: cp <options> source_file target_{file,directory}\")", "', '.join(output) osname, osarch = osparameters() if osname == 'osx' and osarch ==", "arg.strip() # if arg: # print_info(\"Set debug level to %s\" % arg) #", "etc. 
Once done print ' msg += msg_blue('EOF') + ' and hit '", "+= ['find lumi dataset=%s' % dataset, 'find lumi {\"190704\":[1,2,3]}', 'find lumi {190704:[1,2,3]}'] cmd_list", "'You can post new ticket via web interface at\\n' msg += 'https://github.com/vkuznet/cmssh/issues/new\\n' msg", "startswith print_error(msg) else: cmd = 'ls ' + orig_arg run(cmd, shell=True) if res:", "= True dst = dst.replace('&', '').strip() if dst == '.': dst = os.getcwd()", "msg = \"cmssh pager is set to: %s\" % val print msg def", "ticket via web interface at\\n' msg += 'https://github.com/vkuznet/cmssh/issues/new\\n' msg += 'otherwise it will", "in cmssh. \"\"\" # system modules import os import re import sys import", "not catch it run(cmd, sdir, 'bootstrap.log', msg, debug, shell=True, call=True) cmd = 'source", "cmd = \"eval `scramv1 runtime -sh`; %s\" % fname cmd = fname ipython.register_magic_function(Magic(cmd).execute,", "os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW') debug = 0 msg = 'Bootstrap %s ...' % arch #", "Magic(cmd).execute we don't need # to add scramv1 command in front of edm", "cmssh.cmsfs import release_info, run_lumi_info from cmssh.github import get_tickets, post_ticket from cmssh.cms_urls import dbs_instances,", "try: debug = get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: rm", "to execute it in a shell \"\"\" def __init__(self, cmd): self.cmd = cmd", "'cp %s file2.root &' % lfn2, 'ls'] cmd_list += ['find user=oliver', 'jobs list',", "+ ' search CMS meta-data (query DBS/Phedex/SiteDB)\\n' msg += msg_green('dbs_instance') \\ + '", "rdir = '%s/bin/%s' % (rel_dir, rel_arch) reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir) for name in", "'') res = run_lumi_info(arg, debug) def cms_json(arg): \"Print or set location of CMS", "background = True arg = arg.replace('&', '').strip() src, dst = arg.rsplit(' ', 1)", "if os.environ.get('CMSSH_CMSSW', None): msg = '\\nYou are not allowed to install new release,", "osparameters() releases = [] for idir 
in os.listdir(os.environ['VO_CMS_SW_DIR']): if idir.find(osarch) != -1: rdir", "try: debug = get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: cp", "\"\"\" def __init__(self, cmd): self.cmd = cmd def execute(self, args=''): \"Execute given command", "+ ' command and ' msg += 'CMS release environment will be set", "' status of job queue or CMS jobs\\n' msg += msg_green('read ') \\", "element. Examples: cmssh> mkdir foo cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try:", "and arg[-1] == ')': arg = arg[1:-1] for case in [arg, 'cms_'+arg, 'cms'+arg]:", "installed on your system.\" msg += \"\\nPlease use \" + msg_green('install CMSSW_X_Y_Z') \\", "ROOT xrdcp via cmssh shell Examples: cmssh> xrdcp /a/b/c.root file:////tmp.file.root \"\"\" dyld_path =", "print_error('Please provide 0/1 for debug_http command') return print_info(\"Set HTTP debug level to %s\"", "cmsenv is an alias to: eval `scramv1 runtime -sh`' msg = 'Within cmssh", "-cert <usercert.pem> \"\"\" cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem') with working_pem(PEMMGR.pem) as key: run(\"voms-proxy-destroy\") cmd", "arg else: print_info(\"HTTP debug level is %s\" % os.environ.get('HTTPDEBUG', 0)) def cms_find(arg): \"\"\"", "' -unsupported_distribution_hack' sdir = os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW') debug = 0 msg = 'Bootstrap %s", "get_ipython().debug args = arg.split('|') if len(args) == 1: # no filter res =", "debug) except IndexError: msg = \"Given pattern '%s' does not exist on local", "if release version and work area are set (should be set at cmsrel)", "filesystem or in DBS\" % arg print_error(msg) elif pat_run.match(arg): arg = arg.replace('run=', '')", "sename, 'mkdir %s/foo' % sename, 'ls %s' % sename, 'rmdir %s/foo' % sename,", "arg: cmd = 'apt%s' % arg else: msg = 'Not supported apt command'", "in front of edm one, since # execute method will run in current", "0 or arg == '0': ipth.debug = False else: ipth.debug = 
True #", "print_info(msg) if not user_input('Proceed', default='N'): return email = raw_input('Your Email : ') if", "find site dataset=/Cosmics/CRUZET3-v1/RAW cmssh> find config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh> find run=160915 cmssh> find lumi", "\"\"\" cmssh command which lists all registered cmssh commands in current shell. Examples:", "one' print msg def cms_read(arg): \"\"\" cmssh command to read provided HTML page", "print name else: if check_os(name): print name else: cms_archs = cms_architectures('all') if arg", "arg print_error(msg) elif pat_run.match(arg): arg = arg.replace('run=', '') res = run_info(arg, debug) elif", "% orig_arg subprocess.call(cmd, shell=True) else: run(\"cp %s %s\" % (src, dst)) else: try:", "% (root, lib) if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(link, dst) # switch to", "cmsrel(rel) def cmsenv(_arg): \"cmsenv command\" # in CMS cmsenv is an alias to:", "...\" % rel cmd = 'source %s; apt-get install cms+cmssw+%s' % (script, rel)", "dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh> lumi run=190704 cmssh> lumi {190704:[1,2,3,4],", "das dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx = 0 limit = 0 debug", "information about cmssh tickets, e.g. 
Examples: cmssh> tickets # list all cmssh tickets", "'') if arg and os.path.isfile(fname): mtype = mimetypes.guess_type(arg) if mtype[0]: print \"Mime type:\",", "res['html_url'] if isinstance(res, dict): ticket = pprint.pformat(res) else: ticket = res to_user =", "elif pat_se.match(arg): arg = arg.replace('site=', '') res = list_se(arg, debug) elif pat_site.match(arg): arg", "hit ' + msg_blue('Enter') + '\\n' print msg while True: try: uinput =", "not desc: msg = \"You did not provide bug description\" print_error(msg) return if", "to add scramv1 command in front of edm one, since # execute method", "'%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return 'ok' output = [] for", "arg print_info('CMS_JSON: %s' % arg) else: fname = os.environ.get('CMS_JSON') print_info('CMS JSON: %s' %", "msg while True: try: uinput = raw_input() if uinput.strip() == 'EOF': break desc", "config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh> find run=160915 cmssh> find lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> find lumi run=190704", "% arch run(cmd, sdir, msg=msg, debug=debug, shell=True) def get_release_arch(rel): \"Return architecture for given", "% ipth.debug) else: if arg == 0 or arg == '0': ipth.debug =", "'\\nCheck release architecture status: %s' % status print msg return print \"Searching for", "'Within cmssh it is not required to use cmsenv\\n' msg += 'please use", "rel or not work_area: msg = 'In order to run crab command you", "cmssh.url_utils import get_data, send_email from cmssh.regex import pat_release, pat_site, pat_dataset, pat_block from cmssh.regex", "debug level to %s\" % arg) # DEBUG.set(arg) # else: # print_info(\"Debug level", "return print \"Searching for %s\" % rel script = get_apt_init(os.environ['SCRAM_ARCH']) cmd = 'source", "' copy file/LFN, e.g. 
cp local.file or cp /store/user/file.root .\\n' msg += msg_green('info", "= get_ipython().debug args = arg.split('|') if len(args) == 1: # no filter res", "% (pkgs_init, arg.strip()) run(cmd) def cms_xrdcp(arg): \"\"\" cmssh command to run ROOT xrdcp", "name in os.listdir(os.environ['VO_CMS_SW_DIR']): if check_os(name) and name.find('.') == -1: archs.append(name) if archs: print", "for given ticket id cmssh> ticket new # post new ticket from cmssh", "os.environ['SCRAM_ARCH'], pkg_dir) cmd = 'source `find %s -name init.sh | tail -1`;' %", "arch != os.environ['SCRAM_ARCH']: msg = 'Your SCRAM_ARCH=%s, while found arch=%s' \\ % (os.environ['SCRAM_ARCH'],", "format Examples: cmssh> das_json dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx = 0 limit", "works with local files/dirs and CMS storate elements. Examples: cmssh> rm local_file cmssh>", "releases installed on a system\" _osname, osarch = osparameters() releases = [] for", "% res['html_url']) title = 'cmssh gist %s' % res['html_url'] if isinstance(res, dict): ticket", "= apply_filter(flt.strip(), gen) RESMGR.assign(res) list_results(res, debug) def verbose(arg): \"\"\" Set/get verbosity level \"\"\"", "if unsupported_linux(): cmd += ' -unsupported_distribution_hack' sdir = os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW') debug = 0", "'') arg = arg.replace('lfn=', '').replace('run=', '') res = run_lumi_info(arg, debug) def cms_json(arg): \"Print", "(os.path.join(root_path, 'bin'), arg.strip()) run(cmd) if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = dyld_path #def debug(arg): # \"\"\"", "cms_vomsinit(_arg=None): \"\"\" cmssh command which executes voms-proxy-init on behalf of the user Examples:", "the following list of options: - list, which lists local transfer jobs -", "'e.g. 
mkdir /path/foo or rmdir T3_US_Cornell:/store/user/foo\\n' msg += msg_green('ls ') \\ + '", "= \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2 = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM'", "= arg.replace('dataset=', '') try: res = dataset_info(arg, debug) except IndexError: msg = \"Given", "' cmd += 'apt-get update; ' msg = 'Initialize %s apt repository ...'", "msg += msg_green('mkdir/rmdir ') + ' mkdir/rmdir command, ' \\ + 'e.g. mkdir", "arg.strip() try: last_arg = arg.split(' ')[-1].strip() if last_arg == '&': background = True", "not userdn: cmd = \"voms-proxy-info -identity\" stdout, stderr = execmd(cmd) os.environ['USER_DN'] = stdout.replace('\\n',", "# execute method will run in current shell environment # old command for", "site_info(arg, debug) elif pat_lfn.match(arg): arg = arg.replace('file=', '') arg = arg.replace('lfn=', '') res", "if orig_arg.find('|') != -1: arg, flt = orig_arg.split('|', 1) arg = arg.strip() else:", "T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\" arg = arg.strip() try: debug = get_ipython().debug except: debug = 0", "'') print_info('Dashboard information, site=%s' % site) res = jobsummary({'site': site}) elif pat_user.match(arg): user", "%s' % sename, 'rm %s/file.root' % sename, 'ls %s' % sename, 'rm file.root',", "k, v in mdict['line'].items() if v.func_name.find('cms_')!=-1] cmds.sort() for key in cmds: print key", "'ls %s' % sename, ] cmd_list += ['cp %s file.root' % lfn, 'ls',", "'das dataset=/ZMM*', 'find dataset file=%s' % lfn] cmd_list += ['find lumi dataset=%s' %", "if arg and os.path.isfile(fname): mtype = mimetypes.guess_type(arg) if mtype[0]: print \"Mime type:\", mtype[0]", "debug = 0 msg = 'Bootstrap %s ...' 
% arch # run bootstrap", "' + msg_green('cmsrel') + ' command and ' msg += 'CMS release environment", "% lfn2, 'ls'] cmd_list += ['find user=oliver', 'jobs list', 'jobs user=AikenOliver'] cmd_list +=", "msg = '\\n%s/%s is not installed within cmssh, proceed' \\ % (rel, arch)", "msg = 'Would you like to create one' if user_input(msg, default='N'): with open('crab.cfg',", "and os.path.isfile(fname): # we use Magic(cmd).execute we don't need # to add scramv1", "%s\" \\ % os.environ.get('DBS_INSTANCE', 'global') print msg print '\\nAvailable DBS instances:' for inst", "site or for given user. It accepts the following list of options: -", "'%s' does not exist on local filesystem or in DBS\" % arg print_error(msg)", "= '%s %s %s' % (pcre_init, gcc_init, root_init) cmd = '%s root -l", "!= -1: print \"Installing cms+cmssw-patch+%s ...\" % rel cmd = 'source %s; apt-get", "of integration tests for cmssh\" for fname in ['file1.root', 'file2.root']: if os.path.isfile(fname): os.remove(fname)", "debug = 0 fname = arg.replace('file=', '') if arg and os.path.isfile(fname): mtype =", "/store/mc/file.root T3_US_Cornell:/store/user/name cmssh> cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha \"\"\" check_voms_proxy() background = False orig_arg =", "mkdir, rmdir, list_se, dqueue from cmssh.utils import list_results, check_os, unsupported_linux, access2file from cmssh.utils", "cms_dbs_prod_global \"\"\" arg = arg.strip() if arg: if validate_dbs_instance(arg): os.environ['DBS_INSTANCE'] = arg print", "0/1 for debug_http command') return print_info(\"Set HTTP debug level to %s\" % arg)", "% (rel, arch) output.append(msg) if output: return ', '.join(output) osname, osarch = osparameters()", "print msg print '\\nAvailable DBS instances:' for inst in dbs_instances(): print inst def", "-l local_file cmssh> ls T3_US_Cornell:/store/user/valya cmssh> ls run=160915 \"\"\" arg = arg.strip() res", "work area are set (should be set at cmsrel) rel = os.environ.get('CMSSW_VERSION', None)", 
"environment\" cmd = '%s %s' % (self.cmd, args.strip()) run(cmd) def subprocess(self, args=''): \"Execute", "is set to: %s\" % val print msg def dbs_instance(arg=None): \"\"\" cmssh command", "at https://github.com/vkuznet/cmssh/issues/new \"\"\" if arg == 'new': msg = 'You can post new", "or rm T3_US_Cornell:/store/user/file.root\\n' msg += msg_green('cp ') \\ + ' copy file/LFN, e.g.", "run(cmd) def subprocess(self, args=''): \"Execute given command in original shell environment\" cmd =", "from run-time environment cmd = 'eval `scramv1 runtime -sh`; env | grep ^ROOTSYS='", "content\\n' msg += msg_green('root ') + ' invoke ROOT\\n' msg += msg_green('du ')", "arg.strip() debug = get_ipython().debug args = arg.split('|') if len(args) == 1: # no", "dst = dst.replace('&', '').strip() if dst == '.': dst = os.getcwd() # check", "releases for %s platform\" % platform() res = release_info(release=None, rfilter=arg) RESMGR.assign(res) releases =", "print_info('CMS JSON: %s' % fname) try: debug = get_ipython().debug except: debug = 0", "not os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir) if not os.path.isfile(crab_cfg): msg = 'No crab.cfg file found", "+= msg_green('scram ') + ' CMSSW scram command\\n' msg += msg_green('cmsRun ') \\", "= 0 if debug and access2file(fname): with open(fname, 'r') as cms_json: print cms_json.read()", "show all known CMSSW architectures cmssh> arch list # show all CMSSW architectures", "= base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') send_email(to_user, email, title, ticket) else: res = get_tickets(arg) RESMGR.assign(res) pprint.pprint(res) def", "on user system rel_arch = None for arch in cms_architectures(): rel_dir = '%s/cms/cmssw/%s'", "bootstrap(arch) return 'ok' else: msg = '%s/%s rejected by user' % (rel, arch)", "== 'None' or arg == 'False': if os.environ.has_key('CMSSH_PAGER'): del os.environ['CMSSH_PAGER'] else: os.environ['CMSSH_PAGER'] =", "software: ' + \\ msg_blue('pip <search|(un)install> <package>') return 
msg def cms_help(arg=None): \"\"\" cmshelp", "os.environ.get('USER_DN', None) if userdn: user = get_dashboardname(userdn) print_info('Dashboard information, user=%s' % user) res", "+= ' e.g. CMSSW_X_Y_Z<_patchN>' print msg return # check if we have stand-alone", "res = post_ticket(key, files) if res.has_key('html_url'): print_status('New gist ticket %s' % res['html_url']) title", "msg += '\\nList cmssh commands : ' + msg_blue('commands') msg += '\\ncmssh command", "= \"scramv1 project CMSSW %s\" % rel run(cmd) os.chdir(os.path.join(rel, 'src')) # get ROOT", "\"\"\" Set of UNIX commands, e.g. ls, cp, supported in cmssh. \"\"\" #", "' \\ + 'e.g. rm local.file or rm T3_US_Cornell:/store/user/file.root\\n' msg += msg_green('cp ')", "time.gmtime(time.time())) files = {key: {'content': desc}} res = post_ticket(key, files) if res.has_key('html_url'): print_status('New", "None) msg = \"cmssh pager is set to: %s\" % val print msg", "'%s|\\#> ' % rel # final message print \"%s is ready, cwd: %s\"", "return try: debug = get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage:", "' % rel # final message print \"%s is ready, cwd: %s\" %", "'sh -x %s/bootstrap.sh setup -path %s -arch %s' % (swdir, swdir, arch) if", "args = {'release': rel} releases = get_data(tc_url('py_getReleaseArchitectures'), args) output = [] for item", "not arg: print_error(\"Usage: rmdir <options> dir\") if os.path.exists(arg): run(\"rmdir %s\" % arg) else:", "for rel in releases: print rel else: msg = \"\\nYou don't have yet", "name is installed on user system rel_dir = '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel) if", "provide 0/1 for debug_http command') return print_info(\"Set HTTP debug level to %s\" %", "out.strip() arg = arg.strip() else: out = None if arg: arg = arg.strip()", "either <list> or <all> Examples: cmssh> releases # show installed CMSSW releases cmssh>", "your system.\\n' msg += 'Use ' + msg_green('releases') msg += ' command to", "Return configuration object for 
given dataset Examples: cmssh> config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\" if arg:", "res = das_client(host, query, idx, limit, debug, 'json') RESMGR.assign([res]) pprint.pprint(res) def cms_vomsinit(_arg=None): \"\"\"", "if arg: arg = arg.strip() if pat_dataset.match(arg): reqmgr(arg.replace('dataset=', '')) def cms_lumi(arg): \"\"\" Return", "cmssh> info dataset=/a/b/c cmssh> info /a/b/c cmssh> info run=160915 cmssh> info local_file.root Please", "options(arg): \"\"\"Extract options from given arg string\"\"\" opts = [] for par in", "cmssh> find dataset=/ZMM* cmssh> find file dataset=/Cosmics/CRUZET3-v1/RAW csmsh> find site dataset=/Cosmics/CRUZET3-v1/RAW cmssh> find", "# get details for given ticket id cmssh> ticket new # post new", "+= ' command to install given release.' print msg return # set release", "% (idir, pkg) if os.path.exists(pdir): shutil.rmtree(pdir) os.mkdir(pdir) touch(os.path.join(pdir, '__init__.py')) pkgs = ['Framework', 'GuiBrowsers',", "command cmssh> du T3_US_Cornell \"\"\" arg = arg.strip() if pat_site.match(arg): lookup(arg) else: cmd", "ipth.debug = False else: ipth.debug = True # CMSSW commands def bootstrap(arch): \"Bootstrap", "%s; apt-get install cms+cmssw-patch+%s' % (script, rel) else: print \"Installing cms+cmssw+%s ...\" %", "import jobsummary from cmssh.reqmgr import reqmgr from cmssh.cms_objects import get_dashboardname def options(arg): \"\"\"Extract", "'dashboard': userdn = os.environ.get('USER_DN', None) if userdn: user = get_dashboardname(userdn) print_info('Dashboard information, user=%s'", "# final message print \"%s is ready, cwd: %s\" % (rel, os.getcwd()) def", "= arg.strip() else: out = None if arg: arg = arg.strip() read(arg, out,", "%s %s' % (pcre_init, gcc_init, root_init) cmd = '%s root -l %s' %", "% (rel, arch) print_warning(msg) if arch != os.environ['SCRAM_ARCH']: msg = 'Your SCRAM_ARCH=%s, while", "PEMMGR, working_pem from cmssh.cmssw_utils import crab_submit_remotely, 
crabconfig from cmssh.cern_html import read from cmssh.dashboard", "'\\nQuery results are accessible via %s function, e.g.\\n' \\ % msg_blue('results()') msg +=", "not installed within cmssh, proceed' \\ % (rel, arch) if user_input(msg, default='N'): os.environ['SCRAM_ARCH']", "return email = raw_input('Your Email : ') if not email: msg = \"You", "cmsRun command for release in question\\n' msg += '\\nAvailable GRID commands: <cmd> either", "CMSSW' return 'no match' def get_apt_init(arch): \"Return proper apt init.sh for given architecture\"", "of CMS JSON file\" if arg: if access2file(arg): os.environ['CMS_JSON'] = arg print_info('CMS_JSON: %s'", "def cms_info(arg): \"\"\" cmssh info command provides information for given meta-data entity, e.g.", "0 das_client(host, query, idx, limit, debug, 'plain') def cms_das_json(query): \"\"\" cmssh command which", "default='N'): return email = raw_input('Your Email : ') if not email: msg =", "list\\n' msg += ', '.join(cms_archs) raise Exception(msg) print \"Switch to SCRAM_ARCH=%s\" % arg", "'list': print_info('Local data transfer') dqueue(arg) elif arg == 'dashboard': userdn = os.environ.get('USER_DN', None)", "instance Examples: cmssh> dbs_instance cmssh> dbs_instance cms_dbs_prod_global \"\"\" arg = arg.strip() if arg:", "debug, background, overwrite) print_status(status) except: traceback.print_exc() def cms_architectures(arch_type=None): \"Return list of CMSSW architectures", "os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel) if os.path.isdir(os.path.join(cmssw_dir, rel + '/src')): os.chdir(os.path.join(cmssw_dir, rel + '/src'))", "stdout.replace('\\n', '').replace('ROOTSYS=', '') dst = '%s/install/lib/release_root' % root if os.path.exists(dst): if os.path.islink(dst): os.remove(dst)", "to RunSummary service please ensure that your usercert.pem is mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\"", "name else: cms_archs = cms_architectures('all') if arg not in cms_archs: msg = 'Wrong", 
"arg.strip() ipth = get_ipython() if arg == '': print_info(\"Verbose level is %s\" %", "= '/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run = 160915 sename = 'T3_US_Cornell:/store/user/valya' cmd_list =", "= '\\nYou are not allowed to install new release, ' msg += 'since", "import osparameters, check_voms_proxy, run, user_input from cmssh.utils import execmd, touch, platform, fix_so from", "= os.getcwd() # check if src still has options and user asked for", "/a/b/c.root file:////tmp.file.root \"\"\" dyld_path = os.environ.get('DYLD_LIBRARY_PATH', None) root_path = os.environ['DEFAULT_ROOT'] if dyld_path: os.environ['DYLD_LIBRARY_PATH']", "= 'In order to run crab command you must ' msg += 'run", "results from recent query\"\"\" return RESMGR def cms_commands(_arg=None): \"\"\" cmssh command which lists", "architecture, please choose from the following list\\n' msg += ', '.join(cms_archs) raise Exception(msg)", "0. \"\"\" arg = arg.strip() if arg: if arg not in ['0', '1']:", "' msg += 'since cmssh was installed with system CMSSW install area' print", "'EOF': break desc += uinput + '\\n' except KeyboardInterrupt: break if not desc:", "msg = 'Your SCRAM_ARCH=%s, while found arch=%s' \\ % (os.environ['SCRAM_ARCH'], arch) print_warning(msg) msg", "\"Installing cms+cmssw+%s ...\" % rel cmd = 'source %s; apt-get install cms+cmssw+%s' %", "RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_config(arg): \"\"\" Return configuration object for given dataset", "sename, 'rm file.root', 'cp %s file1.root &' % lfn, 'cp %s file2.root &'", "if res.has_key('html_url'): print_status('New gist ticket %s' % res['html_url']) title = 'cmssh gist %s'", "== 'osx': idir = '%s/%s/cms/cmssw/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel) fix_so(idir) print \"Create", "arg.strip() if pat_site.match(arg): lookup(arg) else: cmd = 'du ' + arg cmd =", "in results(): print r, type(r)\\n' msg += 
'\\nList cmssh commands : ' +", "is an alias to: eval `scramv1 runtime -sh`' msg = 'Within cmssh it", "0 if not arg: print_error(\"Usage: rm <options> source_file\") dst = arg.split()[-1] if os.path.exists(dst)", "arg == '-submit': crab_submit_remotely(rel, work_area) return cmd = 'source $CRAB_ROOT/crab.sh; crab %s' %", "\"eval `scramv1 runtime -sh`; %s\" % cmd run(cmd, shell=True, call=True) def cmscrab(arg): \"\"\"", "object for given dataset Examples: cmssh> config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\" if arg: arg =", "orig_arg.find('>') != -1: arg, out = orig_arg.split('>', 1) out = out.strip() arg =", "if src still has options and user asked for -f options = src.split('", "data-service with provided query. Examples: cmssh> das dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx", "def cms_install(rel): \"\"\" cmssh command to install given CMSSW release. Examples: cmssh> install", "rel in ['reset', 'clear', 'clean']: path = os.environ['CMSSH_ROOT'] for idir in ['external', 'lib',", "or CMS jobs\\n' msg += msg_green('read ') \\ + ' read URL/local file", "if os.path.exists(arg): run(\"rmdir %s\" % arg) else: try: status = rmdir(arg, verbose=debug) print_status(status)", "stderr = execmd(cmd) os.environ['USER_DN'] = stdout.replace('\\n', '') def github_issues(arg=None): \"\"\" Retrieve information about", "os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/external/apt' % arch) dirs = os.listdir(apt_dir) dirs.sort() name = 'etc/profile.d/init.sh' script", "14 # get details for given ticket id cmssh> ticket new # post", "arg.replace('dataset=', '').replace('file=', '').replace('block=', '') arg = arg.replace('lfn=', '').replace('run=', '') res = run_lumi_info(arg, debug)", "cmssh> arch all # show all known CMSSW architectures cmssh> arch list #", "arg = arg.replace('site=', '') res = list_se(arg, debug) elif pat_site.match(arg): arg = arg.replace('site=',", "lookup(arg): \"\"\" Perform lookup of given query in CMS 
data-services. \"\"\" arg =", "files/dirs/CMS storate elements or CMS entities (se, site, dataset, block, run, release, file).", "arg def cms_apt(arg=''): \"Execute apt commands\" if '-cache' in arg or '-get' in", "= arg if orig_arg.find('>') != -1: arg, out = orig_arg.split('>', 1) out =", "os.chdir(os.path.join(rel, 'src')) # get ROOT from run-time environment cmd = 'eval `scramv1 runtime", "os.path.exists(dst) or len(glob.glob(dst)): cmd = \"rm %s\" % arg run(cmd) else: if pat_lfn.match(arg.split(':')[-1]):", "known CMSSW architectures cmssh> arch list # show all CMSSW architectures for given", "not work_area: msg = 'In order to run crab command you must '", "\"\"\" cmssh cp command copies local files/dirs to/from local files/dirs or CMS storate", "online, tests, etc. \"\"\" if arg: print \"CMSSW releases for %s platform\" %", "arg = arg.strip() else: flt = None if arg: arg = arg.strip() if", "install external+fakesystem+1.0; ' cmd += 'apt-get update; ' msg = 'Initialize %s apt", "os.mkdir(pdir) touch(os.path.join(pdir, '__init__.py')) pkgs = ['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger', 'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities',", "user area for %s release ...\" % rel cmsrel(rel) def cmsenv(_arg): \"cmsenv command\"", "arg string\"\"\" opts = [] for par in arg.split(): if len(par) > 0", "os.chdir(cmssw_dir) cmd = \"scramv1 project CMSSW %s\" % rel run(cmd) os.chdir(os.path.join(rel, 'src')) #", "+= \"\\nPlease use \" + msg_green('install CMSSW_X_Y_Z') \\ + ' command to install", "msg_red(rel) msg += ' is not yet installed on your system.\\n' msg +=", "msg = \"You did your email address\" print_error(msg) return desc = '' msg", "%s' % sename, ] cmd_list += ['cp %s file.root' % lfn, 'ls', 'cp", "cmssh.regex import pat_release, pat_site, pat_dataset, pat_block from cmssh.regex import pat_lfn, pat_run, pat_se, pat_user", "args=''): \"Execute given command in current shell environment\" cmd = '%s %s' %", "else: msg = '%s/%s 
rejected by user' % (rel, arch) output.append(msg) if output:", "feature if platform() == 'osx': idir = '%s/%s/cms/cmssw/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel)", "\\ + ' status of job queue or CMS jobs\\n' msg += msg_green('read", "= arg.strip() else: flt = None startswith = None entities = \\ ['se',", "arg.replace('lfn=', '') res = file_info(arg, debug) elif pat_block.match(arg): arg = arg.replace('block=', '') res", "split[0] args = split[-1] mgr.run_line_magic(cmd, args) def cms_info(arg): \"\"\" cmssh info command provides", "args=''): \"Execute given command in original shell environment\" cmd = '%s %s' %", "CMS releases, including online, tests, etc. \"\"\" if arg: print \"CMSSW releases for", "None) if not rel or not work_area: msg = 'In order to run", "mgr = get_ipython() for item in cmd_list: print_info(\"Execute %s\" % item) split =", "get_tickets, post_ticket from cmssh.cms_urls import dbs_instances, tc_url from cmssh.das import das_client from cmssh.url_utils", "cmd = \"voms-proxy-info -identity\" stdout, stderr = execmd(cmd) os.environ['USER_DN'] = stdout.replace('\\n', '') def", "cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh> lumi run=190704 cmssh> lumi {190704:[1,2,3,4], 201706:[1,2,3,67]} \"\"\" try: debug", "cms_json(arg): \"Print or set location of CMS JSON file\" if arg: if access2file(arg):", "'') print_info('Dashboard information, user=%s' % user) res = jobsummary({'user': user}) if res: RESMGR.assign(res)", "def cmsexe(cmd): \"\"\" Execute given command within CMSSW environment \"\"\" vdir = os.environ.get('VO_CMS_SW_DIR',", "msg += msg_green('ls ') \\ + ' list file/LFN, e.g. 
ls local.file or", "+ ' list available CMSSW releases, accepts <list|all> args\\n' msg += msg_green('install ')", "msg += ', '.join(cms_archs) raise Exception(msg) print \"Switch to SCRAM_ARCH=%s\" % arg os.environ['SCRAM_ARCH']", "lumi run=190704 cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh> lumi", "\\ % os.environ.get('DBS_INSTANCE', 'global') print msg print '\\nAvailable DBS instances:' for inst in", "dataset] cmd_list += ['find dataset=/ZMM*', 'das dataset=/ZMM*', 'find dataset file=%s' % lfn] cmd_list", "instances:' for inst in dbs_instances(): print inst def cms_help_msg(): \"\"\"cmsHelp message\"\"\" msg =", "+= ' is not yet installed on your system.\\n' msg += 'Use '", "cmssh> releases # show installed CMSSW releases cmssh> releases list # list available", "list available releases.\\n' msg += 'Use ' + msg_green('install %s' % rel) msg", "['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger', 'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities', 'Services', 'Utilities'] for pkg in", "for %s:' \\ % os.uname()[0].replace('Darwin', 'OSX') for name in cms_architectures('all'): if arg ==", "print_info(\"Execute %s\" % item) split = item.split(' ', 1) if len(split) == 1:", "supported in CMSSW' return 'no match' def get_apt_init(arch): \"Return proper apt init.sh for", "+ msg_green('install %s' % rel) msg += ' command to install given release.'", "' remove file/LFN, ' \\ + 'e.g. rm local.file or rm T3_US_Cornell:/store/user/file.root\\n' msg", "msg_green('du ') \\ + ' display disk usage for given site, e.g. 
du", "import json import glob import shutil import base64 import pprint import mimetypes import", "dbs_instance(arg=None): \"\"\" cmssh command to show or set DBS instance Examples: cmssh> dbs_instance", "else: shutil.rmtree(dst) os.symlink(rootsys, dst) # set edm utils for given release ipython =", "work_area: msg = 'In order to run crab command you must ' msg", "file2 cmssh> cp file.root T3_US_Cornell:/store/user/name cmssh> cp /store/mc/file.root T3_US_Cornell:/store/user/name cmssh> cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha", "'jobs list', 'jobs user=AikenOliver'] cmd_list += ['releases list', 'arch list', 'jobs', 'ls'] cmd_list", "local files/dirs/CMS storate elements or CMS entities (se, site, dataset, block, run, release,", "foo cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug = get_ipython().debug except:", "dqueue from cmssh.utils import list_results, check_os, unsupported_linux, access2file from cmssh.utils import osparameters, check_voms_proxy,", "msg return # check if given release/architecture is in place status = check_release_arch(rel)", "= 'ls ' + orig_arg run(cmd, shell=True) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt)", "to install given CMSSW release. Examples: cmssh> install CMSSW_5_2_4 \"\"\" rel = rel.strip()", "' mkdir/rmdir command, ' \\ + 'e.g. 
mkdir /path/foo or rmdir T3_US_Cornell:/store/user/foo\\n' msg", "raise Exception('Not implemented yet') def cms_rmdir(arg): \"\"\" cmssh rmdir command removes directory from", "run=190704 cmssh> find user=oliver List of supported entities: dataset, block, file, run, lumi,", "of edm one, since # execute method will run in current shell environment", "list of releases installed on a system\" _osname, osarch = osparameters() releases =", "post_ticket(key, files) if res.has_key('html_url'): print_status('New gist ticket %s' % res['html_url']) title = 'cmssh", "https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) # check if release version and work area are set (should", "in CMSSW' return 'no match' def get_apt_init(arch): \"Return proper apt init.sh for given", "file.root', 'cp %s file1.root &' % lfn, 'cp %s file2.root &' % lfn2,", "(script, rel) subprocess.call(cmd, shell=True) # use subprocess due to apt-get interactive feature if", "msg = 'Initialize %s apt repository ...' 
% arch run(cmd, sdir, msg=msg, debug=debug,", "level is %s\" % DEBUG.level) def debug_http(arg): \"\"\" Show or set HTTP debug", "== 'all': print 'CMSSW architectures:' else: print 'CMSSW architectures for %s:' \\ %", "under %s' \\ % (rel, arch) print_warning(msg) if arch != os.environ['SCRAM_ARCH']: msg =", "your email address\" print_error(msg) return desc = '' msg = 'Type your problem,", "help is available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\" msg = \\ 'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg)", "\"\"\" vdir = os.environ.get('VO_CMS_SW_DIR', None) arch = os.environ.get('SCRAM_ARCH', None) if not vdir or", "cmssh> dbs_instance cmssh> dbs_instance cms_dbs_prod_global \"\"\" arg = arg.strip() if arg: if validate_dbs_instance(arg):", "Examples: cmssh> das dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx = 0 limit =", "voms-proxy-info)\\n' msg += '\\nQuery results are accessible via %s function, e.g.\\n' \\ %", "command in front of edm one, since # execute method will run in", "-key %s -cert %s\" % (key, cert) run(cmd) userdn = os.environ.get('USER_DN', '') if", "msg += msg_green('jobs ') \\ + ' status of job queue or CMS", "= 'https://cmsweb.cern.ch' idx = 0 limit = 0 debug = 0 res =", "= \"\\nYou don't have yet CMSSW release installed on your system.\" msg +=", "'DataFormats']: pdir = '%s/%s' % (idir, pkg) if os.path.exists(pdir): shutil.rmtree(pdir) os.mkdir(pdir) touch(os.path.join(pdir, '__init__.py'))", "given site, e.g. 
du T3_US_Cornell\\n' msg += '\\nAvailable CMSSW commands (once you install", "msg_green('releases') msg += ' command to list available releases.\\n' msg += 'Use '", "arg print \"Switch to %s DBS instance\" % arg else: print \"Invalid DBS", "to use cmsenv\\n' msg += 'please use ' + msg_green('cmsrel') + ' command", "and ' msg += 'CMS release environment will be set for you' print_info(msg)", "os.chdir(crab_dir) if not os.path.isfile(crab_cfg): msg = 'No crab.cfg file found in %s' %", "pat_site.match(arg): site = arg.replace('site=', '') print_info('Dashboard information, site=%s' % site) res = jobsummary({'site':", "install given release.' print msg return # set release architecture os.environ['SCRAM_ARCH'] = rel_arch", "'Integration', 'MessageLogger', 'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities', 'Services', 'Utilities'] for pkg in pkgs: link", "# show current and installed architecture(s) cmssh> arch all # show all known", "os.listdir(os.environ['VO_CMS_SW_DIR']): if idir.find(osarch) != -1: rdir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/cms/cmssw' % idir) if", "else: try: status = mkdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_ls(arg): \"\"\" cmssh", "xrdcp /a/b/c.root file:////tmp.file.root \"\"\" dyld_path = os.environ.get('DYLD_LIBRARY_PATH', None) root_path = os.environ['DEFAULT_ROOT'] if dyld_path:", "# cmssh modules from cmssh.iprint import msg_red, msg_green, msg_blue from cmssh.iprint import print_warning,", "def cms_read(arg): \"\"\" cmssh command to read provided HTML page (by default output", "print_status, print_info from cmssh.filemover import copy_lfn, rm_lfn, mkdir, rmdir, list_se, dqueue from cmssh.utils", "in JSON format\\n' msg += msg_green('jobs ') \\ + ' status of job", "= arg.strip() ipth = get_ipython() if arg == '': print_info(\"Verbose level is %s\"", "lists local files/dirs/CMS storate elements or CMS entities (se, site, dataset, block, run,", "for case in [arg, 'cms_'+arg, 
'cms'+arg]: func = ipython.find_magic(case) if func: doc =", "%s\" % arg) else: try: status = rmdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def", "commands cmssh> cmshelp ls \"\"\" if arg: if arg.strip() == 'commands': cms_commands() return", "if arg == '0' or arg == 'None' or arg == 'False': if", "None if arg: arg = arg.strip() read(arg, out, debug) def cms_releases(arg=None): \"\"\" List", "cmssh commands : ' + msg_blue('commands') msg += '\\ncmssh command help : '", "if arg: arg = arg.strip() if not arg or arg == 'list': print_info('Local", "= path for pkg in ['FWCore', 'DataFormats']: pdir = '%s/%s' % (idir, pkg)", "for item in archs: print item elif arg == 'all' or arg ==", "cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem') with working_pem(PEMMGR.pem) as key: run(\"voms-proxy-destroy\") cmd = \"voms-proxy-init -rfc", "cmssh # or post it at https://github.com/vkuznet/cmssh/issues/new \"\"\" if arg == 'new': msg", "validate release name \"%s\"' % rel print_error(msg) msg = 'Please check the you", "file/LFN, e.g. ls local.file or ls /store/user/file.root\\n' msg += msg_green('rm ') + '", "for a given dataset/file/block/lfn/run Examples: cmssh> lumi run=190704 cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> lumi", "desc += uinput + '\\n' except KeyboardInterrupt: break if not desc: msg =", "= 0 orig_arg = arg if orig_arg.find('>') != -1: arg, out = orig_arg.split('>',", "print_status(status) except: traceback.print_exc() def cms_ls(arg): \"\"\" cmssh ls command lists local files/dirs/CMS storate", "'source $CRAB_ROOT/crab.sh; crab %s' % arg cmsexe(cmd) def cmsrun(arg): \"\"\" cmssh command to", "shell=True) def lookup(arg): \"\"\" Perform lookup of given query in CMS data-services. 
\"\"\"", "'ls %s' % sename, 'rm file.root', 'cp %s file1.root &' % lfn, 'cp", "arch)): bootstrap(arch) return 'ok' else: msg = '%s/%s rejected by user' % (rel,", "mtype[0]: print \"Mime type:\", mtype[0] ipython = get_ipython() magic = ipython.find_line_magic('edmFileUtil') if magic:", "arg: if validate_dbs_instance(arg): os.environ['DBS_INSTANCE'] = arg print \"Switch to %s DBS instance\" %", "func: doc = func.func_doc break else: doc = 'Documentation is not available' else:", "read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh> read config.txt \"\"\" try: debug = get_ipython().debug except: debug =", "(os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir) cmd = 'source `find %s -name init.sh | tail -1`;'", "query in CMS data-services. \"\"\" arg = arg.strip() debug = get_ipython().debug args =", "user=oliver List of supported entities: dataset, block, file, run, lumi, site, user \"\"\"", "# shows current setting cmssh> pager None # set pager to nill \"\"\"", "CMS entities (se, site, dataset, block, run, release, file). 
Examples: cmssh> ls #", "= get_ipython() for item in cmd_list: print_info(\"Execute %s\" % item) split = item.split('", "unsupported_linux, access2file from cmssh.utils import osparameters, check_voms_proxy, run, user_input from cmssh.utils import execmd,", "eval `scramv1 runtime -sh`' msg = 'Within cmssh it is not required to", "True # CMSSW commands def bootstrap(arch): \"Bootstrap new architecture\" swdir = os.environ['VO_CMS_SW_DIR'] arch", "arg == '0': ipth.debug = False else: ipth.debug = True # CMSSW commands", "if not os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir) if not os.path.isfile(crab_cfg): msg = 'No crab.cfg file", "user}) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_config(arg): \"\"\" Return configuration object", "cmssh command which executes voms-proxy-init on behalf of the user Examples: cmssh> vomsinit", "% arg) else: try: status = rmdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_mkdir(arg):", "res['html_url']) title = 'cmssh gist %s' % res['html_url'] if isinstance(res, dict): ticket =", "apt repository ...' % arch run(cmd, sdir, msg=msg, debug=debug, shell=True) def get_release_arch(rel): \"Return", "not rel or rel in ['reset', 'clear', 'clean']: path = os.environ['CMSSH_ROOT'] for idir", "if given release/architecture is in place status = check_release_arch(rel) if status != 'ok':", "msg += \"\\nPlease use \" + msg_green('install CMSSW_X_Y_Z') \\ + ' command to", "CMS jobs\\n' msg += msg_green('read ') \\ + ' read URL/local file content\\n'", "'\\ncmssh command help : ' + msg_blue('cmshelp <command>') msg += '\\nInstall python software:", "ls T3_US_Cornell:/store/user/valya cmssh> ls run=160915 \"\"\" arg = arg.strip() res = [] try:", "commands, e.g. ls, cp, supported in cmssh. 
\"\"\" # system modules import os", "= dyld_path #def debug(arg): # \"\"\" # debug shell command # \"\"\" #", "+ ' command' print_error(msg) return # check existence of crab.cfg crab_dir = os.path.join(work_area,", "voms\\n' msg += msg_green('vomsinit ') \\ + ' setup your proxy (aka voms-proxy-init)\\n'", "\\ + ' search CMS meta-data (query DBS/Phedex/SiteDB)\\n' msg += msg_green('dbs_instance') \\ +", "information, site=%s' % site) res = jobsummary({'site': site}) elif pat_user.match(arg): user = arg.replace('user=',", "except: traceback.print_exc() return try: debug = get_ipython().debug except: debug = 0 if not", "mtype = mimetypes.guess_type(arg) if mtype[0]: print \"Mime type:\", mtype[0] ipython = get_ipython() magic", "run(cmd, shell=True) elif pat_se.match(arg): arg = arg.replace('site=', '') res = list_se(arg, debug) elif", "release ...\" % rel cmsrel(rel) def cmsenv(_arg): \"cmsenv command\" # in CMS cmsenv", "arg.strip() if arg: if validate_dbs_instance(arg): os.environ['DBS_INSTANCE'] = arg print \"Switch to %s DBS", "else: if not os.path.exists(dst): print_error('File %s does not exists' % dst) else: raise", "link = '%s/src/FWCore/%s/python' % (path, pkg) dst = '%s/FWCore/%s' % (idir, pkg) os.symlink(link,", "cmssh> pager # shows current setting cmssh> pager None # set pager to", "msg = \"You did not provide bug description\" print_error(msg) return if not user_input('Send", "if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): rel_arch = arch break if not rel_arch: msg = 'Release", "% status print msg return print \"Searching for %s\" % rel script =", "else: msg = \"DBS instance is set to: %s\" \\ % os.environ.get('DBS_INSTANCE', 'global')", "remove file/LFN, ' \\ + 'e.g. 
rm local.file or rm T3_US_Cornell:/store/user/file.root\\n' msg +=", "msg += msg_green('scram ') + ' CMSSW scram command\\n' msg += msg_green('cmsRun ')", "dst = '%s/DataFormats/FWLite' % idir os.symlink(link, dst) for lib in ['external', 'lib']: link", "\\ + ' list file/LFN, e.g. ls local.file or ls /store/user/file.root\\n' msg +=", "else: flt = None if arg: arg = arg.strip() if not arg or", "site - dashboard, which lists jobs of current user - user, which lists", "done print ' msg += msg_blue('EOF') + ' and hit ' + msg_blue('Enter')", "file.root T3_US_Cornell:/store/user/name cmssh> cp /store/mc/file.root T3_US_Cornell:/store/user/name cmssh> cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha \"\"\" check_voms_proxy() background", "arg.split()[-1] if os.path.exists(dst) or len(glob.glob(dst)): cmd = \"rm %s\" % arg run(cmd) else:", "msg_red, msg_green, msg_blue from cmssh.iprint import print_warning, print_error, print_status, print_info from cmssh.filemover import", "not os.path.exists(dst): print_error('File %s does not exists' % dst) else: raise Exception('Not implemented", "'.': dst = os.getcwd() # check if src still has options and user", "msg def cms_help(arg=None): \"\"\" cmshelp command Examples: cmssh> cmshelp cmssh> cmshelp commands cmssh>", "command within CMSSW environment \"\"\" vdir = os.environ.get('VO_CMS_SW_DIR', None) arch = os.environ.get('SCRAM_ARCH', None)", "-x %s/bootstrap.sh setup -path %s -arch %s' % (swdir, swdir, arch) if unsupported_linux():", "in place status = check_release_arch(rel) if status != 'ok': msg = '\\nCheck release", "switch to given CMSSW release and setup its environment\\n' msg += msg_green('arch ')", "msg += '\\nQuery results are accessible via %s function, e.g.\\n' \\ % msg_blue('results()')", "storage element. 
Examples: cmssh> rmdir foo cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip()", "lfn] cmd_list += ['find lumi dataset=%s' % dataset, 'find lumi {\"190704\":[1,2,3]}', 'find lumi", "for given architecture\" apt_dir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/external/apt' % arch) dirs = os.listdir(apt_dir)", "one' if user_input(msg, default='N'): with open('crab.cfg', 'w') as config: config.write(crabconfig()) msg = 'Your", "gen = CMSMGR.lookup(args[0].strip()) for flt in args[1:]: res = apply_filter(flt.strip(), gen) RESMGR.assign(res) list_results(res,", "== '0' or arg == 'None' or arg == 'False': if os.environ.has_key('CMSSH_PAGER'): del", "not available' else: doc = cms_help_msg() print doc def cms_rm(arg): \"\"\" CMS rm", "and par[0] == '-': opts.append(par) return opts class Magic(object): \"\"\" Class to be", "mkdir foo cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug = get_ipython().debug", "overwrite = True else: overwrite = False except: traceback.print_exc() return try: debug =", "cms_jobs(arg=None): \"\"\" cmssh jobs command lists local job queue or provides information about", "'Please check the you provide correct release name,' msg += ' e.g. 
CMSSW_X_Y_Z<_patchN>'", "% sename, 'mkdir %s/foo' % sename, 'ls %s' % sename, 'rmdir %s/foo' %", "dst, debug, background, overwrite) print_status(status) except: traceback.print_exc() def cms_architectures(arch_type=None): \"Return list of CMSSW", "print_info(\"Debug level is %s\" % DEBUG.level) def debug_http(arg): \"\"\" Show or set HTTP", "cmshelp ls \"\"\" if arg: if arg.strip() == 'commands': cms_commands() return ipython =", "and access2file(fname): with open(fname, 'r') as cms_json: print cms_json.read() def integration_tests(_arg): \"Run series", "'jobs', 'ls'] cmd_list += ['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] mgr = get_ipython() for item in cmd_list:", "% (base, rel_arch, rel) os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel) os.environ['CMSSW_RELEASE_BASE'] = path for pkg", "= 0 arg = arg.replace('dataset=', '').replace('file=', '').replace('block=', '') arg = arg.replace('lfn=', '').replace('run=', '')", "arg.replace('site=', '') print_info('Dashboard information, site=%s' % site) res = jobsummary({'site': site}) elif pat_user.match(arg):", "'') if not userdn: cmd = \"voms-proxy-info -identity\" stdout, stderr = execmd(cmd) os.environ['USER_DN']", "on local filesystem or remote CMS storage element. Examples: cmssh> mkdir foo cmssh>", "= 'ls ' + orig_arg run(cmd, shell=True) elif pat_se.match(arg): arg = arg.replace('site=', '')", "for %s release ...\" % rel cmsrel(rel) def cmsenv(_arg): \"cmsenv command\" # in", "provide bug description\" print_error(msg) return if not user_input('Send this ticket', default='N'): print_info('Aborting your", "release name,' msg += ' e.g. 
CMSSW_X_Y_Z<_patchN>' print msg return # check if", "os.environ['CMSSH_PAGER'] else: os.environ['CMSSH_PAGER'] = arg print \"Set CMSSH pager to %s\" % arg", "% dataset, 'ls run=%s' % run, 'ls file=%s' % lfn] cmd_list += ['ls", "r, type(r)\\n' msg += '\\nList cmssh commands : ' + msg_blue('commands') msg +=", "arg == 'False': if os.environ.has_key('CMSSH_PAGER'): del os.environ['CMSSH_PAGER'] else: os.environ['CMSSH_PAGER'] = arg print \"Set", "of releases installed on a system\" _osname, osarch = osparameters() releases = []", "msg = 'In order to run crab command you must ' msg +=", "set to: %s\" % val print msg def dbs_instance(arg=None): \"\"\" cmssh command to", "ls local.file or ls /store/user/file.root\\n' msg += msg_green('rm ') + ' remove file/LFN,", "to read provided HTML page (by default output dumps via pager) Examples: cmssh>", "if check_os(rel_arch): output.append((rel_arch, status)) return output def check_release_arch(rel): \"Check release/architecture\" # check if", "cmd = \"scramv1 project CMSSW %s\" % rel run(cmd) os.chdir(os.path.join(rel, 'src')) # get", "res = site_info(arg, debug) elif pat_lfn.match(arg): arg = arg.replace('file=', '') arg = arg.replace('lfn=',", "\"\"\" cmssh info command provides information for given meta-data entity, e.g. 
dataset, block,", "dirs = os.listdir(apt_dir) dirs.sort() name = 'etc/profile.d/init.sh' script = os.path.join(os.path.join(apt_dir, dirs[-1]), name) return", "(aka voms-proxy-init)\\n' msg += msg_green('vomsinfo ') \\ + ' show your proxy info", "init.sh for given architecture\" apt_dir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/external/apt' % arch) dirs =", "name) return script def cms_install(rel): \"\"\" cmssh command to install given CMSSW release.", "dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\" if arg: arg = arg.strip() if pat_dataset.match(arg): reqmgr(arg.replace('dataset=', '')) def cms_lumi(arg):", "['external', 'lib']: link = '%s/%s/%s' % (path, lib, rel_arch) dst = '%s/install/lib/release_%s' %", "rel.strip() pat = pat_release if not pat.match(rel): msg = 'Fail to validate release", "archs = [] for name in os.listdir(os.environ['VO_CMS_SW_DIR']): if check_os(name) and name.find('.') == -1:", "executes voms-proxy-init on behalf of the user Examples: cmssh> vomsinit By default it", "and not pat.match(dst): if background: cmd = 'cp %s' % orig_arg subprocess.call(cmd, shell=True)", "'\\nList cmssh commands : ' + msg_blue('commands') msg += '\\ncmssh command help :", "working_pem(PEMMGR.pem) as key: run(\"voms-proxy-destroy\") cmd = \"voms-proxy-init -rfc -voms cms:/cms -key %s -cert", "set edm utils for given release ipython = get_ipython() rdir = '%s/bin/%s' %", "following list of options: - list, which lists local transfer jobs - site,", "idx, limit, debug, 'plain') def cms_das_json(query): \"\"\" cmssh command which queries DAS data-service", "= get_dashboardname(userdn) print_info('Dashboard information, user=%s' % user) res = jobsummary({'user': user}) elif pat_site.match(arg):", "releases: releases.sort() print \"\\nInstalled releases:\" for rel in releases: print rel else: msg", "= arg def cms_apt(arg=''): \"Execute apt commands\" if '-cache' in arg or '-get'", "options -rfc -voms cms:/cms -key <userkey.pem> 
-cert <usercert.pem> \"\"\" cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem')", "cert) run(cmd) userdn = os.environ.get('USER_DN', '') if not userdn: cmd = \"voms-proxy-info -identity\"", "get_tickets(arg) RESMGR.assign(res) pprint.pprint(res) def demo(_arg=None): \"Show cmssh demo file\" root = os.environ.get('CMSSH_ROOT') path", ": ' + msg_blue('cmshelp <command>') msg += '\\nInstall python software: ' + \\", "run=%s' % run, 'ls file=%s' % lfn] cmd_list += ['ls %s' % dataset,", "at give site or for given user. It accepts the following list of", "CMSSW commands (once you install any CMSSW release):\\n' msg += msg_green('releases ') \\", "jobs dashboard cmssh> jobs user=my_cms_user_name \"\"\" res = None try: debug = get_ipython().debug", "%s -name init.sh | tail -1`;' % pkg_dir if not os.path.isdir(pkg_dir): cmd =", "name \"%s\"' % rel print_error(msg) msg = 'Please check the you provide correct", "\"Check release/architecture\" # check if given release name is installed on user system", "pat_lfn.match(arg): arg = arg.replace('file=', '') arg = arg.replace('lfn=', '') res = file_info(arg, debug)", "<list> or <all> Examples: cmssh> releases # show installed CMSSW releases cmssh> releases", "debug) def cms_json(arg): \"Print or set location of CMS JSON file\" if arg:", "userdn = os.environ.get('USER_DN', None) if userdn: user = get_dashboardname(userdn) print_info('Dashboard information, user=%s' %", "#-*- coding: ISO-8859-1 -*- #pylint: disable-msg=W0702 \"\"\" Set of UNIX commands, e.g. 
ls,", "= '%s/%s rejected by user' % (rel, arch) output.append(msg) if output: return ',", "print_info(msg) # check if release version and work area are set (should be", "file dataset=/Cosmics/CRUZET3-v1/RAW csmsh> find site dataset=/Cosmics/CRUZET3-v1/RAW cmssh> find config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh> find run=160915", "!= 'ok': msg = '\\nCheck release architecture status: %s' % status print msg", "= os.environ.get('SCRAM_ARCH', None) if not vdir or not arch: msg = 'Unable to", "DBS instance, default is DBS global instance\\n' msg += msg_green('mkdir/rmdir ') + '", "% rel cmsrel(rel) def cmsenv(_arg): \"cmsenv command\" # in CMS cmsenv is an", "'block', 'run', 'release', 'file'] for item in entities: if arg.startswith(item + '='): startswith", "cmd_list += ['ls %s' % sename, 'mkdir %s/foo' % sename, 'ls %s' %", "check if release version and work area are set (should be set at", "be set at cmsrel) rel = os.environ.get('CMSSW_VERSION', None) work_area = os.environ.get('CMSSW_WORKAREA', None) if", "% (idir, pkg) os.symlink(link, dst) link = '%s/src/DataFormats/FWLite/python' % path dst = '%s/DataFormats/FWLite'", "modules import os import re import sys import time import json import glob", "platform, fix_so from cmssh.cmsfs import dataset_info, block_info, file_info, site_info, run_info from cmssh.cmsfs import", "command, it setups CMSSW environment and creates user based directory structure. Examples: cmssh>", "cmsenv(_arg): \"cmsenv command\" # in CMS cmsenv is an alias to: eval `scramv1", "['find lumi dataset=%s' % dataset, 'find lumi {\"190704\":[1,2,3]}', 'find lumi {190704:[1,2,3]}'] cmd_list +=", "copies local files/dirs to/from local files/dirs or CMS storate elements. 
Examples: cmssh> cp", "arg = arg.replace('release=', '') res = release_info(arg, debug) elif startswith: msg = 'No", "area are set (should be set at cmsrel) rel = os.environ.get('CMSSW_VERSION', None) work_area", "%s\" % os.environ.get('HTTPDEBUG', 0)) def cms_find(arg): \"\"\" Perform lookup of given query in", "None) root_path = os.environ['DEFAULT_ROOT'] if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path, 'lib') cmd = '%s/xrdcp", "IndexError: msg = \"Given pattern '%s' does not exist on local filesystem or", "= func.func_doc break else: doc = 'Documentation is not available' else: doc =", "arg == '': print_info(\"Verbose level is %s\" % ipth.debug) else: if arg ==", "cmd_list += ['find config dataset=%s' % dataset2] cmd_list += ['du T3_US_Cornell', 'ls T3_US_Cornell']", "= arg.replace('site=', '') print_info('Dashboard information, site=%s' % site) res = jobsummary({'site': site}) elif", "def demo(_arg=None): \"Show cmssh demo file\" root = os.environ.get('CMSSH_ROOT') path = os.path.join(root, 'cmssh/DEMO')", "file:///%s' % fname else: cmd = '-e -f %s' % fname ipython.run_line_magic('edmFileUtil', cmd)", "% crab_dir print_warning(msg) msg = 'Would you like to create one' if user_input(msg,", "lists jobs at given site - dashboard, which lists jobs of current user", "= raw_input('Your Email : ') if not email: msg = \"You did your", "%s' % (self.cmd, args.strip()) run(cmd) def subprocess(self, args=''): \"Execute given command in original", "from cmssh.cmssw_utils import crab_submit_remotely, crabconfig from cmssh.cern_html import read from cmssh.dashboard import jobsummary", "= 'source %s; apt-get install cms+cmssw+%s' % (script, rel) subprocess.call(cmd, shell=True) # use", "print msg return # check if given release/architecture is in place status =", "msg def dbs_instance(arg=None): \"\"\" cmssh command to show or set DBS instance Examples:", "find dataset=/ZMM* cmssh> find file dataset=/Cosmics/CRUZET3-v1/RAW csmsh> find site 
dataset=/Cosmics/CRUZET3-v1/RAW cmssh> find config", "in pkgs: link = '%s/src/FWCore/%s/python' % (path, pkg) dst = '%s/FWCore/%s' % (idir,", "T3_US_Cornell:/store/user/valya cmssh> ls run=160915 \"\"\" arg = arg.strip() res = [] try: debug", "or for given user. It accepts the following list of options: - list,", "% dataset, 'info %s' % dataset] cmd_list += ['find dataset=/ZMM*', 'das dataset=/ZMM*', 'find", "ipython.find_line_magic('edmFileUtil') if magic: if arg[0] == '/': cmd = '-e -f file:///%s' %", "if arg[0] == '/': cmd = '-e -f file:///%s' % fname else: cmd", "rel cmd = 'source %s; apt-get install cms+cmssw-patch+%s' % (script, rel) else: print", "{190704:[1,2,3,4], 201706:[1,2,3,67]} \"\"\" try: debug = get_ipython().debug except: debug = 0 arg =", "file, run. Examples: cmssh> info dataset=/a/b/c cmssh> info /a/b/c cmssh> info run=160915 cmssh>", "or arg == 'list': if arg == 'all': print 'CMSSW architectures:' else: print", "DAS data-service with provided query. Examples: cmssh> das dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch'", "arg: print \"Current architecture: %s\" % os.environ['SCRAM_ARCH'] archs = [] for name in", "% (pcre_init, gcc_init, root_init) cmd = '%s root -l %s' % (pkgs_init, arg.strip())", "mkdir/rmdir command, ' \\ + 'e.g. mkdir /path/foo or rmdir T3_US_Cornell:/store/user/foo\\n' msg +=", "= '%s|\\#> ' % prompt return # check if given release name is", "in arg or '-get' in arg: cmd = 'apt%s' % arg else: msg", "pkg_init('external/gcc') root_init = pkg_init('lcg/root') pkgs_init = '%s %s %s' % (pcre_init, gcc_init, root_init)", "'source %s; apt-get install cms+cmssw+%s' % (script, rel) subprocess.call(cmd, shell=True) # use subprocess", "overwrite) print_status(status) except: traceback.print_exc() def cms_architectures(arch_type=None): \"Return list of CMSSW architectures (aka SCRAM_ARCH)\"", "'e.g. 
rm local.file or rm T3_US_Cornell:/store/user/file.root\\n' msg += msg_green('cp ') \\ + '", "% arg else: print \"Invalid DBS instance\" else: msg = \"DBS instance is", "search %s | grep -v -i fwlite' % (script, rel) run(cmd) if rel.lower().find('patch')", "+ ' query DAS and return data in JSON format\\n' msg += msg_green('jobs", "cmssh> lumi run=190704 cmssh> lumi {190704:[1,2,3,4], 201706:[1,2,3,67]} \"\"\" try: debug = get_ipython().debug except:", "= [] for par in arg.split(): if len(par) > 0 and par[0] ==", "+= ['du T3_US_Cornell', 'ls T3_US_Cornell'] cmd_list += ['ls %s' % sename, 'mkdir %s/foo'", "pat_dataset, pat_block from cmssh.regex import pat_lfn, pat_run, pat_se, pat_user from cmssh.tagcollector import architectures", "args = '' else: cmd = split[0] args = split[-1] mgr.run_line_magic(cmd, args) def", "= arg.replace('site=', '') res = list_se(arg, debug) elif pat_site.match(arg): arg = arg.replace('site=', '')", "except: debug = 0 if not arg: print_error(\"Usage: rmdir <options> dir\") if os.path.exists(arg):", "rejected by user' % (rel, arch) output.append(msg) if output: return ', '.join(output) osname,", "dataset=/a/b/c cmssh> info /a/b/c cmssh> info run=160915 cmssh> info local_file.root Please note: to", "\\ + ' copy file/LFN, e.g. 
cp local.file or cp /store/user/file.root .\\n' msg", "# we use Magic(cmd).execute we don't need # to add scramv1 command in", "' query DAS service\\n' msg += msg_green('das_json ') \\ + ' query DAS", "% msg_blue('results()') msg += ' find dataset=/*Zee*\\n' msg += ' for r in", "dst)) else: try: status = copy_lfn(orig, dst, debug, background, overwrite) print_status(status) except: traceback.print_exc()", "releases = get_data(tc_url('py_getReleaseArchitectures'), args) output = [] for item in releases: rel_arch =", "arg[1:-1] for case in [arg, 'cms_'+arg, 'cms'+arg]: func = ipython.find_magic(case) if func: doc", "demo_file: print demo_file.read() def results(): \"\"\"Return results from recent query\"\"\" return RESMGR def", "required to use cmsenv\\n' msg += 'please use ' + msg_green('cmsrel') + '", "arg: if arg.strip() == 'commands': cms_commands() return ipython = get_ipython() if arg[0] ==", "from cmssh.cmsfs import CMSMGR, apply_filter, validate_dbs_instance from cmssh.cmsfs import release_info, run_lumi_info from cmssh.github", "ROOT within cmssh Examples: cmssh> root -l \"\"\" pcre_init = pkg_init('external/pcre') gcc_init =", "runtime -sh`' msg = 'Within cmssh it is not required to use cmsenv\\n'", "+= 'run ' + msg_blue('cmsrel') + ' command' print_error(msg) return # check existence", "access2file(arg): os.environ['CMS_JSON'] = arg print_info('CMS_JSON: %s' % arg) else: fname = os.environ.get('CMS_JSON') print_info('CMS", "lfn, 'ls', 'cp file.root %s' % sename, 'ls %s' % sename, 'rm %s/file.root'", "not os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir) root = os.environ['CMSSH_ROOT'] idir = os.environ['CMSSH_INSTALL_DIR'] base = os.path.realpath('%s/CMSSW' %", "cmd = item args = '' else: cmd = split[0] args = split[-1]", "0 and par[0] == '-': opts.append(par) return opts class Magic(object): \"\"\" Class to", "% (script, rel) else: print \"Installing cms+cmssw+%s ...\" % rel cmd = 'source", "arg: return try: debug = get_ipython().debug except: debug = 0 
fname = arg.replace('file=',", "edm one, since # execute method will run in current shell environment #", "installed architecture(s) cmssh> arch all # show all known CMSSW architectures cmssh> arch", "its environment\\n' msg += msg_green('arch ') \\ + ' show or switch to", "Exception(msg) print \"Switch to SCRAM_ARCH=%s\" % arg os.environ['SCRAM_ARCH'] = arg def cms_apt(arg=''): \"Execute", "msg_green('scram ') + ' CMSSW scram command\\n' msg += msg_green('cmsRun ') \\ +", "verbose=debug) print_status(status) except: traceback.print_exc() def cms_ls(arg): \"\"\" cmssh ls command lists local files/dirs/CMS", "'ls ' + orig_arg run(cmd, shell=True) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def", "releases = [] for idir in os.listdir(os.environ['VO_CMS_SW_DIR']): if idir.find(osarch) != -1: rdir =", "= 'Within cmssh it is not required to use cmsenv\\n' msg += 'please", "+ msg_red(rel) msg += ' is not yet installed on your system.\\n' msg", "os.environ['SCRAM_ARCH'] = arg def cms_apt(arg=''): \"Execute apt commands\" if '-cache' in arg or", "pkg_init('external/pcre') gcc_init = pkg_init('external/gcc') root_init = pkg_init('lcg/root') pkgs_init = '%s %s %s' %", "rel) os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel) os.environ['CMSSW_RELEASE_BASE'] = path for pkg in ['FWCore', 'DataFormats']:", "if arch != os.environ['SCRAM_ARCH']: msg = 'Your SCRAM_ARCH=%s, while found arch=%s' \\ %", "create one' if user_input(msg, default='N'): with open('crab.cfg', 'w') as config: config.write(crabconfig()) msg =", "in JSON data format Examples: cmssh> das_json dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx", "%s\" % cmd run(cmd, shell=True, call=True) def cmscrab(arg): \"\"\" Execute CRAB command, help", "at cmsrel) rel = os.environ.get('CMSSW_VERSION', None) work_area = os.environ.get('CMSSW_WORKAREA', None) if not rel", "cmssh ls command lists local files/dirs/CMS storate elements or CMS entities (se, site,", "SCRAM_ARCH)\" archs = [a for a 
in tc_architectures(arch_type)] return archs def cms_arch(arg=None): \"\"\"", "shell environment # old command for reference: # cmd = \"eval `scramv1 runtime", "CMSSW release installed on your system.\" msg += \"\\nPlease use \" + msg_green('install", "old command for reference: # cmd = \"eval `scramv1 runtime -sh`; %s\" %", "%s does not exists' % dst) else: raise Exception('Not implemented yet') def cms_rmdir(arg):", "arch: msg = 'Unable to identify CMSSW environment, please run first: ' msg", "cms_help_msg(): \"\"\"cmsHelp message\"\"\" msg = 'Available cmssh commands:\\n' msg += msg_green('find ') \\", "[arg, 'cms_'+arg, 'cms'+arg]: func = ipython.find_magic(case) if func: doc = func.func_doc break else:", "(pkgs_init, arg.strip()) run(cmd) def cms_xrdcp(arg): \"\"\" cmssh command to run ROOT xrdcp via", "environment\" cmd = '%s %s' % (self.cmd, args.strip()) subprocess.call(cmd, shell=True) def installed_releases(): \"Print", "SCRAM_ARCH=%s, while found arch=%s' \\ % (os.environ['SCRAM_ARCH'], arch) print_warning(msg) msg = '\\n%s/%s is", "script = get_apt_init(os.environ['SCRAM_ARCH']) cmd = 'source %s; apt-cache search %s | grep -v", "in os.listdir(os.environ['VO_CMS_SW_DIR']): if check_os(name) and name.find('.') == -1: archs.append(name) if archs: print '\\nInstalled", "if we have stand-alone installation if os.environ.get('CMSSH_CMSSW', None): msg = '\\nYou are not", "elif startswith: msg = 'No pattern is allowed for %s look-up' % startswith", "lib, rel_arch) dst = '%s/install/lib/release_%s' % (root, lib) if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst)", "to nill \"\"\" arg = arg.strip() if arg: if arg == '0' or", "\"\"\" Perform lookup of given query in CMS data-services. \"\"\" arg = arg.strip()", "msg += msg_green('install ') \\ + ' install CMSSW release, e.g. 
install CMSSW_5_0_0\\n'", "'source %s; apt-get install cms+cmssw-patch+%s' % (script, rel) else: print \"Installing cms+cmssw+%s ...\"", "&' % lfn, 'cp %s file2.root &' % lfn2, 'ls'] cmd_list += ['find", "'-e -f %s' % fname ipython.run_line_magic('edmFileUtil', cmd) if debug: if ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent', fname)", "information about jobs at give site or for given user. It accepts the", "%s' % sename, 'ls %s' % sename, 'rm %s/file.root' % sename, 'ls %s'", "os.path.isfile(orig_arg) or os.path.isdir(orig_arg): cmd = 'ls ' + orig_arg run(cmd, shell=True) elif pat_se.match(arg):", "das_client(host, query, idx, limit, debug, 'json') RESMGR.assign([res]) pprint.pprint(res) def cms_vomsinit(_arg=None): \"\"\" cmssh command", "'%s/%s rejected by user' % (rel, arch) output.append(msg) if output: return ', '.join(output)", "= arg.strip() try: debug = get_ipython().debug except: debug = 0 if not arg:", "= get_ipython().debug except: debug = 0 orig_arg = arg if orig_arg.find('>') != -1:", "run(cmd) def cms_xrdcp(arg): \"\"\" cmssh command to run ROOT xrdcp via cmssh shell", "email = raw_input('Your Email : ') if not email: msg = \"You did", "'OSX') for name in cms_architectures('all'): if arg == 'all': print name else: if", "= '%s/%s/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir) cmd = 'source `find %s -name", "% dataset2] cmd_list += ['du T3_US_Cornell', 'ls T3_US_Cornell'] cmd_list += ['ls %s' %", "pattern is allowed for %s look-up' % startswith print_error(msg) else: cmd = 'ls", "cmd run(cmd, shell=True, call=True) def cmscrab(arg): \"\"\" Execute CRAB command, help is available", "runtime -sh`; %s\" % fname cmd = fname ipython.register_magic_function(Magic(cmd).execute, 'line', name) # Set", "= cmd.strip() subprocess.call(cmd, shell=True) def lookup(arg): \"\"\" Perform lookup of given query in", "cmd_list += ['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] 
mgr = get_ipython() for item in cmd_list: print_info(\"Execute %s\"", "+= msg_green('find ') \\ + ' search CMS meta-data (query DBS/Phedex/SiteDB)\\n' msg +=", "new architecture\" swdir = os.environ['VO_CMS_SW_DIR'] arch = os.environ['SCRAM_ARCH'] cmd = 'sh -x %s/bootstrap.sh", "install CMSSW release, e.g. install CMSSW_5_0_0\\n' msg += msg_green('cmsrel ') \\ + '", "lfn2 = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run = 160915", "os.environ.get('USER_DN', '') if not userdn: cmd = \"voms-proxy-info -identity\" stdout, stderr = execmd(cmd)", "= \"voms-proxy-info -identity\" stdout, stderr = execmd(cmd) os.environ['USER_DN'] = stdout.replace('\\n', '') def github_issues(arg=None):", "from cmssh.cmsfs import release_info, run_lumi_info from cmssh.github import get_tickets, post_ticket from cmssh.cms_urls import", "print rel else: msg = \"\\nYou don't have yet CMSSW release installed on", "crab command you must ' msg += 'run ' + msg_blue('cmsrel') + '", "' + msg_blue('cmsrel') + ' command' print_error(msg) return # check existence of crab.cfg", "'ls -l', 'rmdir ttt', 'ls'] cmd_list += ['ls dataset=%s' % dataset, 'ls run=%s'", "'site', 'lfn', 'dataset', 'block', 'run', 'release', 'file'] for item in entities: if arg.startswith(item", "cmssh.reqmgr import reqmgr from cmssh.cms_objects import get_dashboardname def options(arg): \"\"\"Extract options from given", "problem, attach traceback, etc. 
Once done print ' msg += msg_blue('EOF') + '", "rel # final message print \"%s is ready, cwd: %s\" % (rel, os.getcwd())", "\"Given pattern '%s' does not exist on local filesystem or in DBS\" %", "init.sh | tail -1`;' % pkg_dir if not os.path.isdir(pkg_dir): cmd = '' return", "'crab') crab_cfg = os.path.join(crab_dir, 'crab.cfg') if not os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir) if not os.path.isfile(crab_cfg):", "info run=160915\\n' msg += msg_green('das ') + ' query DAS service\\n' msg +=", "for given user. It accepts the following list of options: - list, which", "sys import time import json import glob import shutil import base64 import pprint", "rel in releases: print rel installed_releases() def pkg_init(pkg_dir): \"Create CMS command to source", "apply_filter(flt.strip(), gen) RESMGR.assign(res) list_results(res, debug) def verbose(arg): \"\"\" Set/get verbosity level \"\"\" arg", "list', 'arch list', 'jobs', 'ls'] cmd_list += ['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] mgr = get_ipython() for", "'\\nInstall python software: ' + \\ msg_blue('pip <search|(un)install> <package>') return msg def cms_help(arg=None):", "except: debug = 0 orig_arg = arg if orig_arg.find('|') != -1: arg, flt", "res = None try: debug = get_ipython().debug except: debug = 0 orig_arg =", "to show or set DBS instance Examples: cmssh> dbs_instance cmssh> dbs_instance cms_dbs_prod_global \"\"\"", "user Examples: cmssh> jobs cmssh> jobs list cmssh> jobs site=T2_US_UCSD cmssh> jobs dashboard", "- list, which lists local transfer jobs - site, which lists jobs at", "import CMSMGR, apply_filter, validate_dbs_instance from cmssh.cmsfs import release_info, run_lumi_info from cmssh.github import get_tickets,", "environment # old command for reference: # cmd = \"eval `scramv1 runtime -sh`;", "release, ' msg += 'since cmssh was installed with system CMSSW install area'", "the you provide correct release name,' msg += ' e.g. 
CMSSW_X_Y_Z<_patchN>' print msg", "disable-msg=W0702 \"\"\" Set of UNIX commands, e.g. ls, cp, supported in cmssh. \"\"\"", "[] for arch, status in get_release_arch(rel): if not status: msg = '%s release", "file found in %s' % crab_dir print_warning(msg) msg = 'Would you like to", "please run first: ' msg = msg_red(msg) msg += msg_blue('cmsrel <rel>\\n') releases =", "cmsexe(cmd): \"\"\" Execute given command within CMSSW environment \"\"\" vdir = os.environ.get('VO_CMS_SW_DIR', None)", "desc}} res = post_ticket(key, files) if res.has_key('html_url'): print_status('New gist ticket %s' % res['html_url'])", "from cmssh.dashboard import jobsummary from cmssh.reqmgr import reqmgr from cmssh.cms_objects import get_dashboardname def", "of given user Examples: cmssh> jobs cmssh> jobs list cmssh> jobs site=T2_US_UCSD cmssh>", "a method to execute it in a shell \"\"\" def __init__(self, cmd): self.cmd", "% sename, 'rm file.root', 'cp %s file1.root &' % lfn, 'cp %s file2.root", "limit, debug, 'json') RESMGR.assign([res]) pprint.pprint(res) def cms_vomsinit(_arg=None): \"\"\" cmssh command which executes voms-proxy-init", "releases.append('%s/%s' % (rel, idir)) if releases: releases.sort() print \"\\nInstalled releases:\" for rel in", "res = run_lumi_info(arg, debug) def cms_json(arg): \"Print or set location of CMS JSON", "command, help is available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\" msg = \\ 'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq'", "except: debug = 0 if not arg: print_error(\"Usage: cp <options> source_file target_{file,directory}\") pat", "= \"cmssh pager is set to: %s\" % val print msg def dbs_instance(arg=None):", "place status = check_release_arch(rel) if status != 'ok': msg = '\\nCheck release architecture", "from cmssh.cmsfs import dataset_info, block_info, file_info, site_info, run_info from cmssh.cmsfs import CMSMGR, apply_filter,", "'dataset', 'block', 'run', 'release', 'file'] for item in 
entities: if arg.startswith(item + '='):", "have stand-alone installation if os.environ.get('CMSSH_CMSSW', None): msg = '\\nYou are not allowed to", "show all known CMS releases, including online, tests, etc. \"\"\" if arg: print", "os.path.isdir(os.path.join(cmssw_dir, rel + '/src')): os.chdir(os.path.join(cmssw_dir, rel + '/src')) else: os.chdir(cmssw_dir) cmd = \"scramv1", "os.environ.get('SCRAM_ARCH', None) if not vdir or not arch: msg = 'Unable to identify", "def cmsenv(_arg): \"cmsenv command\" # in CMS cmsenv is an alias to: eval", "'jobs user=AikenOliver'] cmd_list += ['releases list', 'arch list', 'jobs', 'ls'] cmd_list += ['read", "read(arg, out, debug) def cms_releases(arg=None): \"\"\" List available CMS releases. Optional parameters either", "- user, which lists jobs of given user Examples: cmssh> jobs cmssh> jobs", "\\ + ' provides detailed info about given CMS entity, ' \\ +", "ipython magic functions. It holds given command and provide a method to execute", "return key = '<KEY>' % time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime(time.time())) files = {key: {'content': desc}}", "% cmd run(cmd, shell=True, call=True) def cmscrab(arg): \"\"\" Execute CRAB command, help is", "not arch: msg = 'Unable to identify CMSSW environment, please run first: '", "dataset=/*Zee*\\n' msg += ' for r in results(): print r, type(r)\\n' msg +=", "doc = 'Documentation is not available' else: doc = cms_help_msg() print doc def", "page (by default output dumps via pager) Examples: cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh> read", "['ls dataset=%s' % dataset, 'ls run=%s' % run, 'ls file=%s' % lfn] cmd_list", "def cms_vomsinit(_arg=None): \"\"\" cmssh command which executes voms-proxy-init on behalf of the user", "%s\" % os.environ['SCRAM_ARCH'] archs = [] for name in os.listdir(os.environ['VO_CMS_SW_DIR']): if check_os(name) and", "by user' % (rel, arch) output.append(msg) if output: return ', 
'.join(output) osname, osarch", "\\ + ' install CMSSW release, e.g. install CMSSW_5_0_0\\n' msg += msg_green('cmsrel ')", "= pkg_init('external/pcre') gcc_init = pkg_init('external/gcc') root_init = pkg_init('lcg/root') pkgs_init = '%s %s %s'", "msg_red(msg) msg += msg_blue('cmsrel <rel>\\n') releases = os.listdir(os.environ['CMSSW_RELEASES']) msg += '\\nInstalled releases: '", "CMSMGR.lookup(args[0].strip()) for flt in args[1:]: res = apply_filter(flt.strip(), gen) RESMGR.assign(res) list_results(res, debug) def", "get_ipython().debug except: debug = 0 fname = arg.replace('file=', '') if arg and os.path.isfile(fname):", "msg_green(', '.join(releases)) print msg return cmd = \"eval `scramv1 runtime -sh`; %s\" %", "T3_US_Cornell:/store/user/foo\\n' msg += msg_green('ls ') \\ + ' list file/LFN, e.g. ls local.file", "from cmssh.results import RESMGR from cmssh.auth_utils import PEMMGR, working_pem from cmssh.cmssw_utils import crab_submit_remotely,", "e.g.\\n' \\ % msg_blue('results()') msg += ' find dataset=/*Zee*\\n' msg += ' for", "dst) else: raise Exception('Not implemented yet') def cms_rmdir(arg): \"\"\" cmssh rmdir command removes", "arch run(cmd, sdir, msg=msg, debug=debug, shell=True) def get_release_arch(rel): \"Return architecture for given CMSSW", "src, dst = arg.rsplit(' ', 1) if dst.find('&') != -1: background = True", "the following options -rfc -voms cms:/cms -key <userkey.pem> -cert <usercert.pem> \"\"\" cert =", "or set location of CMS JSON file\" if arg: if access2file(arg): os.environ['CMS_JSON'] =", "= arg.strip() if arg: if arg not in ['0', '1']: print_error('Please provide 0/1", "user - user, which lists jobs of given user Examples: cmssh> jobs cmssh>", "for rel in os.listdir(rdir): releases.append('%s/%s' % (rel, idir)) if releases: releases.sort() print \"\\nInstalled", "def cms_cp(arg): \"\"\" cmssh cp command copies local files/dirs to/from local files/dirs or", "block, run, release, file). 
Examples: cmssh> ls # UNIX command cmssh> ls -l", "+ ' read URL/local file content\\n' msg += msg_green('root ') + ' invoke", "= osparameters() releases = [] for idir in os.listdir(os.environ['VO_CMS_SW_DIR']): if idir.find(osarch) != -1:", "[str(r) for r in res] releases = list(set(releases)) releases.sort() for rel in releases:", "= '%s/bin/%s' % (rel_dir, rel_arch) reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir) for name in os.listdir(reldir):", "shutil import base64 import pprint import mimetypes import traceback import subprocess # cmssh", "elements or CMS entities (se, site, dataset, block, run, release, file). Examples: cmssh>", "os.remove(fname) lfn = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2 = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2", "= 0 if not arg: print_error(\"Usage: rm <options> source_file\") dst = arg.split()[-1] if", "shell=True) elif pat_se.match(arg): arg = arg.replace('site=', '') res = list_se(arg, debug) elif pat_site.match(arg):", "res = list_se(arg, debug) elif pat_site.match(arg): arg = arg.replace('site=', '') res = site_info(arg,", "\"\"\" arg = arg.strip() try: debug = get_ipython().debug except: debug = 0 if", "JSON format\\n' msg += msg_green('jobs ') \\ + ' status of job queue", "') \\ + ' list available CMSSW releases, accepts <list|all> args\\n' msg +=", "import print_warning, print_error, print_status, print_info from cmssh.filemover import copy_lfn, rm_lfn, mkdir, rmdir, list_se,", "in cmd_list: print_info(\"Execute %s\" % item) split = item.split(' ', 1) if len(split)", "run. 
Examples: cmssh> info dataset=/a/b/c cmssh> info /a/b/c cmssh> info run=160915 cmssh> info", "jobs command lists local job queue or provides information about jobs at give", "cms_pager(arg=None): \"\"\" cmssh command to show or set internal pager Examples: cmssh> pager", "else: raise Exception('Not implemented yet') def cms_rmdir(arg): \"\"\" cmssh rmdir command removes directory", "cmssh> das_json dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx = 0 limit = 0", "-i fwlite' % (script, rel) run(cmd) if rel.lower().find('patch') != -1: print \"Installing cms+cmssw-patch+%s", "debug) elif pat_block.match(arg): arg = arg.replace('block=', '') res = block_info(arg, debug) elif pat_dataset.match(arg):", "not provide bug description\" print_error(msg) return if not user_input('Send this ticket', default='N'): print_info('Aborting", "pat_block from cmssh.regex import pat_lfn, pat_run, pat_se, pat_user from cmssh.tagcollector import architectures as", "msg_green('dbs_instance') \\ + ' show/set DBS instance, default is DBS global instance\\n' msg", "anonymous gist ticket' print_info(msg) if not user_input('Proceed', default='N'): return email = raw_input('Your Email", "'rm file.root', 'cp %s file1.root &' % lfn, 'cp %s file2.root &' %", "ipython.run_line_magic('edmDumpEventContent', fname) else: cms_ls(arg) def cms_cp(arg): \"\"\" cmssh cp command copies local files/dirs", "rel.strip() if not rel or rel in ['reset', 'clear', 'clean']: path = os.environ['CMSSH_ROOT']", "in arg.split(): if len(par) > 0 and par[0] == '-': opts.append(par) return opts", "+= ' -unsupported_distribution_hack' sdir = os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW') debug = 0 msg = 'Bootstrap", "= os.path.join(root_path, 'lib') cmd = '%s/xrdcp %s' % (os.path.join(root_path, 'bin'), arg.strip()) run(cmd) if", "= \"eval `scramv1 runtime -sh`; %s\" % cmd run(cmd, shell=True, call=True) def cmscrab(arg):", "rel cmd = 'source %s; apt-get install cms+cmssw+%s' % (script, rel) subprocess.call(cmd, 
shell=True)", "https://github.com/vkuznet/cmssh/issues/new \"\"\" if arg == 'new': msg = 'You can post new ticket", "(once you install any CMSSW release):\\n' msg += msg_green('releases ') \\ + '", "cmd_list += ['find dataset=/ZMM*', 'das dataset=/ZMM*', 'find dataset file=%s' % lfn] cmd_list +=", "for you' print_info(msg) def cmsrel(rel): \"\"\" cmssh release setup command, it setups CMSSW", "jobs\\n' msg += msg_green('read ') \\ + ' read URL/local file content\\n' msg", "background, overwrite) print_status(status) except: traceback.print_exc() def cms_architectures(arch_type=None): \"Return list of CMSSW architectures (aka", "in cms_archs: msg = 'Wrong architecture, please choose from the following list\\n' msg", "via pager) Examples: cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh> read config.txt \"\"\"", "') \\ + ' copy file/LFN, e.g. cp local.file or cp /store/user/file.root .\\n'", "\"\"\" try: debug = get_ipython().debug except: debug = 0 orig_arg = arg if", "= orig_arg.split('|', 1) arg = arg.strip() else: flt = None startswith = None", "\"\"\" if not arg: print \"Current architecture: %s\" % os.environ['SCRAM_ARCH'] archs = []", "all # show all known CMSSW architectures cmssh> arch list # show all", "install any CMSSW release):\\n' msg += msg_green('releases ') \\ + ' list available", "'.join(output) osname, osarch = osparameters() if osname == 'osx' and osarch == 'ia32':", "cmsrel to setup CMSSW environment. \"\"\" cmd = 'cmsRun %s' % arg cmsexe(cmd)", "block, file, run. Examples: cmssh> info dataset=/a/b/c cmssh> info /a/b/c cmssh> info run=160915", "cms:/cms -key %s -cert %s\" % (key, cert) run(cmd) userdn = os.environ.get('USER_DN', '')", "CMSSW cmsRun command. Requires cmsrel to setup CMSSW environment. 
\"\"\" cmd = 'cmsRun", "query DAS service\\n' msg += msg_green('das_json ') \\ + ' query DAS and", "dir\") if arg.find(':') == -1: # not a SE:dir pattern run(\"mkdir %s\" %", "if access2file(arg): os.environ['CMS_JSON'] = arg print_info('CMS_JSON: %s' % arg) else: fname = os.environ.get('CMS_JSON')", "!= -1: rdir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/cms/cmssw' % idir) if os.path.isdir(rdir): for rel", "# check if src still has options and user asked for -f options", "fname in ['file1.root', 'file2.root']: if os.path.isfile(fname): os.remove(fname) lfn = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2 =", "= get_ipython() if arg == '': print_info(\"Verbose level is %s\" % ipth.debug) else:", "try: status = mkdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_ls(arg): \"\"\" cmssh ls", "not user_input('Proceed', default='N'): return email = raw_input('Your Email : ') if not email:", "for item in cmd_list: print_info(\"Execute %s\" % item) split = item.split(' ', 1)", "run(cmd) if rel.lower().find('patch') != -1: print \"Installing cms+cmssw-patch+%s ...\" % rel cmd =", "like to create one' if user_input(msg, default='N'): with open('crab.cfg', 'w') as config: config.write(crabconfig())", "copy_lfn, rm_lfn, mkdir, rmdir, list_se, dqueue from cmssh.utils import list_results, check_os, unsupported_linux, access2file", "dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> find lumi run=190704 cmssh> find user=oliver List of supported entities: dataset,", "(rel_dir, rel_arch) reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir) for name in os.listdir(reldir): fname = os.path.join(reldir,", "command creates directory on local filesystem or remote CMS storage element. 
Examples: cmssh>", "+= msg_green('dbs_instance') \\ + ' show/set DBS instance, default is DBS global instance\\n'", "return try: debug = get_ipython().debug except: debug = 0 fname = arg.replace('file=', '')", "find dataset=/*Zee*\\n' msg += ' for r in results(): print r, type(r)\\n' msg", "or set CMSSW architecture. Optional parameters either <all> or <list> Examples: cmssh> arch", "None): msg = '\\nYou are not allowed to install new release, ' msg", "% pkg_dir if not os.path.isdir(pkg_dir): cmd = '' return cmd def cms_root(arg): \"\"\"", "['find user=oliver', 'jobs list', 'jobs user=AikenOliver'] cmd_list += ['releases list', 'arch list', 'jobs',", "cmssh> info run=160915 cmssh> info local_file.root Please note: to enable access to RunSummary", "== 'osx' and osarch == 'ia32': return 'OSX/ia32 is not supported in CMSSW'", "list(set(releases)) releases.sort() for rel in releases: print rel installed_releases() def pkg_init(pkg_dir): \"Create CMS", "registered cmssh commands in current shell. Examples: cmssh> cmshelp commands \"\"\" mdict =", "+ '\\n' except KeyboardInterrupt: break if not desc: msg = \"You did not", "platform cmssh> releases all # show all known CMS releases, including online, tests,", "= os.environ.get('VO_CMS_SW_DIR', None) arch = os.environ.get('SCRAM_ARCH', None) if not vdir or not arch:", "= os.environ['SCRAM_ARCH'] cmd = 'sh -x %s/bootstrap.sh setup -path %s -arch %s' %", "arg.replace('file=', '') arg = arg.replace('lfn=', '') res = file_info(arg, debug) elif pat_block.match(arg): arg", "'ls %s' % sename, 'rmdir %s/foo' % sename, 'ls %s' % sename, ]", "command which queries DAS data-service with provided query and returns results in JSON", "rel installed_releases() def pkg_init(pkg_dir): \"Create CMS command to source pkg environment\" pkg_dir =", "') + ' mkdir/rmdir command, ' \\ + 'e.g. 
mkdir /path/foo or rmdir", "+= ['ls %s' % sename, 'mkdir %s/foo' % sename, 'ls %s' % sename,", "== 1: cmd = item args = '' else: cmd = split[0] args", "ticket %s' % res['html_url']) title = 'cmssh gist %s' % res['html_url'] if isinstance(res,", "0 debug = 0 das_client(host, query, idx, limit, debug, 'plain') def cms_das_json(query): \"\"\"", "exists' % dst) else: raise Exception('Not implemented yet') def cms_rmdir(arg): \"\"\" cmssh rmdir", "given command in current shell environment\" cmd = '%s %s' % (self.cmd, args.strip())", "def bootstrap(arch): \"Bootstrap new architecture\" swdir = os.environ['VO_CMS_SW_DIR'] arch = os.environ['SCRAM_ARCH'] cmd =", "'https://cmsweb.cern.ch' idx = 0 limit = 0 debug = 0 das_client(host, query, idx,", "def results(): \"\"\"Return results from recent query\"\"\" return RESMGR def cms_commands(_arg=None): \"\"\" cmssh", "= list_se(arg, debug) elif pat_site.match(arg): arg = arg.replace('site=', '') res = site_info(arg, debug)", "ipth.debug = True # CMSSW commands def bootstrap(arch): \"Bootstrap new architecture\" swdir =", "'').replace('block=', '') arg = arg.replace('lfn=', '').replace('run=', '') res = run_lumi_info(arg, debug) def cms_json(arg):", "print_warning(msg) msg = '\\n%s/%s is not installed within cmssh, proceed' \\ % (rel,", "cms_ls(arg): \"\"\" cmssh ls command lists local files/dirs/CMS storate elements or CMS entities", "idx = 0 limit = 0 debug = 0 res = das_client(host, query,", "' command to list available releases.\\n' msg += 'Use ' + msg_green('install %s'", "%s\" % (key, cert) run(cmd) userdn = os.environ.get('USER_DN', '') if not userdn: cmd", "site=%s' % site) res = jobsummary({'site': site}) elif pat_user.match(arg): user = arg.replace('user=', '')", "user}) elif pat_site.match(arg): site = arg.replace('site=', '') print_info('Dashboard information, site=%s' % site) res", "or not arch: msg = 'Unable to identify CMSSW environment, please run first:", "+ orig_arg run(cmd, shell=True) if res: 
RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_jobs(arg=None): \"\"\"", "cmssh find command lookup given query in CMS data-services. Examples: cmssh> find dataset=/ZMM*", "% root if os.path.exists(dst): if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(rootsys, dst) # set", "install cms+cmssw+%s' % (script, rel) subprocess.call(cmd, shell=True) # use subprocess due to apt-get", "given CMSSW release and setup its environment\\n' msg += msg_green('arch ') \\ +", "%s' % sename, 'rmdir %s/foo' % sename, 'ls %s' % sename, ] cmd_list", "return archs def cms_arch(arg=None): \"\"\" Show or set CMSSW architecture. Optional parameters either", "'GuiBrowsers', 'Integration', 'MessageLogger', 'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities', 'Services', 'Utilities'] for pkg in pkgs:", "last_arg == '&': background = True arg = arg.replace('&', '').strip() src, dst =", "no filter res = CMSMGR.lookup(arg) else: gen = CMSMGR.lookup(args[0].strip()) for flt in args[1:]:", "os.path.isfile(crab_cfg): msg = 'No crab.cfg file found in %s' % crab_dir print_warning(msg) msg", "class Magic(object): \"\"\" Class to be used with ipython magic functions. It holds", "shell. Examples: cmssh> cmshelp commands \"\"\" mdict = get_ipython().magics_manager.lsmagic() cmds = [k for", "pat_lfn, pat_run, pat_se, pat_user from cmssh.tagcollector import architectures as tc_architectures from cmssh.results import", "CMSSW releases cmssh> releases list # list available CMSSW releases on given platform", "command lists local files/dirs/CMS storate elements or CMS entities (se, site, dataset, block,", "release_info(release=None, rfilter=arg) RESMGR.assign(res) releases = [str(r) for r in res] releases = list(set(releases))", "% (arch, rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): rel_arch = arch break if not rel_arch:", "Show or set CMSSW architecture. 
Optional parameters either <all> or <list> Examples: cmssh>", "\"Mime type:\", mtype[0] ipython = get_ipython() magic = ipython.find_line_magic('edmFileUtil') if magic: if arg[0]", "about cmssh tickets, e.g. Examples: cmssh> tickets # list all cmssh tickets cmssh>", "\"cwd:\", os.getcwd() return if os.uname()[0] == 'Darwin' and arg == '-submit': crab_submit_remotely(rel, work_area)", "Default is 0. \"\"\" arg = arg.strip() if arg: if arg not in", "for name in os.listdir(reldir): fname = os.path.join(reldir, name) if name.find('edm') == 0 and", "print \"Current architecture: %s\" % os.environ['SCRAM_ARCH'] archs = [] for name in os.listdir(os.environ['VO_CMS_SW_DIR']):", "except: debug = 0 if not arg: print_error(\"Usage: mkdir <options> dir\") if arg.find(':')", "res = block_info(arg, debug) elif pat_dataset.match(arg): arg = arg.replace('dataset=', '') try: res =", "# check if we have stand-alone installation if os.environ.get('CMSSH_CMSSW', None): msg = '\\nYou", "'') try: res = dataset_info(arg, debug) except IndexError: msg = \"Given pattern '%s'", "flag. Default is 0. \"\"\" arg = arg.strip() if arg: if arg not", "debug flag. Default is 0. 
\"\"\" arg = arg.strip() if arg: if arg", "demo_file.read() def results(): \"\"\"Return results from recent query\"\"\" return RESMGR def cms_commands(_arg=None): \"\"\"", "cmssh> du T3_US_Cornell \"\"\" arg = arg.strip() if pat_site.match(arg): lookup(arg) else: cmd =", "debug = 0 orig_arg = arg if orig_arg.find('>') != -1: arg, out =", "cmssh> ls # UNIX command cmssh> ls -l local_file cmssh> ls T3_US_Cornell:/store/user/valya cmssh>", "while True: try: uinput = raw_input() if uinput.strip() == 'EOF': break desc +=", "print \"CMSSW releases for %s platform\" % platform() res = release_info(release=None, rfilter=arg) RESMGR.assign(res)", "dirs.sort() name = 'etc/profile.d/init.sh' script = os.path.join(os.path.join(apt_dir, dirs[-1]), name) return script def cms_install(rel):", "arch=%s' \\ % (os.environ['SCRAM_ARCH'], arch) print_warning(msg) msg = '\\n%s/%s is not installed within", "show/set DBS instance, default is DBS global instance\\n' msg += msg_green('mkdir/rmdir ') +", "= das_client(host, query, idx, limit, debug, 'json') RESMGR.assign([res]) pprint.pprint(res) def cms_vomsinit(_arg=None): \"\"\" cmssh", "'CMSSW') debug = 0 msg = 'Bootstrap %s ...' 
% arch # run", "-1: background = True dst = dst.replace('&', '').strip() if dst == '.': dst", "the user Examples: cmssh> vomsinit By default it applies the following options -rfc", "all cmssh tickets cmssh> ticket 14 # get details for given ticket id", "orig = src.split(' ')[-1] if os.path.exists(orig) and not pat.match(dst): if background: cmd =", "cmd = 'cmsRun %s' % arg cmsexe(cmd) def cms_pager(arg=None): \"\"\" cmssh command to", "allowed to install new release, ' msg += 'since cmssh was installed with", "%s look-up' % startswith print_error(msg) else: cmd = 'ls ' + orig_arg run(cmd,", "rm -rf local_dir cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\" arg = arg.strip() try: debug =", "# check if given release name is installed on user system rel_dir =", "= get_ipython().debug except: debug = 0 arg = arg.replace('dataset=', '').replace('file=', '').replace('block=', '') arg", "'CMS release environment will be set for you' print_info(msg) def cmsrel(rel): \"\"\" cmssh", "set (should be set at cmsrel) rel = os.environ.get('CMSSW_VERSION', None) work_area = os.environ.get('CMSSW_WORKAREA',", "+= msg_green('jobs ') \\ + ' status of job queue or CMS jobs\\n'", "or set HTTP debug flag. Default is 0. \"\"\" arg = arg.strip() if", "print 'CMSSW architectures for %s:' \\ % os.uname()[0].replace('Darwin', 'OSX') for name in cms_architectures('all'):", "installed_releases(): \"Print a list of releases installed on a system\" _osname, osarch =", "directory from local file system or CMS storage element. Examples: cmssh> rmdir foo", "RESMGR def cms_commands(_arg=None): \"\"\" cmssh command which lists all registered cmssh commands in", "'') res = list_se(arg, debug) elif pat_site.match(arg): arg = arg.replace('site=', '') res =", "for par in arg.split(): if len(par) > 0 and par[0] == '-': opts.append(par)", "release, e.g. 
install CMSSW_5_0_0\\n' msg += msg_green('cmsrel ') \\ + ' switch to", "CMSSW scram command\\n' msg += msg_green('cmsRun ') \\ + ' cmsRun command for", "if debug: if ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent', fname) else: cms_ls(arg) def cms_cp(arg): \"\"\" cmssh cp", "item[1] if check_os(rel_arch): output.append((rel_arch, status)) return output def check_release_arch(rel): \"Check release/architecture\" # check", "if os.path.exists(orig) and not pat.match(dst): if background: cmd = 'cp %s' % orig_arg", "'') res = file_info(arg, debug) elif pat_block.match(arg): arg = arg.replace('block=', '') res =", "T3_US_Cornell\\n' msg += '\\nAvailable CMSSW commands (once you install any CMSSW release):\\n' msg", "run, lumi, site, user \"\"\" lookup(arg) def cms_du(arg): \"\"\" cmssh disk utility cmssh", "version and work area are set (should be set at cmsrel) rel =", "rel cmsrel(rel) def cmsenv(_arg): \"cmsenv command\" # in CMS cmsenv is an alias", "== '-f': overwrite = True else: overwrite = False except: traceback.print_exc() return try:", "file_info, site_info, run_info from cmssh.cmsfs import CMSMGR, apply_filter, validate_dbs_instance from cmssh.cmsfs import release_info,", "status = check_release_arch(rel) if status != 'ok': msg = '\\nCheck release architecture status:", "in ['reset', 'clear', 'clean']: path = os.environ['CMSSH_ROOT'] for idir in ['external', 'lib', 'root']:", "os.path.join(os.environ['VO_CMS_SW_DIR'], rdir) for name in os.listdir(reldir): fname = os.path.join(reldir, name) if name.find('edm') ==", "0 debug = 0 res = das_client(host, query, idx, limit, debug, 'json') RESMGR.assign([res])", "if not user_input('Send this ticket', default='N'): print_info('Aborting your action') return key = '<KEY>'", "instance\" % arg else: print \"Invalid DBS instance\" else: msg = \"DBS instance", "system.\" msg += \"\\nPlease use \" + msg_green('install CMSSW_X_Y_Z') \\ + ' command", "key = '<KEY>' % 
time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime(time.time())) files = {key: {'content': desc}} res", "pager to nill \"\"\" arg = arg.strip() if arg: if arg == '0'", "'%s/src/DataFormats/FWLite/python' % path dst = '%s/DataFormats/FWLite' % idir os.symlink(link, dst) for lib in", "def cmscrab(arg): \"\"\" Execute CRAB command, help is available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\" msg", "file/LFN, ' \\ + 'e.g. rm local.file or rm T3_US_Cornell:/store/user/file.root\\n' msg += msg_green('cp", "architecture, accept <list|all> args\\n' msg += msg_green('scram ') + ' CMSSW scram command\\n'", "or CMS storage element. Examples: cmssh> rmdir foo cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg", "options: - list, which lists local transfer jobs - site, which lists jobs", "given site - dashboard, which lists jobs of current user - user, which", "check if we have stand-alone installation if os.environ.get('CMSSH_CMSSW', None): msg = '\\nYou are", "reqmgr(arg.replace('dataset=', '')) def cms_lumi(arg): \"\"\" Return lumi info for a given dataset/file/block/lfn/run Examples:", "T3_US_Cornell \"\"\" arg = arg.strip() if pat_site.match(arg): lookup(arg) else: cmd = 'du '", "== '.': dst = os.getcwd() # check if src still has options and", "else: doc = 'Documentation is not available' else: doc = cms_help_msg() print doc", "cms_commands() return ipython = get_ipython() if arg[0] == '(' and arg[-1] == ')':", "cmssh.cms_urls import dbs_instances, tc_url from cmssh.das import das_client from cmssh.url_utils import get_data, send_email", "edit it ' msg += 'appropriately and re-run crab command' print_info(msg) print \"cwd:\",", "cmsrel # reset CMSSW environment to cmssh one cmssh> cmsrel CMSSW_5_2_4 \"\"\" ipython", "mkdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_ls(arg): \"\"\" cmssh ls command lists local", "\"\"\" Return configuration object for given dataset Examples: cmssh> config 
dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\" if", "are set (should be set at cmsrel) rel = os.environ.get('CMSSW_VERSION', None) work_area =", "arg = arg.strip() debug = get_ipython().debug args = arg.split('|') if len(args) == 1:", "if check_os(name): print name else: cms_archs = cms_architectures('all') if arg not in cms_archs:", "if arg.startswith(item + '='): startswith = item if os.path.isfile(orig_arg) or os.path.isdir(orig_arg): cmd =", "if arg: if arg == '0' or arg == 'None' or arg ==", "cmd def execute(self, args=''): \"Execute given command in current shell environment\" cmd =", "\"Execute given command in original shell environment\" cmd = '%s %s' % (self.cmd,", "'ls'] cmd_list += ['ls dataset=%s' % dataset, 'ls run=%s' % run, 'ls file=%s'", "das_client from cmssh.url_utils import get_data, send_email from cmssh.regex import pat_release, pat_site, pat_dataset, pat_block", "arg) # DEBUG.set(arg) # else: # print_info(\"Debug level is %s\" % DEBUG.level) def", "'cmssh gist %s' % res['html_url'] if isinstance(res, dict): ticket = pprint.pformat(res) else: ticket", "default it applies the following options -rfc -voms cms:/cms -key <userkey.pem> -cert <usercert.pem>", "use subprocess due to apt-get interactive feature if platform() == 'osx': idir =", "to %s\" % arg) # DEBUG.set(arg) # else: # print_info(\"Debug level is %s\"", "msg += '\\nInstalled releases: ' + msg_green(', '.join(releases)) print msg return cmd =", "status: msg = '%s release is not officially supported under %s' \\ %", "%s' % crab_dir print_warning(msg) msg = 'Would you like to create one' if", "% user) res = jobsummary({'user': user}) elif pat_site.match(arg): site = arg.replace('site=', '') print_info('Dashboard", "' show your proxy info (aka voms-proxy-info)\\n' msg += '\\nQuery results are accessible", "lumi {\"190704\":[1,2,3]}', 'find lumi {190704:[1,2,3]}'] cmd_list += ['find config dataset=%s' % dataset2] cmd_list", "cmd = '-e -f %s' % fname 
ipython.run_line_magic('edmFileUtil', cmd) if debug: if ipython.find_line_magic('edmDumpEventContent'):", "except: traceback.print_exc() def cms_mkdir(arg): \"\"\" cmssh mkdir command creates directory on local filesystem", "'') arg = arg.replace('lfn=', '') res = file_info(arg, debug) elif pat_block.match(arg): arg =", "return if not user_input('Send this ticket', default='N'): print_info('Aborting your action') return key =", "arch # show current and installed architecture(s) cmssh> arch all # show all", "architecture. Optional parameters either <all> or <list> Examples: cmssh> arch # show current", "%s' % arg) else: fname = os.environ.get('CMS_JSON') print_info('CMS JSON: %s' % fname) try:", "= os.path.join(os.environ['HOME'], '.globus/usercert.pem') with working_pem(PEMMGR.pem) as key: run(\"voms-proxy-destroy\") cmd = \"voms-proxy-init -rfc -voms", "flt=flt) def cms_jobs(arg=None): \"\"\" cmssh jobs command lists local job queue or provides", "try: debug = get_ipython().debug except: debug = 0 orig_arg = arg if orig_arg.find('>')", "apt commands\" if '-cache' in arg or '-get' in arg: cmd = 'apt%s'", "CMSSW environment, please run first: ' msg = msg_red(msg) msg += msg_blue('cmsrel <rel>\\n')", "\"You did your email address\" print_error(msg) return desc = '' msg = 'Type", "a in tc_architectures(arch_type)] return archs def cms_arch(arg=None): \"\"\" Show or set CMSSW architecture.", "alias to: eval `scramv1 runtime -sh`' msg = 'Within cmssh it is not", "print item elif arg == 'all' or arg == 'list': if arg ==", "None entities = \\ ['se', 'site', 'lfn', 'dataset', 'block', 'run', 'release', 'file'] for", "rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return 'ok' output = [] for arch, status in", "== 'False': if os.environ.has_key('CMSSH_PAGER'): del os.environ['CMSSH_PAGER'] else: os.environ['CMSSH_PAGER'] = arg print \"Set CMSSH", "given platform \"\"\" if not arg: print \"Current architecture: %s\" % os.environ['SCRAM_ARCH'] 
archs", "-1`; ' \\ % (swdir, arch) cmd += 'apt-get install external+fakesystem+1.0; ' cmd", "os.environ.get('CMS_JSON') print_info('CMS JSON: %s' % fname) try: debug = get_ipython().debug except: debug =", "= arg.rsplit(' ', 1) if dst.find('&') != -1: background = True dst =", "on user system rel_dir = '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return", "False except: traceback.print_exc() return try: debug = get_ipython().debug except: debug = 0 if", "shell=True, call=True) def cmscrab(arg): \"\"\" Execute CRAB command, help is available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq", "= '%s|\\#> ' % rel # final message print \"%s is ready, cwd:", "%s/foo' % sename, 'ls %s' % sename, 'rmdir %s/foo' % sename, 'ls %s'", "os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir) root = os.environ['CMSSH_ROOT'] idir = os.environ['CMSSH_INSTALL_DIR'] base = os.path.realpath('%s/CMSSW' % root)", "len(options) > 1 and options[0] == '-f': overwrite = True else: overwrite =", "tests, etc. \"\"\" if arg: print \"CMSSW releases for %s platform\" % platform()", "DAS data-service with provided query and returns results in JSON data format Examples:", "don't have yet CMSSW release installed on your system.\" msg += \"\\nPlease use", "and creates user based directory structure. 
Examples: cmssh> cmsrel # reset CMSSW environment", "and os.path.isfile(fname): mtype = mimetypes.guess_type(arg) if mtype[0]: print \"Mime type:\", mtype[0] ipython =", "'crab.cfg') if not os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir) if not os.path.isfile(crab_cfg): msg = 'No crab.cfg", "\"cmssh pager is set to: %s\" % val print msg def dbs_instance(arg=None): \"\"\"", "from cmssh.reqmgr import reqmgr from cmssh.cms_objects import get_dashboardname def options(arg): \"\"\"Extract options from", "if not rel or rel in ['reset', 'clear', 'clean']: path = os.environ['CMSSH_ROOT'] for", "get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: rmdir <options> dir\") if", "+ ' provides detailed info about given CMS entity, ' \\ + 'e.g.", "in tc_architectures(arch_type)] return archs def cms_arch(arg=None): \"\"\" Show or set CMSSW architecture. Optional", "rel) subprocess.call(cmd, shell=True) # use subprocess due to apt-get interactive feature if platform()", "# list all cmssh tickets cmssh> ticket 14 # get details for given", "res = jobsummary({'user': user}) elif pat_site.match(arg): site = arg.replace('site=', '') print_info('Dashboard information, site=%s'", "execmd(cmd) os.environ['USER_DN'] = stdout.replace('\\n', '') def github_issues(arg=None): \"\"\" Retrieve information about cmssh tickets,", "return # check if given release name is installed on user system rel_arch", "\\ 'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) # check if release version and work area", "idir)) if releases: releases.sort() print \"\\nInstalled releases:\" for rel in releases: print rel", "debug = 0 if not arg: print_error(\"Usage: cp <options> source_file target_{file,directory}\") pat =", "cms_lumi(arg): \"\"\" Return lumi info for a given dataset/file/block/lfn/run Examples: cmssh> lumi run=190704", "1 and options[0] == '-f': overwrite = True else: overwrite = False except:", "None startswith = None 
entities = \\ ['se', 'site', 'lfn', 'dataset', 'block', 'run',", "else: out = None if arg: arg = arg.strip() read(arg, out, debug) def", "else: cms_ls(arg) def cms_cp(arg): \"\"\" cmssh cp command copies local files/dirs to/from local", "current shell. Examples: cmssh> cmshelp commands \"\"\" mdict = get_ipython().magics_manager.lsmagic() cmds = [k", "releases, accepts <list|all> args\\n' msg += msg_green('install ') \\ + ' install CMSSW", "show or switch to given CMSSW architecture, accept <list|all> args\\n' msg += msg_green('scram", "show your proxy info (aka voms-proxy-info)\\n' msg += '\\nQuery results are accessible via", "def cms_help(arg=None): \"\"\" cmshelp command Examples: cmssh> cmshelp cmssh> cmshelp commands cmssh> cmshelp", "T3_US_Cornell:/store/user/name/file.root T3_US_Omaha \"\"\" check_voms_proxy() background = False orig_arg = arg arg = arg.strip()", "debug shell command # \"\"\" # arg = arg.strip() # if arg: #", "(script, rel) else: print \"Installing cms+cmssw+%s ...\" % rel cmd = 'source %s;", "rel = rel.strip() if not rel or rel in ['reset', 'clear', 'clean']: path", "rel in releases: print rel else: msg = \"\\nYou don't have yet CMSSW", "%s/file.root' % sename, 'ls %s' % sename, 'rm file.root', 'cp %s file1.root &'", "rm local_file cmssh> rm -rf local_dir cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\" arg = arg.strip()", "print_status('New gist ticket %s' % res['html_url']) title = 'cmssh gist %s' % res['html_url']", "get_dashboardname(userdn) print_info('Dashboard information, user=%s' % user) res = jobsummary({'user': user}) elif pat_site.match(arg): site", "query DAS and return data in JSON format\\n' msg += msg_green('jobs ') \\", "given release name is installed on user system rel_dir = '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'],", "subprocess due to apt-get interactive feature if platform() == 'osx': idir = '%s/%s/cms/cmssw/%s'", "data in JSON format\\n' msg += msg_green('jobs ') \\ + ' status of", 
"get details for given ticket id cmssh> ticket new # post new ticket", "% (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir) cmd = 'source `find %s -name init.sh | tail", "pager is set to: %s\" % val print msg def dbs_instance(arg=None): \"\"\" cmssh", "# if arg: # print_info(\"Set debug level to %s\" % arg) # DEBUG.set(arg)", "!= -1: arg, flt = orig_arg.split('|', 1) arg = arg.strip() else: flt =", "'appropriately and re-run crab command' print_info(msg) print \"cwd:\", os.getcwd() return if os.uname()[0] ==", "check_voms_proxy() background = False orig_arg = arg arg = arg.strip() try: last_arg =", "in current shell. Examples: cmssh> cmshelp commands \"\"\" mdict = get_ipython().magics_manager.lsmagic() cmds =", "# \"\"\" # arg = arg.strip() # if arg: # print_info(\"Set debug level", "cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd()) if not os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir) root = os.environ['CMSSH_ROOT'] idir =", "data-services. cmssh find command lookup given query in CMS data-services. Examples: cmssh> find", "%s/%s/external/apt -name init.sh | tail -1`; ' \\ % (swdir, arch) cmd +=", "given release/architecture is in place status = check_release_arch(rel) if status != 'ok': msg", "setup CMSSW environment. 
\"\"\" cmd = 'cmsRun %s' % arg cmsexe(cmd) def cms_pager(arg=None):", "import os import re import sys import time import json import glob import", "= get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: rm <options> source_file\")", "rel else: msg = \"\\nYou don't have yet CMSSW release installed on your", "get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: rm <options> source_file\") dst", "file=%s' % lfn] cmd_list += ['find lumi dataset=%s' % dataset, 'find lumi {\"190704\":[1,2,3]}',", "= os.environ['DEFAULT_ROOT'] if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path, 'lib') cmd = '%s/xrdcp %s' %", "msg = \"Given pattern '%s' does not exist on local filesystem or in", "release and setup its environment\\n' msg += msg_green('arch ') \\ + ' show", "= rel_arch # setup environment cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd()) if not os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir)", "['ls', 'mkdir ttt', 'ls -l', 'rmdir ttt', 'ls'] cmd_list += ['ls dataset=%s' %", "is not supported in CMSSW' return 'no match' def get_apt_init(arch): \"Return proper apt", "'None' or arg == 'False': if os.environ.has_key('CMSSH_PAGER'): del os.environ['CMSSH_PAGER'] else: os.environ['CMSSH_PAGER'] = arg", "= os.environ.get('CMSSW_VERSION', None) work_area = os.environ.get('CMSSW_WORKAREA', None) if not rel or not work_area:", "msg = 'Bootstrap %s ...' % arch # run bootstrap command in subprocess.call", "'%s/install/lib/release_root' % root if os.path.exists(dst): if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(rootsys, dst) #", "arg.strip() == 'commands': cms_commands() return ipython = get_ipython() if arg[0] == '(' and", "supported in cmssh. 
\"\"\" # system modules import os import re import sys", "Examples: cmssh> cmshelp cmssh> cmshelp commands cmssh> cmshelp ls \"\"\" if arg: if", "debug = 0 arg = arg.replace('dataset=', '').replace('file=', '').replace('block=', '') arg = arg.replace('lfn=', '').replace('run=',", "run(cmd) def cms_das(query): \"\"\" cmssh command which queries DAS data-service with provided query.", "'https://cmsweb.cern.ch' idx = 0 limit = 0 debug = 0 res = das_client(host,", "setup command, it setups CMSSW environment and creates user based directory structure. Examples:", "% (os.path.join(root_path, 'bin'), arg.strip()) run(cmd) if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = dyld_path #def debug(arg): #", "-1: # not a SE:dir pattern run(\"mkdir %s\" % arg) else: try: status", "') + ' remove file/LFN, ' \\ + 'e.g. rm local.file or rm", "which lists all registered cmssh commands in current shell. Examples: cmssh> cmshelp commands", "msg += 'CMS release environment will be set for you' print_info(msg) def cmsrel(rel):", "-1: archs.append(name) if archs: print '\\nInstalled architectures:' for item in archs: print item", "system.\\n' msg += 'Use ' + msg_green('releases') msg += ' command to list", "args = arg.split('|') if len(args) == 1: # no filter res = CMSMGR.lookup(arg)", "command Examples: cmssh> cmshelp cmssh> cmshelp commands cmssh> cmshelp ls \"\"\" if arg:", "% rel print_error(msg) msg = 'Please check the you provide correct release name,'", "arg, flt = orig_arg.split('|', 1) arg = arg.strip() else: flt = None startswith", "+ 'e.g. info run=160915\\n' msg += msg_green('das ') + ' query DAS service\\n'", "os.path.isdir(pkg_dir): cmd = '' return cmd def cms_root(arg): \"\"\" cmssh command to run", "RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_jobs(arg=None): \"\"\" cmssh jobs command lists local job", "cms_install(rel): \"\"\" cmssh command to install given CMSSW release. 
Examples: cmssh> install CMSSW_5_2_4", "'.join(releases)) print msg return cmd = \"eval `scramv1 runtime -sh`; %s\" % cmd", "func = ipython.find_magic(case) if func: doc = func.func_doc break else: doc = 'Documentation", "fname ipython.run_line_magic('edmFileUtil', cmd) if debug: if ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent', fname) else: cms_ls(arg) def cms_cp(arg):", "CMSSW_5_2_4 \"\"\" rel = rel.strip() pat = pat_release if not pat.match(rel): msg =", "msg_green('install %s' % rel) msg += ' command to install given release.' print", "None) if not vdir or not arch: msg = 'Unable to identify CMSSW", "show current and installed architecture(s) cmssh> arch all # show all known CMSSW", "switch to given release os.environ['CMSSW_VERSION'] = rel os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel) if os.path.isdir(os.path.join(cmssw_dir,", "['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] mgr = get_ipython() for item in cmd_list: print_info(\"Execute %s\" % item)", "= os.environ['CMSSH_ROOT'] for idir in ['external', 'lib', 'root']: pdir = os.path.join(path, 'install/lib/release_%s' %", "arg else: print \"Invalid DBS instance\" else: msg = \"DBS instance is set", "% arch) dirs = os.listdir(apt_dir) dirs.sort() name = 'etc/profile.d/init.sh' script = os.path.join(os.path.join(apt_dir, dirs[-1]),", "= arg.replace('block=', '') res = block_info(arg, debug) elif pat_dataset.match(arg): arg = arg.replace('dataset=', '')", "data transfer') dqueue(arg) elif arg == 'dashboard': userdn = os.environ.get('USER_DN', None) if userdn:", "cmd_list += ['ls dataset=%s' % dataset, 'ls run=%s' % run, 'ls file=%s' %", "enable access to RunSummary service please ensure that your usercert.pem is mapped at", "lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh> lumi run=190704 cmssh> lumi {190704:[1,2,3,4], 201706:[1,2,3,67]} \"\"\" try: debug =", "\"Switch to 
%s DBS instance\" % arg else: print \"Invalid DBS instance\" else:", "if arg.find(':') == -1: # not a SE:dir pattern run(\"mkdir %s\" % arg)", "| tail -1`;' % pkg_dir if not os.path.isdir(pkg_dir): cmd = '' return cmd", "os.path.join(os.path.join(apt_dir, dirs[-1]), name) return script def cms_install(rel): \"\"\" cmssh command to install given", "+= 'apt-get install external+fakesystem+1.0; ' cmd += 'apt-get update; ' msg = 'Initialize", "release ipython = get_ipython() rdir = '%s/bin/%s' % (rel_dir, rel_arch) reldir = os.path.join(os.environ['VO_CMS_SW_DIR'],", "dyld_path = os.environ.get('DYLD_LIBRARY_PATH', None) root_path = os.environ['DEFAULT_ROOT'] if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path, 'lib')", "msg += 'appropriately and re-run crab command' print_info(msg) print \"cwd:\", os.getcwd() return if", "python #-*- coding: ISO-8859-1 -*- #pylint: disable-msg=W0702 \"\"\" Set of UNIX commands, e.g.", "re import sys import time import json import glob import shutil import base64", "msg_green('arch ') \\ + ' show or switch to given CMSSW architecture, accept", "you' print_info(msg) def cmsrel(rel): \"\"\" cmssh release setup command, it setups CMSSW environment", "output = [] for item in releases: rel_arch = item[0] status = item[1]", "% arg print_error(msg) elif pat_run.match(arg): arg = arg.replace('run=', '') res = run_info(arg, debug)", "`find %s/%s/external/apt -name init.sh | tail -1`; ' \\ % (swdir, arch) cmd", "at\\n' msg += 'https://github.com/vkuznet/cmssh/issues/new\\n' msg += 'otherwise it will be posted as anonymous", "new # post new ticket from cmssh # or post it at https://github.com/vkuznet/cmssh/issues/new", "are not allowed to install new release, ' msg += 'since cmssh was", "mimetypes.guess_type(arg) if mtype[0]: print \"Mime type:\", mtype[0] ipython = get_ipython() magic = ipython.find_line_magic('edmFileUtil')", "'<KEY>' % time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime(time.time())) files = {key: {'content': 
desc}} res = post_ticket(key,", "cmd): self.cmd = cmd def execute(self, args=''): \"Execute given command in current shell", "msg += msg_green('info ') \\ + ' provides detailed info about given CMS", "list, which lists local transfer jobs - site, which lists jobs at given", "RESMGR.assign(res) pprint.pprint(res) def demo(_arg=None): \"Show cmssh demo file\" root = os.environ.get('CMSSH_ROOT') path =", "if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return 'ok' output = [] for arch, status in get_release_arch(rel):", "\"\"\" cmssh command to read provided HTML page (by default output dumps via", "storate elements. Examples: cmssh> cp file1 file2 cmssh> cp file.root T3_US_Cornell:/store/user/name cmssh> cp", "opts = [] for par in arg.split(): if len(par) > 0 and par[0]", "\"\"\" rel = rel.strip() pat = pat_release if not pat.match(rel): msg = 'Fail", "output.append(msg) if output: return ', '.join(output) osname, osarch = osparameters() if osname ==", "+= msg_blue('EOF') + ' and hit ' + msg_blue('Enter') + '\\n' print msg", "for r in results(): print r, type(r)\\n' msg += '\\nList cmssh commands :", "') + ' query DAS service\\n' msg += msg_green('das_json ') \\ + '", "% platform() res = release_info(release=None, rfilter=arg) RESMGR.assign(res) releases = [str(r) for r in", "' + msg_blue('cmshelp <command>') msg += '\\nInstall python software: ' + \\ msg_blue('pip", "cmssh.utils import list_results, check_os, unsupported_linux, access2file from cmssh.utils import osparameters, check_voms_proxy, run, user_input", "print_info(\"Set debug level to %s\" % arg) # DEBUG.set(arg) # else: # print_info(\"Debug", "architectures (aka SCRAM_ARCH)\" archs = [a for a in tc_architectures(arch_type)] return archs def", "= msg_red(msg) msg += msg_blue('cmsrel <rel>\\n') releases = os.listdir(os.environ['CMSSW_RELEASES']) msg += '\\nInstalled releases:", "pkg_dir) cmd = 'source `find %s -name init.sh | tail -1`;' % pkg_dir", "command help : ' + msg_blue('cmshelp 
<command>') msg += '\\nInstall python software: '", "= ['pager 0', 'debug_http 0'] cmd_list += ['ls', 'mkdir ttt', 'ls -l', 'rmdir", "# print_info(\"Debug level is %s\" % DEBUG.level) def debug_http(arg): \"\"\" Show or set", "check the you provide correct release name,' msg += ' e.g. CMSSW_X_Y_Z<_patchN>' print", "' msg = 'Initialize %s apt repository ...' % arch run(cmd, sdir, msg=msg,", "internal pager Examples: cmssh> pager # shows current setting cmssh> pager None #", "creates user based directory structure. Examples: cmssh> cmsrel # reset CMSSW environment to", "arg = arg.replace('dataset=', '') try: res = dataset_info(arg, debug) except IndexError: msg =", "= get_ipython() magic = ipython.find_line_magic('edmFileUtil') if magic: if arg[0] == '/': cmd =", "item in archs: print item elif arg == 'all' or arg == 'list':", "das_client(host, query, idx, limit, debug, 'plain') def cms_das_json(query): \"\"\" cmssh command which queries", "ticket' print_info(msg) if not user_input('Proceed', default='N'): return email = raw_input('Your Email : ')", "== 'all': print name else: if check_os(name): print name else: cms_archs = cms_architectures('all')", "print_error(msg) msg = 'Please check the you provide correct release name,' msg +=", "or voms\\n' msg += msg_green('vomsinit ') \\ + ' setup your proxy (aka", "access to RunSummary service please ensure that your usercert.pem is mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx", "dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx = 0 limit = 0 debug =", "cmssh> cp /store/mc/file.root T3_US_Cornell:/store/user/name cmssh> cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha \"\"\" check_voms_proxy() background = False", "= os.environ['VO_CMS_SW_DIR'] arch = os.environ['SCRAM_ARCH'] cmd = 'sh -x %s/bootstrap.sh setup -path %s", "%s\" % arg run(cmd) else: if pat_lfn.match(arg.split(':')[-1]): status = rm_lfn(arg, verbose=debug) print_status(status) else:", "read config.txt \"\"\" try: debug = 
get_ipython().debug except: debug = 0 orig_arg =", "config.txt \"\"\" try: debug = get_ipython().debug except: debug = 0 orig_arg = arg", "return if os.uname()[0] == 'Darwin' and arg == '-submit': crab_submit_remotely(rel, work_area) return cmd", "arg = arg.strip() if arg: if arg == '0' or arg == 'None'", "rfilter=arg) RESMGR.assign(res) releases = [str(r) for r in res] releases = list(set(releases)) releases.sort()", "CMS data-services. Examples: cmssh> find dataset=/ZMM* cmssh> find file dataset=/Cosmics/CRUZET3-v1/RAW csmsh> find site", "rel) else: print \"Installing cms+cmssw+%s ...\" % rel cmd = 'source %s; apt-get", "setup your proxy (aka voms-proxy-init)\\n' msg += msg_green('vomsinfo ') \\ + ' show", "0 msg = 'Bootstrap %s ...' % arch # run bootstrap command in", "elements. Examples: cmssh> rm local_file cmssh> rm -rf local_dir cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\"", "[] for idir in os.listdir(os.environ['VO_CMS_SW_DIR']): if idir.find(osarch) != -1: rdir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'],", "web interface at\\n' msg += 'https://github.com/vkuznet/cmssh/issues/new\\n' msg += 'otherwise it will be posted", "cmssh prompt ipython.prompt_manager.in_template = '%s|\\#> ' % rel # final message print \"%s", "call=True) def cmscrab(arg): \"\"\" Execute CRAB command, help is available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\"", "= '%s %s' % (self.cmd, args.strip()) subprocess.call(cmd, shell=True) def installed_releases(): \"Print a list", "\\ + ' display disk usage for given site, e.g. 
du T3_US_Cornell\\n' msg", "cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug = get_ipython().debug except: debug", "validate_dbs_instance(arg): os.environ['DBS_INSTANCE'] = arg print \"Switch to %s DBS instance\" % arg else:", "= cms_help_msg() print doc def cms_rm(arg): \"\"\" CMS rm command works with local", "if os.path.islink(pdir): os.remove(pdir) if os.path.isdir(pdir): shutil.rmtree(pdir) os.makedirs(pdir) # Set cmssh prompt prompt =", "%s apt repository ...' % arch run(cmd, sdir, msg=msg, debug=debug, shell=True) def get_release_arch(rel):", "arg not in cms_archs: msg = 'Wrong architecture, please choose from the following", "' cmsRun command for release in question\\n' msg += '\\nAvailable GRID commands: <cmd>", "%s release ...\" % rel cmsrel(rel) def cmsenv(_arg): \"cmsenv command\" # in CMS", "(src, dst)) else: try: status = copy_lfn(orig, dst, debug, background, overwrite) print_status(status) except:", "give site or for given user. It accepts the following list of options:", "cp, supported in cmssh. 
\"\"\" # system modules import os import re import", "__init__(self, cmd): self.cmd = cmd def execute(self, args=''): \"Execute given command in current", "open('crab.cfg', 'w') as config: config.write(crabconfig()) msg = 'Your crab.cfg has been created, please", "debug) elif pat_site.match(arg): arg = arg.replace('site=', '') res = site_info(arg, debug) elif pat_lfn.match(arg):", "it run(cmd, sdir, 'bootstrap.log', msg, debug, shell=True, call=True) cmd = 'source `find %s/%s/external/apt", "print msg return # check if we have stand-alone installation if os.environ.get('CMSSH_CMSSW', None):", "= get_tickets(arg) RESMGR.assign(res) pprint.pprint(res) def demo(_arg=None): \"Show cmssh demo file\" root = os.environ.get('CMSSH_ROOT')", "' msg += 'run ' + msg_blue('cmsrel') + ' command' print_error(msg) return #", "return cmd = \"eval `scramv1 runtime -sh`; %s\" % cmd run(cmd, shell=True, call=True)", "last_arg = arg.split(' ')[-1].strip() if last_arg == '&': background = True arg =", "exist on local filesystem or in DBS\" % arg print_error(msg) elif pat_run.match(arg): arg", "debug_http(arg): \"\"\" Show or set HTTP debug flag. Default is 0. \"\"\" arg", "list # list available CMSSW releases on given platform cmssh> releases all #", "% arg cmsexe(cmd) def cmsrun(arg): \"\"\" cmssh command to execute CMSSW cmsRun command.", "+= msg_green('releases ') \\ + ' list available CMSSW releases, accepts <list|all> args\\n'", "has options and user asked for -f options = src.split(' ') if len(options)", "CMSMGR, apply_filter, validate_dbs_instance from cmssh.cmsfs import release_info, run_lumi_info from cmssh.github import get_tickets, post_ticket", "command removes directory from local file system or CMS storage element. 
Examples: cmssh>", "is mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\" if not arg: return try: debug = get_ipython().debug", "dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh> find run=160915 cmssh> find lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> find lumi run=190704 cmssh>", "def integration_tests(_arg): \"Run series of integration tests for cmssh\" for fname in ['file1.root',", "isinstance(res, dict): ticket = pprint.pformat(res) else: ticket = res to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') send_email(to_user,", "uinput = raw_input() if uinput.strip() == 'EOF': break desc += uinput + '\\n'", "rel_arch: msg = 'Release ' + msg_red(rel) msg += ' is not yet", "and arg == '-submit': crab_submit_remotely(rel, work_area) return cmd = 'source $CRAB_ROOT/crab.sh; crab %s'", "read provided HTML page (by default output dumps via pager) Examples: cmssh> read", "= 'T3_US_Cornell:/store/user/valya' cmd_list = ['pager 0', 'debug_http 0'] cmd_list += ['ls', 'mkdir ttt',", "+= '\\nQuery results are accessible via %s function, e.g.\\n' \\ % msg_blue('results()') msg", "% item) split = item.split(' ', 1) if len(split) == 1: cmd =", "demo(_arg=None): \"Show cmssh demo file\" root = os.environ.get('CMSSH_ROOT') path = os.path.join(root, 'cmssh/DEMO') with", "meta-data entity, e.g. dataset, block, file, run. Examples: cmssh> info dataset=/a/b/c cmssh> info", "removes directory from local file system or CMS storage element. Examples: cmssh> rmdir", "all # show all known CMS releases, including online, tests, etc. 
\"\"\" if", "'MessageLogger', 'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities', 'Services', 'Utilities'] for pkg in pkgs: link =", "+= ', '.join(cms_archs) raise Exception(msg) print \"Switch to SCRAM_ARCH=%s\" % arg os.environ['SCRAM_ARCH'] =", "-1: arg, flt = orig_arg.split('|', 1) arg = arg.strip() else: flt = None", "except: debug = 0 if debug and access2file(fname): with open(fname, 'r') as cms_json:", "lumi dataset=%s' % dataset, 'find lumi {\"190704\":[1,2,3]}', 'find lumi {190704:[1,2,3]}'] cmd_list += ['find", "and re-run crab command' print_info(msg) print \"cwd:\", os.getcwd() return if os.uname()[0] == 'Darwin'", "%s\" % DEBUG.level) def debug_http(arg): \"\"\" Show or set HTTP debug flag. Default", "<package>') return msg def cms_help(arg=None): \"\"\" cmshelp command Examples: cmssh> cmshelp cmssh> cmshelp", "'-e -f file:///%s' % fname else: cmd = '-e -f %s' % fname", "'run ' + msg_blue('cmsrel') + ' command' print_error(msg) return # check existence of", "email, title, ticket) else: res = get_tickets(arg) RESMGR.assign(res) pprint.pprint(res) def demo(_arg=None): \"Show cmssh", "(rel, arch) print_warning(msg) if arch != os.environ['SCRAM_ARCH']: msg = 'Your SCRAM_ARCH=%s, while found", "else: run(\"cp %s %s\" % (src, dst)) else: try: status = copy_lfn(orig, dst,", "cms+cmssw-patch+%s ...\" % rel cmd = 'source %s; apt-get install cms+cmssw-patch+%s' % (script,", "% idir) if os.path.islink(pdir): os.remove(pdir) if os.path.isdir(pdir): shutil.rmtree(pdir) os.makedirs(pdir) # Set cmssh prompt", "' + msg_green('releases') msg += ' command to list available releases.\\n' msg +=", "else: os.chdir(cmssw_dir) cmd = \"scramv1 project CMSSW %s\" % rel run(cmd) os.chdir(os.path.join(rel, 'src'))", "cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug = get_ipython().debug except: debug", "print_error(msg) else: cmd = 'ls ' + orig_arg run(cmd, shell=True) if res: RESMGR.assign(res)", "it can be spawned into 
serate process, therefore # subprocess.Popen will not catch", "os.environ.get('CMSSW_VERSION', None) work_area = os.environ.get('CMSSW_WORKAREA', None) if not rel or not work_area: msg", "global instance\\n' msg += msg_green('mkdir/rmdir ') + ' mkdir/rmdir command, ' \\ +", "' msg += msg_blue('EOF') + ' and hit ' + msg_blue('Enter') + '\\n'", "pkg environment\" pkg_dir = '%s/%s/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir) cmd = 'source", "def verbose(arg): \"\"\" Set/get verbosity level \"\"\" arg = arg.strip() ipth = get_ipython()", "nill \"\"\" arg = arg.strip() if arg: if arg == '0' or arg", "local.file or cp /store/user/file.root .\\n' msg += msg_green('info ') \\ + ' provides", "' setup your proxy (aka voms-proxy-init)\\n' msg += msg_green('vomsinfo ') \\ + '", "1) out = out.strip() arg = arg.strip() else: out = None if arg:", "HTML page (by default output dumps via pager) Examples: cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh>", "msg += 'run ' + msg_blue('cmsrel') + ' command' print_error(msg) return # check", "'lib', 'root']: pdir = os.path.join(path, 'install/lib/release_%s' % idir) if os.path.islink(pdir): os.remove(pdir) if os.path.isdir(pdir):", "if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(link, dst) # switch to given release os.environ['CMSSW_VERSION']", "DBS instance\" % arg else: print \"Invalid DBS instance\" else: msg = \"DBS", "is installed on user system rel_arch = None for arch in cms_architectures(): rel_dir", "# switch to given release os.environ['CMSSW_VERSION'] = rel os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel) if", "file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh> lumi run=190704 cmssh> lumi {190704:[1,2,3,4], 201706:[1,2,3,67]} \"\"\" try: debug = get_ipython().debug", "cp local.file or cp /store/user/file.root .\\n' msg += msg_green('info ') \\ 
+ '", "print_info('CMS_JSON: %s' % arg) else: fname = os.environ.get('CMS_JSON') print_info('CMS JSON: %s' % fname)", "'mkdir %s/foo' % sename, 'ls %s' % sename, 'rmdir %s/foo' % sename, 'ls", "installed CMSSW releases cmssh> releases list # list available CMSSW releases on given", "cms_das_json(query): \"\"\" cmssh command which queries DAS data-service with provided query and returns", "commands: <cmd> either grid or voms\\n' msg += msg_green('vomsinit ') \\ + '", "flt = orig_arg.split('|', 1) arg = arg.strip() else: flt = None startswith =", "dataset file=%s' % lfn] cmd_list += ['find lumi dataset=%s' % dataset, 'find lumi", "due to apt-get interactive feature if platform() == 'osx': idir = '%s/%s/cms/cmssw/%s' \\", "cmsexe(cmd) def cms_pager(arg=None): \"\"\" cmssh command to show or set internal pager Examples:", "+= ['cp %s file.root' % lfn, 'ls', 'cp file.root %s' % sename, 'ls", "\\ ['se', 'site', 'lfn', 'dataset', 'block', 'run', 'release', 'file'] for item in entities:", "% sename, 'rmdir %s/foo' % sename, 'ls %s' % sename, ] cmd_list +=", "query and returns results in JSON data format Examples: cmssh> das_json dataset=/ZMM* \"\"\"", "cmd = 'eval `scramv1 runtime -sh`; env | grep ^ROOTSYS=' stdout, stderr =", "= ipython.find_line_magic('edmFileUtil') if magic: if arg[0] == '/': cmd = '-e -f file:///%s'", "CMS storate elements. Examples: cmssh> rm local_file cmssh> rm -rf local_dir cmssh> rm", "local files/dirs to/from local files/dirs or CMS storate elements. Examples: cmssh> cp file1", "used with ipython magic functions. 
It holds given command and provide a method", "msg = 'You can post new ticket via web interface at\\n' msg +=", "-path %s -arch %s' % (swdir, swdir, arch) if unsupported_linux(): cmd += '", "item) split = item.split(' ', 1) if len(split) == 1: cmd = item", "# to add scramv1 command in front of edm one, since # execute", "cmsrel) rel = os.environ.get('CMSSW_VERSION', None) work_area = os.environ.get('CMSSW_WORKAREA', None) if not rel or", "= arg print \"Set CMSSH pager to %s\" % arg else: val =", "releases all # show all known CMS releases, including online, tests, etc. \"\"\"", "+= 'https://github.com/vkuznet/cmssh/issues/new\\n' msg += 'otherwise it will be posted as anonymous gist ticket'", "URL/local file content\\n' msg += msg_green('root ') + ' invoke ROOT\\n' msg +=", "'plain') def cms_das_json(query): \"\"\" cmssh command which queries DAS data-service with provided query", "os.environ['SCRAM_ARCH'] = rel_arch # setup environment cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd()) if not os.path.isdir(cmssw_dir):", "info local_file.root Please note: to enable access to RunSummary service please ensure that", "in os.listdir(reldir): fname = os.path.join(reldir, name) if name.find('edm') == 0 and os.path.isfile(fname): #", "if not rel or not work_area: msg = 'In order to run crab", "= arg else: print_info(\"HTTP debug level is %s\" % os.environ.get('HTTPDEBUG', 0)) def cms_find(arg):", "arg print \"Set CMSSH pager to %s\" % arg else: val = os.environ.get('CMSSH_PAGER',", "if idir.find(osarch) != -1: rdir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/cms/cmssw' % idir) if os.path.isdir(rdir):", "setup environment cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd()) if not os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir) root = os.environ['CMSSH_ROOT']", "for flt in args[1:]: res = apply_filter(flt.strip(), gen) RESMGR.assign(res) list_results(res, debug) def verbose(arg):", "instance\" else: msg = \"DBS instance is set to: %s\" \\ % 
os.environ.get('DBS_INSTANCE',", "prompt = 'cms-sh' ipython.prompt_manager.in_template = '%s|\\#> ' % prompt return # check if", "file_info(arg, debug) elif pat_block.match(arg): arg = arg.replace('block=', '') res = block_info(arg, debug) elif", "import pat_lfn, pat_run, pat_se, pat_user from cmssh.tagcollector import architectures as tc_architectures from cmssh.results", "try: res = dataset_info(arg, debug) except IndexError: msg = \"Given pattern '%s' does", "' is not yet installed on your system.\\n' msg += 'Use ' +", "-key <userkey.pem> -cert <usercert.pem> \"\"\" cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem') with working_pem(PEMMGR.pem) as key:", "= arg.replace('lfn=', '') res = file_info(arg, debug) elif pat_block.match(arg): arg = arg.replace('block=', '')", "arch list # show all CMSSW architectures for given platform \"\"\" if not", "cmssh mkdir command creates directory on local filesystem or remote CMS storage element.", "has been created, please edit it ' msg += 'appropriately and re-run crab", "break if not desc: msg = \"You did not provide bug description\" print_error(msg)", "\"Show cmssh demo file\" root = os.environ.get('CMSSH_ROOT') path = os.path.join(root, 'cmssh/DEMO') with open(path,", "run_lumi_info from cmssh.github import get_tickets, post_ticket from cmssh.cms_urls import dbs_instances, tc_url from cmssh.das", "cms_du(arg): \"\"\" cmssh disk utility cmssh command. 
Examples: cmssh> du # UNIX command", "area for %s release ...\" % rel cmsrel(rel) def cmsenv(_arg): \"cmsenv command\" #", "== '-': opts.append(par) return opts class Magic(object): \"\"\" Class to be used with", "else: try: status = rmdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_mkdir(arg): \"\"\" cmssh", "'') res = site_info(arg, debug) elif pat_lfn.match(arg): arg = arg.replace('file=', '') arg =", "search CMS meta-data (query DBS/Phedex/SiteDB)\\n' msg += msg_green('dbs_instance') \\ + ' show/set DBS", "please edit it ' msg += 'appropriately and re-run crab command' print_info(msg) print", "lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> find lumi run=190704 cmssh> find user=oliver List of supported entities:", "therefore # subprocess.Popen will not catch it run(cmd, sdir, 'bootstrap.log', msg, debug, shell=True,", "arg.replace('dataset=', '') try: res = dataset_info(arg, debug) except IndexError: msg = \"Given pattern", "def cms_xrdcp(arg): \"\"\" cmssh command to run ROOT xrdcp via cmssh shell Examples:", "idir os.symlink(link, dst) for lib in ['external', 'lib']: link = '%s/%s/%s' % (path,", "CMS JSON file\" if arg: if access2file(arg): os.environ['CMS_JSON'] = arg print_info('CMS_JSON: %s' %", "160915 sename = 'T3_US_Cornell:/store/user/valya' cmd_list = ['pager 0', 'debug_http 0'] cmd_list += ['ls',", "dataset=%s' % dataset, 'find lumi {\"190704\":[1,2,3]}', 'find lumi {190704:[1,2,3]}'] cmd_list += ['find config", "shell command # \"\"\" # arg = arg.strip() # if arg: # print_info(\"Set", "dst = arg.split()[-1] if os.path.exists(dst) or len(glob.glob(dst)): cmd = \"rm %s\" % arg", "msg_green('install ') \\ + ' install CMSSW release, e.g. 
install CMSSW_5_0_0\\n' msg +=", "r in results(): print r, type(r)\\n' msg += '\\nList cmssh commands : '", "if background: cmd = 'cp %s' % orig_arg subprocess.call(cmd, shell=True) else: run(\"cp %s", "= os.path.join(path, 'install/lib/release_%s' % idir) if os.path.islink(pdir): os.remove(pdir) if os.path.isdir(pdir): shutil.rmtree(pdir) os.makedirs(pdir) #", "cms_root(arg): \"\"\" cmssh command to run ROOT within cmssh Examples: cmssh> root -l", "CMS command to source pkg environment\" pkg_dir = '%s/%s/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'],", "arch) if unsupported_linux(): cmd += ' -unsupported_distribution_hack' sdir = os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW') debug =", "site) res = jobsummary({'site': site}) elif pat_user.match(arg): user = arg.replace('user=', '') print_info('Dashboard information,", "list', 'jobs', 'ls'] cmd_list += ['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] mgr = get_ipython() for item in", "cmd = '%s/xrdcp %s' % (os.path.join(root_path, 'bin'), arg.strip()) run(cmd) if dyld_path: os.environ['DYLD_LIBRARY_PATH'] =", "or rel in ['reset', 'clear', 'clean']: path = os.environ['CMSSH_ROOT'] for idir in ['external',", "= '%s/cms/cmssw/%s' % (arch, rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): rel_arch = arch break if", "-rf local_dir cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\" arg = arg.strip() try: debug = get_ipython().debug", "ticket 14 # get details for given ticket id cmssh> ticket new #", "RESMGR from cmssh.auth_utils import PEMMGR, working_pem from cmssh.cmssw_utils import crab_submit_remotely, crabconfig from cmssh.cern_html", "(idir, pkg) if os.path.exists(pdir): shutil.rmtree(pdir) os.mkdir(pdir) touch(os.path.join(pdir, '__init__.py')) pkgs = ['Framework', 'GuiBrowsers', 'Integration',", "arg.strip() if arg: if arg == '0' or arg == 'None' or arg", "= 0 limit = 0 debug = 0 das_client(host, query, idx, limit, debug,", 
"'PythonUtilities', 'Services', 'Utilities'] for pkg in pkgs: link = '%s/src/FWCore/%s/python' % (path, pkg)", "\"\"\" cmssh command to show or set internal pager Examples: cmssh> pager #", "msg += 'since cmssh was installed with system CMSSW install area' print msg", "os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): rel_arch = arch break if not rel_arch: msg = 'Release '", "given meta-data entity, e.g. dataset, block, file, run. Examples: cmssh> info dataset=/a/b/c cmssh>", "background: cmd = 'cp %s' % orig_arg subprocess.call(cmd, shell=True) else: run(\"cp %s %s\"", "print_error(msg) elif pat_run.match(arg): arg = arg.replace('run=', '') res = run_info(arg, debug) elif pat_release.match(arg):", "' invoke ROOT\\n' msg += msg_green('du ') \\ + ' display disk usage", "+= '\\nAvailable CMSSW commands (once you install any CMSSW release):\\n' msg += msg_green('releases", "not arg: return try: debug = get_ipython().debug except: debug = 0 fname =", "pdir = '%s/%s' % (idir, pkg) if os.path.exists(pdir): shutil.rmtree(pdir) os.mkdir(pdir) touch(os.path.join(pdir, '__init__.py')) pkgs", "\\ + 'e.g. info run=160915\\n' msg += msg_green('das ') + ' query DAS", "src still has options and user asked for -f options = src.split(' ')", "= 'You can post new ticket via web interface at\\n' msg += 'https://github.com/vkuznet/cmssh/issues/new\\n'", "arg = arg.strip() if arg: if arg not in ['0', '1']: print_error('Please provide", "'%s/cms/cmssw' % idir) if os.path.isdir(rdir): for rel in os.listdir(rdir): releases.append('%s/%s' % (rel, idir))", "releases = os.listdir(os.environ['CMSSW_RELEASES']) msg += '\\nInstalled releases: ' + msg_green(', '.join(releases)) print msg", "debug = get_ipython().debug except: debug = 0 if debug and access2file(fname): with open(fname,", "cmsrun(arg): \"\"\" cmssh command to execute CMSSW cmsRun command. 
Requires cmsrel to setup", "par in arg.split(): if len(par) > 0 and par[0] == '-': opts.append(par) return", "= [str(r) for r in res] releases = list(set(releases)) releases.sort() for rel in", "import execmd, touch, platform, fix_so from cmssh.cmsfs import dataset_info, block_info, file_info, site_info, run_info", "print msg return print \"Searching for %s\" % rel script = get_apt_init(os.environ['SCRAM_ARCH']) cmd", "release, file). Examples: cmssh> ls # UNIX command cmssh> ls -l local_file cmssh>", "arg: if arg == '0' or arg == 'None' or arg == 'False':", "tickets, e.g. Examples: cmssh> tickets # list all cmssh tickets cmssh> ticket 14", "subprocess # cmssh modules from cmssh.iprint import msg_red, msg_green, msg_blue from cmssh.iprint import", "(by default output dumps via pager) Examples: cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython", "arg: print_error(\"Usage: rmdir <options> dir\") if os.path.exists(arg): run(\"rmdir %s\" % arg) else: try:", "msg_green('cmsrel') + ' command and ' msg += 'CMS release environment will be", "data-services. Examples: cmssh> find dataset=/ZMM* cmssh> find file dataset=/Cosmics/CRUZET3-v1/RAW csmsh> find site dataset=/Cosmics/CRUZET3-v1/RAW", "% arch # run bootstrap command in subprocess.call since it invokes # wget/curl", "') \\ + ' setup your proxy (aka voms-proxy-init)\\n' msg += msg_green('vomsinfo ')", "provides information for given meta-data entity, e.g. dataset, block, file, run. 
Examples: cmssh>", "given arg string\"\"\" opts = [] for par in arg.split(): if len(par) >", "'ls file=%s' % lfn] cmd_list += ['ls %s' % dataset, 'info %s' %", "else: try: status = copy_lfn(orig, dst, debug, background, overwrite) print_status(status) except: traceback.print_exc() def", "% os.environ['SCRAM_ARCH'] archs = [] for name in os.listdir(os.environ['VO_CMS_SW_DIR']): if check_os(name) and name.find('.')", "spawned into serate process, therefore # subprocess.Popen will not catch it run(cmd, sdir,", "<userkey.pem> -cert <usercert.pem> \"\"\" cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem') with working_pem(PEMMGR.pem) as key: run(\"voms-proxy-destroy\")", "cmd = '' return cmd def cms_root(arg): \"\"\" cmssh command to run ROOT", "msg += msg_green('cmsRun ') \\ + ' cmsRun command for release in question\\n'", "dumps via pager) Examples: cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh> read config.txt", "dirs[-1]), name) return script def cms_install(rel): \"\"\" cmssh command to install given CMSSW", "= arch break if not rel_arch: msg = 'Release ' + msg_red(rel) msg", "msg = '\\nYou are not allowed to install new release, ' msg +=", "touch(os.path.join(pdir, '__init__.py')) pkgs = ['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger', 'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities', 'Services',", "config.write(crabconfig()) msg = 'Your crab.cfg has been created, please edit it ' msg", "os.path.exists(dst): if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(rootsys, dst) # set edm utils for", "['se', 'site', 'lfn', 'dataset', 'block', 'run', 'release', 'file'] for item in entities: if", "cmssh> config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\" if arg: arg = arg.strip() if pat_dataset.match(arg): 
reqmgr(arg.replace('dataset=', ''))", "dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run = 160915 sename = 'T3_US_Cornell:/store/user/valya' cmd_list", "releases:\" for rel in releases: print rel else: msg = \"\\nYou don't have", "not yet installed on your system.\\n' msg += 'Use ' + msg_green('releases') msg", "yet installed on your system.\\n' msg += 'Use ' + msg_green('releases') msg +=", "tail -1`;' % pkg_dir if not os.path.isdir(pkg_dir): cmd = '' return cmd def", "else: ticket = res to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') send_email(to_user, email, title, ticket) else: res", "' find dataset=/*Zee*\\n' msg += ' for r in results(): print r, type(r)\\n'", "= '%s %s' % (self.cmd, args.strip()) run(cmd) def subprocess(self, args=''): \"Execute given command", "level is %s\" % os.environ.get('HTTPDEBUG', 0)) def cms_find(arg): \"\"\" Perform lookup of given", "os.path.islink(pdir): os.remove(pdir) if os.path.isdir(pdir): shutil.rmtree(pdir) os.makedirs(pdir) # Set cmssh prompt prompt = 'cms-sh'", "' msg += 'CMS release environment will be set for you' print_info(msg) def", "for -f options = src.split(' ') if len(options) > 1 and options[0] ==", "import copy_lfn, rm_lfn, mkdir, rmdir, list_se, dqueue from cmssh.utils import list_results, check_os, unsupported_linux,", "arg.strip() if not arg or arg == 'list': print_info('Local data transfer') dqueue(arg) elif", "['ls %s' % sename, 'mkdir %s/foo' % sename, 'ls %s' % sename, 'rmdir", "except KeyboardInterrupt: break if not desc: msg = \"You did not provide bug", "os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(rootsys, dst) # set edm utils for given release", "def cms_help_msg(): \"\"\"cmsHelp message\"\"\" msg = 'Available cmssh commands:\\n' msg += msg_green('find ')", "+= msg_blue('cmsrel <rel>\\n') releases = os.listdir(os.environ['CMSSW_RELEASES']) msg += '\\nInstalled releases: ' + 
msg_green(',", "following list\\n' msg += ', '.join(cms_archs) raise Exception(msg) print \"Switch to SCRAM_ARCH=%s\" %", "res = CMSMGR.lookup(arg) else: gen = CMSMGR.lookup(args[0].strip()) for flt in args[1:]: res =", "'Initialize %s apt repository ...' % arch run(cmd, sdir, msg=msg, debug=debug, shell=True) def", "' + msg_red(rel) msg += ' is not yet installed on your system.\\n'", "root_init) cmd = '%s root -l %s' % (pkgs_init, arg.strip()) run(cmd) def cms_xrdcp(arg):", "= True else: overwrite = False except: traceback.print_exc() return try: debug = get_ipython().debug", "# else: # print_info(\"Debug level is %s\" % DEBUG.level) def debug_http(arg): \"\"\" Show", "+ 'e.g. mkdir /path/foo or rmdir T3_US_Cornell:/store/user/foo\\n' msg += msg_green('ls ') \\ +", "arg.strip() else: flt = None startswith = None entities = \\ ['se', 'site',", "% (script, rel) run(cmd) if rel.lower().find('patch') != -1: print \"Installing cms+cmssw-patch+%s ...\" %", "\"\"\" arg = arg.strip() debug = get_ipython().debug args = arg.split('|') if len(args) ==", "not pat.match(rel): msg = 'Fail to validate release name \"%s\"' % rel print_error(msg)", "open(fname, 'r') as cms_json: print cms_json.read() def integration_tests(_arg): \"Run series of integration tests", "invokes # wget/curl and it can be spawned into serate process, therefore #", "file1.root &' % lfn, 'cp %s file2.root &' % lfn2, 'ls'] cmd_list +=", "Examples: cmssh> releases # show installed CMSSW releases cmssh> releases list # list", "# UNIX command cmssh> du T3_US_Cornell \"\"\" arg = arg.strip() if pat_site.match(arg): lookup(arg)", "os.symlink(rootsys, dst) # set edm utils for given release ipython = get_ipython() rdir", "0 fname = arg.replace('file=', '') if arg and os.path.isfile(fname): mtype = mimetypes.guess_type(arg) if", "ipython.register_magic_function(Magic(cmd).execute, 'line', name) # Set cmssh prompt ipython.prompt_manager.in_template = '%s|\\#> ' % rel", "post new ticket via web interface at\\n' msg 
+= 'https://github.com/vkuznet/cmssh/issues/new\\n' msg += 'otherwise", "rmdir command removes directory from local file system or CMS storage element. Examples:", "stand-alone installation if os.environ.get('CMSSH_CMSSW', None): msg = '\\nYou are not allowed to install", "= arg.strip() res = [] try: debug = get_ipython().debug except: debug = 0", "try: debug = get_ipython().debug except: debug = 0 fname = arg.replace('file=', '') if", "% (src, dst)) else: try: status = copy_lfn(orig, dst, debug, background, overwrite) print_status(status)", "debug = 0 if debug and access2file(fname): with open(fname, 'r') as cms_json: print", "[k for k, v in mdict['line'].items() if v.func_name.find('cms_')!=-1] cmds.sort() for key in cmds:", "arg run(cmd) else: if pat_lfn.match(arg.split(':')[-1]): status = rm_lfn(arg, verbose=debug) print_status(status) else: if not", "first: ' msg = msg_red(msg) msg += msg_blue('cmsrel <rel>\\n') releases = os.listdir(os.environ['CMSSW_RELEASES']) msg", "= 'Documentation is not available' else: doc = cms_help_msg() print doc def cms_rm(arg):", "run ROOT within cmssh Examples: cmssh> root -l \"\"\" pcre_init = pkg_init('external/pcre') gcc_init", "os.environ['CMSSW_RELEASE_BASE'] = path for pkg in ['FWCore', 'DataFormats']: pdir = '%s/%s' % (idir,", "vdir = os.environ.get('VO_CMS_SW_DIR', None) arch = os.environ.get('SCRAM_ARCH', None) if not vdir or not", "e.g. du T3_US_Cornell\\n' msg += '\\nAvailable CMSSW commands (once you install any CMSSW", "from local file system or CMS storage element. 
Examples: cmssh> rmdir foo cmssh>", "either grid or voms\\n' msg += msg_green('vomsinit ') \\ + ' setup your", "available CMSSW releases on given platform cmssh> releases all # show all known", "arg.strip() try: debug = get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage:", "jobsummary({'site': site}) elif pat_user.match(arg): user = arg.replace('user=', '') print_info('Dashboard information, user=%s' % user)", "check_os(name): print name else: cms_archs = cms_architectures('all') if arg not in cms_archs: msg", "dst = '%s/install/lib/release_root' % root if os.path.exists(dst): if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(rootsys,", "if osname == 'osx' and osarch == 'ia32': return 'OSX/ia32 is not supported", "= '%s/FWCore/%s' % (idir, pkg) os.symlink(link, dst) link = '%s/src/DataFormats/FWLite/python' % path dst", "def github_issues(arg=None): \"\"\" Retrieve information about cmssh tickets, e.g. Examples: cmssh> tickets #", "Examples: cmssh> jobs cmssh> jobs list cmssh> jobs site=T2_US_UCSD cmssh> jobs dashboard cmssh>", "cmssh> vomsinit By default it applies the following options -rfc -voms cms:/cms -key", "res = release_info(arg, debug) elif startswith: msg = 'No pattern is allowed for", "can post new ticket via web interface at\\n' msg += 'https://github.com/vkuznet/cmssh/issues/new\\n' msg +=", "(script, rel) run(cmd) if rel.lower().find('patch') != -1: print \"Installing cms+cmssw-patch+%s ...\" % rel", "\"Return proper apt init.sh for given architecture\" apt_dir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/external/apt' %", "'ls', 'cp file.root %s' % sename, 'ls %s' % sename, 'rm %s/file.root' %", "run(cmd) os.chdir(os.path.join(rel, 'src')) # get ROOT from run-time environment cmd = 'eval `scramv1", "= 'Fail to validate release name \"%s\"' % rel print_error(msg) msg = 'Please", "arg cmsexe(cmd) def cms_pager(arg=None): \"\"\" cmssh command to show or set internal pager", "system or CMS storage 
element. Examples: cmssh> rmdir foo cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo \"\"\"", "import shutil import base64 import pprint import mimetypes import traceback import subprocess #", "opts class Magic(object): \"\"\" Class to be used with ipython magic functions. It", "arg.replace('site=', '') res = site_info(arg, debug) elif pat_lfn.match(arg): arg = arg.replace('file=', '') arg", "in entities: if arg.startswith(item + '='): startswith = item if os.path.isfile(orig_arg) or os.path.isdir(orig_arg):", "print '\\nInstalled architectures:' for item in archs: print item elif arg == 'all'", "if pat_lfn.match(arg.split(':')[-1]): status = rm_lfn(arg, verbose=debug) print_status(status) else: if not os.path.exists(dst): print_error('File %s", "release\" args = {'release': rel} releases = get_data(tc_url('py_getReleaseArchitectures'), args) output = [] for", "os.environ['SCRAM_ARCH'] = arch if not os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch) return 'ok' else: msg", "runtime -sh`; env | grep ^ROOTSYS=' stdout, stderr = execmd(cmd) if stderr: print", "arg = arg.strip() ipth = get_ipython() if arg == '': print_info(\"Verbose level is", "msg = 'Wrong architecture, please choose from the following list\\n' msg += ',", "arg: print \"CMSSW releases for %s platform\" % platform() res = release_info(release=None, rfilter=arg)", "dst) # switch to given release os.environ['CMSSW_VERSION'] = rel os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel)", "= jobsummary({'user': user}) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_config(arg): \"\"\" Return", "['ls %s' % dataset, 'info %s' % dataset] cmd_list += ['find dataset=/ZMM*', 'das", "debug = 0 res = das_client(host, query, idx, limit, debug, 'json') RESMGR.assign([res]) pprint.pprint(res)", "find config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh> find run=160915 cmssh> find lumi 
dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> find lumi", "= '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run = 160915 sename = 'T3_US_Cornell:/store/user/valya' cmd_list = ['pager 0', 'debug_http", "dataset, 'info %s' % dataset] cmd_list += ['find dataset=/ZMM*', 'das dataset=/ZMM*', 'find dataset", "run(\"voms-proxy-destroy\") cmd = \"voms-proxy-init -rfc -voms cms:/cms -key %s -cert %s\" % (key,", "not arg: print_error(\"Usage: rm <options> source_file\") dst = arg.split()[-1] if os.path.exists(dst) or len(glob.glob(dst)):", "(key, cert) run(cmd) userdn = os.environ.get('USER_DN', '') if not userdn: cmd = \"voms-proxy-info", "or <all> Examples: cmssh> releases # show installed CMSSW releases cmssh> releases list", "cmd_list = ['pager 0', 'debug_http 0'] cmd_list += ['ls', 'mkdir ttt', 'ls -l',", "os.path.isfile(fname): mtype = mimetypes.guess_type(arg) if mtype[0]: print \"Mime type:\", mtype[0] ipython = get_ipython()", "= arg.replace('&', '').strip() src, dst = arg.rsplit(' ', 1) if dst.find('&') != -1:", "src.split(' ') if len(options) > 1 and options[0] == '-f': overwrite = True", "not os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch) return 'ok' else: msg = '%s/%s rejected by", "run(cmd) userdn = os.environ.get('USER_DN', '') if not userdn: cmd = \"voms-proxy-info -identity\" stdout,", "= 160915 sename = 'T3_US_Cornell:/store/user/valya' cmd_list = ['pager 0', 'debug_http 0'] cmd_list +=", "github_issues(arg=None): \"\"\" Retrieve information about cmssh tickets, e.g. 
Examples: cmssh> tickets # list", "mimetypes import traceback import subprocess # cmssh modules from cmssh.iprint import msg_red, msg_green,", "orig_arg subprocess.call(cmd, shell=True) else: run(\"cp %s %s\" % (src, dst)) else: try: status", "= None if arg: arg = arg.strip() if not arg or arg ==", "stderr = execmd(cmd) if stderr: print \"While executing cmd=%s\" % cmd print_warning(stderr) rootsys", "= 'cmsRun %s' % arg cmsexe(cmd) def cms_pager(arg=None): \"\"\" cmssh command to show", "local_file cmssh> ls T3_US_Cornell:/store/user/valya cmssh> ls run=160915 \"\"\" arg = arg.strip() res =", "command to install one' print msg def cms_read(arg): \"\"\" cmssh command to read", "pager # shows current setting cmssh> pager None # set pager to nill", "osparameters, check_voms_proxy, run, user_input from cmssh.utils import execmd, touch, platform, fix_so from cmssh.cmsfs", "[] for item in releases: rel_arch = item[0] status = item[1] if check_os(rel_arch):", "archs def cms_arch(arg=None): \"\"\" Show or set CMSSW architecture. 
Optional parameters either <all>", "rm_lfn(arg, verbose=debug) print_status(status) else: if not os.path.exists(dst): print_error('File %s does not exists' %", "magic = ipython.find_line_magic('edmFileUtil') if magic: if arg[0] == '/': cmd = '-e -f", "rel_arch) reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir) for name in os.listdir(reldir): fname = os.path.join(reldir, name)", "'0' or arg == 'None' or arg == 'False': if os.environ.has_key('CMSSH_PAGER'): del os.environ['CMSSH_PAGER']", "if magic: if arg[0] == '/': cmd = '-e -f file:///%s' % fname", "arg.strip() if arg: if arg not in ['0', '1']: print_error('Please provide 0/1 for", "pdir = os.path.join(path, 'install/lib/release_%s' % idir) if os.path.islink(pdir): os.remove(pdir) if os.path.isdir(pdir): shutil.rmtree(pdir) os.makedirs(pdir)", "shutil.rmtree(pdir) os.mkdir(pdir) touch(os.path.join(pdir, '__init__.py')) pkgs = ['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger', 'MessageService', 'Modules', 'ParameterSet',", "-l \"\"\" pcre_init = pkg_init('external/pcre') gcc_init = pkg_init('external/gcc') root_init = pkg_init('lcg/root') pkgs_init =", "debug level is %s\" % os.environ.get('HTTPDEBUG', 0)) def cms_find(arg): \"\"\" Perform lookup of", "swdir, arch) if unsupported_linux(): cmd += ' -unsupported_distribution_hack' sdir = os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW') debug", "...' 
% arch run(cmd, sdir, msg=msg, debug=debug, shell=True) def get_release_arch(rel): \"Return architecture for", "%s/bootstrap.sh setup -path %s -arch %s' % (swdir, swdir, arch) if unsupported_linux(): cmd", "if user_input(msg, default='N'): with open('crab.cfg', 'w') as config: config.write(crabconfig()) msg = 'Your crab.cfg", "been created, please edit it ' msg += 'appropriately and re-run crab command'", "'/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run = 160915 sename = 'T3_US_Cornell:/store/user/valya' cmd_list = ['pager", "attach traceback, etc. Once done print ' msg += msg_blue('EOF') + ' and", "debug level to %s\" % arg) os.environ['HTTPDEBUG'] = arg else: print_info(\"HTTP debug level", "desc: msg = \"You did not provide bug description\" print_error(msg) return if not", "cmd = '%s %s' % (self.cmd, args.strip()) subprocess.call(cmd, shell=True) def installed_releases(): \"Print a", "% (os.environ['SCRAM_ARCH'], arch) print_warning(msg) msg = '\\n%s/%s is not installed within cmssh, proceed'", "def check_release_arch(rel): \"Check release/architecture\" # check if given release name is installed on", "usage for given site, e.g. du T3_US_Cornell\\n' msg += '\\nAvailable CMSSW commands (once", "args\\n' msg += msg_green('scram ') + ' CMSSW scram command\\n' msg += msg_green('cmsRun", "RESMGR.assign(res) list_results(res, debug) def verbose(arg): \"\"\" Set/get verbosity level \"\"\" arg = arg.strip()", "def cms_releases(arg=None): \"\"\" List available CMS releases. 
Optional parameters either <list> or <all>", "par[0] == '-': opts.append(par) return opts class Magic(object): \"\"\" Class to be used", "arg = arg.strip() if pat_dataset.match(arg): reqmgr(arg.replace('dataset=', '')) def cms_lumi(arg): \"\"\" Return lumi info", "% (rel, arch) if user_input(msg, default='N'): os.environ['SCRAM_ARCH'] = arch if not os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'],", "% sename, 'ls %s' % sename, 'rmdir %s/foo' % sename, 'ls %s' %", "arg.split(): if len(par) > 0 and par[0] == '-': opts.append(par) return opts class", "arg = arg.strip() try: last_arg = arg.split(' ')[-1].strip() if last_arg == '&': background", "environment, please run first: ' msg = msg_red(msg) msg += msg_blue('cmsrel <rel>\\n') releases", "command. Examples: cmssh> du # UNIX command cmssh> du T3_US_Cornell \"\"\" arg =", "for rel in releases: print rel installed_releases() def pkg_init(pkg_dir): \"Create CMS command to", "cms+cmssw+%s' % (script, rel) subprocess.call(cmd, shell=True) # use subprocess due to apt-get interactive", "root if os.path.exists(dst): if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(rootsys, dst) # set edm", "\"\"\"cmsHelp message\"\"\" msg = 'Available cmssh commands:\\n' msg += msg_green('find ') \\ +", "elif pat_site.match(arg): arg = arg.replace('site=', '') res = site_info(arg, debug) elif pat_lfn.match(arg): arg", "None) arch = os.environ.get('SCRAM_ARCH', None) if not vdir or not arch: msg =", "'list': if arg == 'all': print 'CMSSW architectures:' else: print 'CMSSW architectures for", "releases. Optional parameters either <list> or <all> Examples: cmssh> releases # show installed", "check_voms_proxy, run, user_input from cmssh.utils import execmd, touch, platform, fix_so from cmssh.cmsfs import", "data-services. 
\"\"\" arg = arg.strip() debug = get_ipython().debug args = arg.split('|') if len(args)", "arg if orig_arg.find('>') != -1: arg, out = orig_arg.split('>', 1) out = out.strip()", "os.environ['CMSSH_INSTALL_DIR'] base = os.path.realpath('%s/CMSSW' % root) path = '%s/%s/cms/cmssw/%s' % (base, rel_arch, rel)", "debug) elif pat_lfn.match(arg): arg = arg.replace('file=', '') arg = arg.replace('lfn=', '') res =", "', 1) if len(split) == 1: cmd = item args = '' else:", "len(args) == 1: # no filter res = CMSMGR.lookup(arg) else: gen = CMSMGR.lookup(args[0].strip())", "% path dst = '%s/DataFormats/FWLite' % idir os.symlink(link, dst) for lib in ['external',", "file, run, lumi, site, user \"\"\" lookup(arg) def cms_du(arg): \"\"\" cmssh disk utility", "debug=True, flt=flt) def cms_config(arg): \"\"\" Return configuration object for given dataset Examples: cmssh>", "msg_blue('pip <search|(un)install> <package>') return msg def cms_help(arg=None): \"\"\" cmshelp command Examples: cmssh> cmshelp", "release environment will be set for you' print_info(msg) def cmsrel(rel): \"\"\" cmssh release", "<options> dir\") if arg.find(':') == -1: # not a SE:dir pattern run(\"mkdir %s\"", "storate elements or CMS entities (se, site, dataset, block, run, release, file). 
Examples:", "archs.append(name) if archs: print '\\nInstalled architectures:' for item in archs: print item elif", "= 'Would you like to create one' if user_input(msg, default='N'): with open('crab.cfg', 'w')", "reset CMSSW environment to cmssh one cmssh> cmsrel CMSSW_5_2_4 \"\"\" ipython = get_ipython()", "list # show all CMSSW architectures for given platform \"\"\" if not arg:", "msg return # set release architecture os.environ['SCRAM_ARCH'] = rel_arch # setup environment cmssw_dir", "entities: dataset, block, file, run, lumi, site, user \"\"\" lookup(arg) def cms_du(arg): \"\"\"", "'Services', 'Utilities'] for pkg in pkgs: link = '%s/src/FWCore/%s/python' % (path, pkg) dst", "% rel run(cmd) os.chdir(os.path.join(rel, 'src')) # get ROOT from run-time environment cmd =", "\"\"\" arg = arg.strip() ipth = get_ipython() if arg == '': print_info(\"Verbose level", "= arg.strip() else: flt = None if arg: arg = arg.strip() if not", "arg cmsexe(cmd) def cmsrun(arg): \"\"\" cmssh command to execute CMSSW cmsRun command. Requires", "arg.strip() read(arg, out, debug) def cms_releases(arg=None): \"\"\" List available CMS releases. Optional parameters", "stdout, stderr = execmd(cmd) if stderr: print \"While executing cmd=%s\" % cmd print_warning(stderr)", "% (rel_dir, rel_arch) reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir) for name in os.listdir(reldir): fname =", "' + msg_green(', '.join(releases)) print msg return cmd = \"eval `scramv1 runtime -sh`;", "current user - user, which lists jobs of given user Examples: cmssh> jobs", "sename = 'T3_US_Cornell:/store/user/valya' cmd_list = ['pager 0', 'debug_http 0'] cmd_list += ['ls', 'mkdir", "(rel, arch) output.append(msg) if output: return ', '.join(output) osname, osarch = osparameters() if", "traceback.print_exc() return try: debug = get_ipython().debug except: debug = 0 if not arg:", "= 'Type your problem, attach traceback, etc. 
Once done print ' msg +=", "print \"Searching for %s\" % rel script = get_apt_init(os.environ['SCRAM_ARCH']) cmd = 'source %s;", "since # execute method will run in current shell environment # old command", "T3_US_Cornell:/store/user/file.root\\n' msg += msg_green('cp ') \\ + ' copy file/LFN, e.g. cp local.file", "results are accessible via %s function, e.g.\\n' \\ % msg_blue('results()') msg += '", "shows current setting cmssh> pager None # set pager to nill \"\"\" arg", "DBS/Phedex/SiteDB)\\n' msg += msg_green('dbs_instance') \\ + ' show/set DBS instance, default is DBS", "not arg: print_error(\"Usage: mkdir <options> dir\") if arg.find(':') == -1: # not a", "debug: if ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent', fname) else: cms_ls(arg) def cms_cp(arg): \"\"\" cmssh cp command", "msg_blue from cmssh.iprint import print_warning, print_error, print_status, print_info from cmssh.filemover import copy_lfn, rm_lfn,", "pat_site, pat_dataset, pat_block from cmssh.regex import pat_lfn, pat_run, pat_se, pat_user from cmssh.tagcollector import", "DAS service\\n' msg += msg_green('das_json ') \\ + ' query DAS and return", "'osx' and osarch == 'ia32': return 'OSX/ia32 is not supported in CMSSW' return", "os.path.exists(orig) and not pat.match(dst): if background: cmd = 'cp %s' % orig_arg subprocess.call(cmd,", "CMSSW architectures for given platform \"\"\" if not arg: print \"Current architecture: %s\"", "posted as anonymous gist ticket' print_info(msg) if not user_input('Proceed', default='N'): return email =", "scramv1 command in front of edm one, since # execute method will run", "'cms_'+arg, 'cms'+arg]: func = ipython.find_magic(case) if func: doc = func.func_doc break else: doc", "filesystem or remote CMS storage element. 
Examples: cmssh> mkdir foo cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo", "show installed CMSSW releases cmssh> releases list # list available CMSSW releases on", "Exception('Not implemented yet') def cms_rmdir(arg): \"\"\" cmssh rmdir command removes directory from local", "ticket = res to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') send_email(to_user, email, title, ticket) else: res =", "> 1 and options[0] == '-f': overwrite = True else: overwrite = False", "get_apt_init(arch): \"Return proper apt init.sh for given architecture\" apt_dir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/external/apt'", "arch) if user_input(msg, default='N'): os.environ['SCRAM_ARCH'] = arch if not os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch)", "+ ' show or switch to given CMSSW architecture, accept <list|all> args\\n' msg", "cms:/cms -key <userkey.pem> -cert <usercert.pem> \"\"\" cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem') with working_pem(PEMMGR.pem) as", "if arg == 0 or arg == '0': ipth.debug = False else: ipth.debug", "title = 'cmssh gist %s' % res['html_url'] if isinstance(res, dict): ticket = pprint.pformat(res)", "command in original shell environment\" cmd = '%s %s' % (self.cmd, args.strip()) subprocess.call(cmd,", "\\ + 'e.g. rm local.file or rm T3_US_Cornell:/store/user/file.root\\n' msg += msg_green('cp ') \\", "def cmsrun(arg): \"\"\" cmssh command to execute CMSSW cmsRun command. 
Requires cmsrel to", "rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): rel_arch = arch break if not rel_arch: msg =", "pat_user from cmssh.tagcollector import architectures as tc_architectures from cmssh.results import RESMGR from cmssh.auth_utils", "release os.environ['CMSSW_VERSION'] = rel os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel) if os.path.isdir(os.path.join(cmssw_dir, rel + '/src')):", "runtime -sh`; %s\" % cmd run(cmd, shell=True, call=True) def cmscrab(arg): \"\"\" Execute CRAB", "set DBS instance Examples: cmssh> dbs_instance cmssh> dbs_instance cms_dbs_prod_global \"\"\" arg = arg.strip()", "debug = 0 if not arg: print_error(\"Usage: rm <options> source_file\") dst = arg.split()[-1]", "architecture(s) cmssh> arch all # show all known CMSSW architectures cmssh> arch list", "cms_releases(arg=None): \"\"\" List available CMS releases. Optional parameters either <list> or <all> Examples:", "_osname, osarch = osparameters() releases = [] for idir in os.listdir(os.environ['VO_CMS_SW_DIR']): if idir.find(osarch)", "pkg) if os.path.exists(pdir): shutil.rmtree(pdir) os.mkdir(pdir) touch(os.path.join(pdir, '__init__.py')) pkgs = ['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger',", "crab.cfg has been created, please edit it ' msg += 'appropriately and re-run", "msg += msg_green('das_json ') \\ + ' query DAS and return data in", "def cms_rmdir(arg): \"\"\" cmssh rmdir command removes directory from local file system or", "fname ipython.register_magic_function(Magic(cmd).execute, 'line', name) # Set cmssh prompt ipython.prompt_manager.in_template = '%s|\\#> ' %", "cmssh.cmsfs import dataset_info, block_info, file_info, site_info, run_info from cmssh.cmsfs import CMSMGR, apply_filter, validate_dbs_instance", "root_init = pkg_init('lcg/root') pkgs_init = '%s %s %s' % (pcre_init, gcc_init, root_init) cmd", "on your system.\\n' msg += 'Use ' + msg_green('releases') msg += ' command", "= 'source `find %s/%s/external/apt 
-name init.sh | tail -1`; ' \\ % (swdir,", "arg.strip() if pat_dataset.match(arg): reqmgr(arg.replace('dataset=', '')) def cms_lumi(arg): \"\"\" Return lumi info for a", "existence of crab.cfg crab_dir = os.path.join(work_area, 'crab') crab_cfg = os.path.join(crab_dir, 'crab.cfg') if not", "'mkdir ttt', 'ls -l', 'rmdir ttt', 'ls'] cmd_list += ['ls dataset=%s' % dataset,", "cmssh it is not required to use cmsenv\\n' msg += 'please use '", "cmd = fname ipython.register_magic_function(Magic(cmd).execute, 'line', name) # Set cmssh prompt ipython.prompt_manager.in_template = '%s|\\#>", "command. Requires cmsrel to setup CMSSW environment. \"\"\" cmd = 'cmsRun %s' %", "= True # CMSSW commands def bootstrap(arch): \"Bootstrap new architecture\" swdir = os.environ['VO_CMS_SW_DIR']", "= osparameters() if osname == 'osx' and osarch == 'ia32': return 'OSX/ia32 is", "modules from cmssh.iprint import msg_red, msg_green, msg_blue from cmssh.iprint import print_warning, print_error, print_status,", "# setup environment cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd()) if not os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir) root =", "= 'Your crab.cfg has been created, please edit it ' msg += 'appropriately", "local.file or ls /store/user/file.root\\n' msg += msg_green('rm ') + ' remove file/LFN, '", "if arg == 'all': print name else: if check_os(name): print name else: cms_archs", "rel_arch, rel) os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel) os.environ['CMSSW_RELEASE_BASE'] = path for pkg in ['FWCore',", "host = 'https://cmsweb.cern.ch' idx = 0 limit = 0 debug = 0 das_client(host,", "for arch, status in get_release_arch(rel): if not status: msg = '%s release is", "osarch = osparameters() if osname == 'osx' and osarch == 'ia32': return 'OSX/ia32", "'ls ' + orig_arg run(cmd, shell=True) elif pat_se.match(arg): arg = arg.replace('site=', '') res", "\"\"\" List available CMS releases. 
Optional parameters either <list> or <all> Examples: cmssh>", "given query in CMS data-services. Examples: cmssh> find dataset=/ZMM* cmssh> find file dataset=/Cosmics/CRUZET3-v1/RAW", "+= msg_green('cmsRun ') \\ + ' cmsRun command for release in question\\n' msg", "dataset=/Cosmics/CRUZET3-v1/RAW csmsh> find site dataset=/Cosmics/CRUZET3-v1/RAW cmssh> find config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh> find run=160915 cmssh>", "print_error('File %s does not exists' % dst) else: raise Exception('Not implemented yet') def", "= ['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger', 'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities', 'Services', 'Utilities'] for pkg", "msg = 'Type your problem, attach traceback, etc. Once done print ' msg", "%s file2.root &' % lfn2, 'ls'] cmd_list += ['find user=oliver', 'jobs list', 'jobs", "+= msg_green('read ') \\ + ' read URL/local file content\\n' msg += msg_green('root", "print_info(\"Verbose level is %s\" % ipth.debug) else: if arg == 0 or arg", "msg = 'Unable to identify CMSSW environment, please run first: ' msg =", "in %s' % crab_dir print_warning(msg) msg = 'Would you like to create one'", "if os.path.isfile(fname): os.remove(fname) lfn = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2 = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset =", "cmd = 'cp %s' % orig_arg subprocess.call(cmd, shell=True) else: run(\"cp %s %s\" %", "overwrite = False except: traceback.print_exc() return try: debug = get_ipython().debug except: debug =", "= os.environ.get('USER_DN', '') if not userdn: cmd = \"voms-proxy-info -identity\" stdout, stderr =", "run ROOT xrdcp via cmssh shell Examples: cmssh> xrdcp /a/b/c.root file:////tmp.file.root \"\"\" dyld_path", "= 'No pattern is allowed for %s look-up' % startswith print_error(msg) else: cmd", "== 'ia32': return 
'OSX/ia32 is not supported in CMSSW' return 'no match' def", "cp command copies local files/dirs to/from local files/dirs or CMS storate elements. Examples:", "fwlite' % (script, rel) run(cmd) if rel.lower().find('patch') != -1: print \"Installing cms+cmssw-patch+%s ...\"", "command for reference: # cmd = \"eval `scramv1 runtime -sh`; %s\" % fname", "foo cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug = get_ipython().debug except:", "shutil.rmtree(dst) os.symlink(rootsys, dst) # set edm utils for given release ipython = get_ipython()", "edm utils for given release ipython = get_ipython() rdir = '%s/bin/%s' % (rel_dir,", "cp file.root T3_US_Cornell:/store/user/name cmssh> cp /store/mc/file.root T3_US_Cornell:/store/user/name cmssh> cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha \"\"\" check_voms_proxy()", "% sename, 'ls %s' % sename, 'rm %s/file.root' % sename, 'ls %s' %", "def cms_architectures(arch_type=None): \"Return list of CMSSW architectures (aka SCRAM_ARCH)\" archs = [a for", "%s ...' 
% arch # run bootstrap command in subprocess.call since it invokes", "'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) # check if release version and work area are", "jobs site=T2_US_UCSD cmssh> jobs dashboard cmssh> jobs user=my_cms_user_name \"\"\" res = None try:", "os.environ['VO_CMS_SW_DIR'], '%s/external/apt' % arch) dirs = os.listdir(apt_dir) dirs.sort() name = 'etc/profile.d/init.sh' script =", "from cmssh # or post it at https://github.com/vkuznet/cmssh/issues/new \"\"\" if arg == 'new':", "debug = get_ipython().debug except: debug = 0 orig_arg = arg if orig_arg.find('|') !=", "for %s\" % rel script = get_apt_init(os.environ['SCRAM_ARCH']) cmd = 'source %s; apt-cache search", "you install any CMSSW release):\\n' msg += msg_green('releases ') \\ + ' list", "orig_arg = arg arg = arg.strip() try: last_arg = arg.split(' ')[-1].strip() if last_arg", "cmd = 'source `find %s -name init.sh | tail -1`;' % pkg_dir if", "%s; apt-get install cms+cmssw+%s' % (script, rel) subprocess.call(cmd, shell=True) # use subprocess due", "Exception(msg) run(cmd) def cms_das(query): \"\"\" cmssh command which queries DAS data-service with provided", "rel in os.listdir(rdir): releases.append('%s/%s' % (rel, idir)) if releases: releases.sort() print \"\\nInstalled releases:\"", "= ipython.find_magic(case) if func: doc = func.func_doc break else: doc = 'Documentation is", "case in [arg, 'cms_'+arg, 'cms'+arg]: func = ipython.find_magic(case) if func: doc = func.func_doc", "cp /store/user/file.root .\\n' msg += msg_green('info ') \\ + ' provides detailed info", "e.g. cp local.file or cp /store/user/file.root .\\n' msg += msg_green('info ') \\ +", "utility cmssh command. 
Examples: cmssh> du # UNIX command cmssh> du T3_US_Cornell \"\"\"", "print_error(\"Usage: rmdir <options> dir\") if os.path.exists(arg): run(\"rmdir %s\" % arg) else: try: status", "\"\"\" if arg: arg = arg.strip() if pat_dataset.match(arg): reqmgr(arg.replace('dataset=', '')) def cms_lumi(arg): \"\"\"", "vdir or not arch: msg = 'Unable to identify CMSSW environment, please run", "\"You did not provide bug description\" print_error(msg) return if not user_input('Send this ticket',", "cmd += ' -unsupported_distribution_hack' sdir = os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW') debug = 0 msg =", "= get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: mkdir <options> dir\")", "print \"Mime type:\", mtype[0] ipython = get_ipython() magic = ipython.find_line_magic('edmFileUtil') if magic: if", "data format Examples: cmssh> das_json dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx = 0", "' + orig_arg run(cmd, shell=True) elif pat_se.match(arg): arg = arg.replace('site=', '') res =", "Set cmssh prompt ipython.prompt_manager.in_template = '%s|\\#> ' % rel # final message print", "at given site - dashboard, which lists jobs of current user - user,", "cms_help(arg=None): \"\"\" cmshelp command Examples: cmssh> cmshelp cmssh> cmshelp commands cmssh> cmshelp ls", "'all' or arg == 'list': if arg == 'all': print 'CMSSW architectures:' else:", "DBS instance\" else: msg = \"DBS instance is set to: %s\" \\ %", "arch, status in get_release_arch(rel): if not status: msg = '%s release is not", "arg, out = orig_arg.split('>', 1) out = out.strip() arg = arg.strip() else: out", "# \"\"\" # debug shell command # \"\"\" # arg = arg.strip() #", "about jobs at give site or for given user. 
It accepts the following", "\\ % (swdir, arch) cmd += 'apt-get install external+fakesystem+1.0; ' cmd += 'apt-get", "in question\\n' msg += '\\nAvailable GRID commands: <cmd> either grid or voms\\n' msg", "dataset=/ZMM*', 'find dataset file=%s' % lfn] cmd_list += ['find lumi dataset=%s' % dataset,", "cmssh tickets cmssh> ticket 14 # get details for given ticket id cmssh>", "osarch = osparameters() releases = [] for idir in os.listdir(os.environ['VO_CMS_SW_DIR']): if idir.find(osarch) !=", "%s; apt-cache search %s | grep -v -i fwlite' % (script, rel) run(cmd)", "DEBUG.set(arg) # else: # print_info(\"Debug level is %s\" % DEBUG.level) def debug_http(arg): \"\"\"", "doc = cms_help_msg() print doc def cms_rm(arg): \"\"\" CMS rm command works with", "cmd) if debug: if ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent', fname) else: cms_ls(arg) def cms_cp(arg): \"\"\" cmssh", "of the user Examples: cmssh> vomsinit By default it applies the following options", "cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh> lumi run=190704 cmssh>", "grep -v -i fwlite' % (script, rel) run(cmd) if rel.lower().find('patch') != -1: print", "= 'Bootstrap %s ...' % arch # run bootstrap command in subprocess.call since", "name is installed on user system rel_arch = None for arch in cms_architectures():", "= fname ipython.register_magic_function(Magic(cmd).execute, 'line', name) # Set cmssh prompt ipython.prompt_manager.in_template = '%s|\\#> '", "in CMS data-services. 
Examples: cmssh> find dataset=/ZMM* cmssh> find file dataset=/Cosmics/CRUZET3-v1/RAW csmsh> find", "dataset, block, file, run, lumi, site, user \"\"\" lookup(arg) def cms_du(arg): \"\"\" cmssh", "run bootstrap command in subprocess.call since it invokes # wget/curl and it can", "{190704:[1,2,3]}'] cmd_list += ['find config dataset=%s' % dataset2] cmd_list += ['du T3_US_Cornell', 'ls", "cmssh.cms_objects import get_dashboardname def options(arg): \"\"\"Extract options from given arg string\"\"\" opts =", "print_info(msg) def cmsrel(rel): \"\"\" cmssh release setup command, it setups CMSSW environment and", "item.split(' ', 1) if len(split) == 1: cmd = item args = ''", "+= msg_green('arch ') \\ + ' show or switch to given CMSSW architecture,", "+= 'CMS release environment will be set for you' print_info(msg) def cmsrel(rel): \"\"\"", "os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch) return 'ok' else: msg = '%s/%s rejected by user' %", "interface at\\n' msg += 'https://github.com/vkuznet/cmssh/issues/new\\n' msg += 'otherwise it will be posted as", "== ')': arg = arg[1:-1] for case in [arg, 'cms_'+arg, 'cms'+arg]: func =", "%s' % (os.path.join(root_path, 'bin'), arg.strip()) run(cmd) if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = dyld_path #def debug(arg):", "for item in entities: if arg.startswith(item + '='): startswith = item if os.path.isfile(orig_arg)", "msg=msg, debug=debug, shell=True) def get_release_arch(rel): \"Return architecture for given CMSSW release\" args =", "res to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') send_email(to_user, email, title, ticket) else: res = get_tickets(arg) RESMGR.assign(res)", "from cmssh.github import get_tickets, post_ticket from cmssh.cms_urls import dbs_instances, tc_url from cmssh.das import", "a system\" _osname, osarch = osparameters() releases = [] for idir in os.listdir(os.environ['VO_CMS_SW_DIR']):", "osarch == 'ia32': return 'OSX/ia32 is not supported in CMSSW' return 'no match'", "+= 
['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] mgr = get_ipython() for item in cmd_list: print_info(\"Execute %s\" %", "releases cmssh> releases list # list available CMSSW releases on given platform cmssh>", "func.func_doc break else: doc = 'Documentation is not available' else: doc = cms_help_msg()", "else: cmd = 'ls ' + orig_arg run(cmd, shell=True) if res: RESMGR.assign(res) list_results(res,", "os.environ.get('DBS_INSTANCE', 'global') print msg print '\\nAvailable DBS instances:' for inst in dbs_instances(): print", "+= 'please use ' + msg_green('cmsrel') + ' command and ' msg +=", "arg = arg.strip() else: flt = None startswith = None entities = \\", "\\ % (rel, arch) print_warning(msg) if arch != os.environ['SCRAM_ARCH']: msg = 'Your SCRAM_ARCH=%s,", "msg_green('jobs ') \\ + ' status of job queue or CMS jobs\\n' msg", "idir) if os.path.isdir(rdir): for rel in os.listdir(rdir): releases.append('%s/%s' % (rel, idir)) if releases:", "['external', 'lib', 'root']: pdir = os.path.join(path, 'install/lib/release_%s' % idir) if os.path.islink(pdir): os.remove(pdir) if", "-unsupported_distribution_hack' sdir = os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW') debug = 0 msg = 'Bootstrap %s ...'", "print \"Installing cms+cmssw+%s ...\" % rel cmd = 'source %s; apt-get install cms+cmssw+%s'", "tickets cmssh> ticket 14 # get details for given ticket id cmssh> ticket", "instance\\n' msg += msg_green('mkdir/rmdir ') + ' mkdir/rmdir command, ' \\ + 'e.g.", "entities = \\ ['se', 'site', 'lfn', 'dataset', 'block', 'run', 'release', 'file'] for item", "print_warning(msg) if arch != os.environ['SCRAM_ARCH']: msg = 'Your SCRAM_ARCH=%s, while found arch=%s' \\", "if name.find('edm') == 0 and os.path.isfile(fname): # we use Magic(cmd).execute we don't need", "None # set pager to nill \"\"\" arg = arg.strip() if arg: if", "\"\"\" arg = arg.strip() if arg: if arg == '0' or arg ==", "% dst) else: raise Exception('Not implemented yet') def cms_rmdir(arg): \"\"\" 
cmssh rmdir command", "% (rel, os.getcwd()) def cmsexe(cmd): \"\"\" Execute given command within CMSSW environment \"\"\"", "script = os.path.join(os.path.join(apt_dir, dirs[-1]), name) return script def cms_install(rel): \"\"\" cmssh command to", "gist ticket %s' % res['html_url']) title = 'cmssh gist %s' % res['html_url'] if", "source_file target_{file,directory}\") pat = pat_se orig = src.split(' ')[-1] if os.path.exists(orig) and not", "UNIX command cmssh> du T3_US_Cornell \"\"\" arg = arg.strip() if pat_site.match(arg): lookup(arg) else:", "list of options: - list, which lists local transfer jobs - site, which", "idx = 0 limit = 0 debug = 0 das_client(host, query, idx, limit,", "'%s %s' % (self.cmd, args.strip()) subprocess.call(cmd, shell=True) def installed_releases(): \"Print a list of", "e.g. install CMSSW_5_0_0\\n' msg += msg_green('cmsrel ') \\ + ' switch to given", "= arg.strip() if pat_dataset.match(arg): reqmgr(arg.replace('dataset=', '')) def cms_lumi(arg): \"\"\" Return lumi info for", "# set edm utils for given release ipython = get_ipython() rdir = '%s/bin/%s'", "'commands': cms_commands() return ipython = get_ipython() if arg[0] == '(' and arg[-1] ==", "\\ + ' cmsRun command for release in question\\n' msg += '\\nAvailable GRID", "arg not in ['0', '1']: print_error('Please provide 0/1 for debug_http command') return print_info(\"Set", "cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha \"\"\" check_voms_proxy() background = False orig_arg = arg arg =", "install one' print msg def cms_read(arg): \"\"\" cmssh command to read provided HTML", "cmssh.auth_utils import PEMMGR, working_pem from cmssh.cmssw_utils import crab_submit_remotely, crabconfig from cmssh.cern_html import read", "update; ' msg = 'Initialize %s apt repository ...' 
% arch run(cmd, sdir,", "def cms_mkdir(arg): \"\"\" cmssh mkdir command creates directory on local filesystem or remote", "= 0 fname = arg.replace('file=', '') if arg and os.path.isfile(fname): mtype = mimetypes.guess_type(arg)", "cmd = 'source %s; apt-cache search %s | grep -v -i fwlite' %", "arg == '0' or arg == 'None' or arg == 'False': if os.environ.has_key('CMSSH_PAGER'):", "run(\"cp %s %s\" % (src, dst)) else: try: status = copy_lfn(orig, dst, debug,", "'0': ipth.debug = False else: ipth.debug = True # CMSSW commands def bootstrap(arch):", "<command>') msg += '\\nInstall python software: ' + \\ msg_blue('pip <search|(un)install> <package>') return", "'ok' else: msg = '%s/%s rejected by user' % (rel, arch) output.append(msg) if", "os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path, 'lib') cmd = '%s/xrdcp %s' % (os.path.join(root_path, 'bin'), arg.strip()) run(cmd)", "new release, ' msg += 'since cmssh was installed with system CMSSW install", "'%s/%s/cms/cmssw/%s' % (base, rel_arch, rel) os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel) os.environ['CMSSW_RELEASE_BASE'] = path for", "' display disk usage for given site, e.g. 
du T3_US_Cornell\\n' msg += '\\nAvailable", "'ls %s' % sename, 'rm %s/file.root' % sename, 'ls %s' % sename, 'rm", "= '<KEY>' % time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime(time.time())) files = {key: {'content': desc}} res =", "name in os.listdir(reldir): fname = os.path.join(reldir, name) if name.find('edm') == 0 and os.path.isfile(fname):", "address\" print_error(msg) return desc = '' msg = 'Type your problem, attach traceback,", "with open(path, 'r') as demo_file: print demo_file.read() def results(): \"\"\"Return results from recent", "+ ' invoke ROOT\\n' msg += msg_green('du ') \\ + ' display disk", "print msg return # set release architecture os.environ['SCRAM_ARCH'] = rel_arch # setup environment", "!= -1: background = True dst = dst.replace('&', '').strip() if dst == '.':", "print name else: cms_archs = cms_architectures('all') if arg not in cms_archs: msg =", "+ '/src')) else: os.chdir(cmssw_dir) cmd = \"scramv1 project CMSSW %s\" % rel run(cmd)", "rm local.file or rm T3_US_Cornell:/store/user/file.root\\n' msg += msg_green('cp ') \\ + ' copy", "if releases: releases.sort() print \"\\nInstalled releases:\" for rel in releases: print rel else:", "environment cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd()) if not os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir) root = os.environ['CMSSH_ROOT'] idir", "release):\\n' msg += msg_green('releases ') \\ + ' list available CMSSW releases, accepts", "arg.replace('block=', '') res = block_info(arg, debug) elif pat_dataset.match(arg): arg = arg.replace('dataset=', '') try:", "= raw_input() if uinput.strip() == 'EOF': break desc += uinput + '\\n' except", "+= msg_green('install ') \\ + ' install CMSSW release, e.g. 
install CMSSW_5_0_0\\n' msg", "and provide a method to execute it in a shell \"\"\" def __init__(self,", "= os.path.join(cmssw_dir, rel) if os.path.isdir(os.path.join(cmssw_dir, rel + '/src')): os.chdir(os.path.join(cmssw_dir, rel + '/src')) else:", "dst) for lib in ['external', 'lib']: link = '%s/%s/%s' % (path, lib, rel_arch)", "= file_info(arg, debug) elif pat_block.match(arg): arg = arg.replace('block=', '') res = block_info(arg, debug)", "return # check if given release/architecture is in place status = check_release_arch(rel) if", "integration tests for cmssh\" for fname in ['file1.root', 'file2.root']: if os.path.isfile(fname): os.remove(fname) lfn", "CMSSW environment and creates user based directory structure. Examples: cmssh> cmsrel # reset", "proxy (aka voms-proxy-init)\\n' msg += msg_green('vomsinfo ') \\ + ' show your proxy", "T3_US_Omaha \"\"\" check_voms_proxy() background = False orig_arg = arg arg = arg.strip() try:", "os.chdir(os.path.join(cmssw_dir, rel + '/src')) else: os.chdir(cmssw_dir) cmd = \"scramv1 project CMSSW %s\" %", "which lists jobs of given user Examples: cmssh> jobs cmssh> jobs list cmssh>", "dst == '.': dst = os.getcwd() # check if src still has options", "print 'CMSSW architectures:' else: print 'CMSSW architectures for %s:' \\ % os.uname()[0].replace('Darwin', 'OSX')", "'info %s' % dataset] cmd_list += ['find dataset=/ZMM*', 'das dataset=/ZMM*', 'find dataset file=%s'", "msg += msg_blue('cmsrel <rel>\\n') releases = os.listdir(os.environ['CMSSW_RELEASES']) msg += '\\nInstalled releases: ' +", "pkg_dir = '%s/%s/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir) cmd = 'source `find %s", "Examples: cmssh> cmshelp commands \"\"\" mdict = get_ipython().magics_manager.lsmagic() cmds = [k for k,", "which lists jobs at given site - dashboard, which lists jobs of current", "msg = '%s/%s rejected by user' % (rel, arch) output.append(msg) if output: return", "msg += msg_green('arch ') \\ + ' show or switch to given 
CMSSW", "HTTP debug level to %s\" % arg) os.environ['HTTPDEBUG'] = arg else: print_info(\"HTTP debug", "command lists local job queue or provides information about jobs at give site", "all known CMS releases, including online, tests, etc. \"\"\" if arg: print \"CMSSW", "= os.environ['CMSSH_ROOT'] idir = os.environ['CMSSH_INSTALL_DIR'] base = os.path.realpath('%s/CMSSW' % root) path = '%s/%s/cms/cmssw/%s'", "dst = '%s/install/lib/release_%s' % (root, lib) if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(link, dst)", "cmd = 'source $CRAB_ROOT/crab.sh; crab %s' % arg cmsexe(cmd) def cmsrun(arg): \"\"\" cmssh", "for given dataset Examples: cmssh> config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\" if arg: arg = arg.strip()", "read from cmssh.dashboard import jobsummary from cmssh.reqmgr import reqmgr from cmssh.cms_objects import get_dashboardname", "lib in ['external', 'lib']: link = '%s/%s/%s' % (path, lib, rel_arch) dst =", "run first: ' msg = msg_red(msg) msg += msg_blue('cmsrel <rel>\\n') releases = os.listdir(os.environ['CMSSW_RELEASES'])", "ipython.prompt_manager.in_template = '%s|\\#> ' % rel # final message print \"%s is ready,", "don't need # to add scramv1 command in front of edm one, since", "on your system.\" msg += \"\\nPlease use \" + msg_green('install CMSSW_X_Y_Z') \\ +", "given dataset/file/block/lfn/run Examples: cmssh> lumi run=190704 cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh>", "entity, e.g. dataset, block, file, run. 
Examples: cmssh> info dataset=/a/b/c cmssh> info /a/b/c", "split = item.split(' ', 1) if len(split) == 1: cmd = item args", "rootsys = stdout.replace('\\n', '').replace('ROOTSYS=', '') dst = '%s/install/lib/release_root' % root if os.path.exists(dst): if", "ipython = get_ipython() rdir = '%s/bin/%s' % (rel_dir, rel_arch) reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir)", "crab.cfg file found in %s' % crab_dir print_warning(msg) msg = 'Would you like", "dataset=/ZMM* cmssh> find file dataset=/Cosmics/CRUZET3-v1/RAW csmsh> find site dataset=/Cosmics/CRUZET3-v1/RAW cmssh> find config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM", "mgr.run_line_magic(cmd, args) def cms_info(arg): \"\"\" cmssh info command provides information for given meta-data", "+= '\\nList cmssh commands : ' + msg_blue('commands') msg += '\\ncmssh command help", "cmssh> arch # show current and installed architecture(s) cmssh> arch all # show", "= 'cmssh gist %s' % res['html_url'] if isinstance(res, dict): ticket = pprint.pformat(res) else:", "doc = func.func_doc break else: doc = 'Documentation is not available' else: doc", "os.listdir(apt_dir) dirs.sort() name = 'etc/profile.d/init.sh' script = os.path.join(os.path.join(apt_dir, dirs[-1]), name) return script def", "queue or provides information about jobs at give site or for given user.", "if not vdir or not arch: msg = 'Unable to identify CMSSW environment,", "% os.uname()[0].replace('Darwin', 'OSX') for name in cms_architectures('all'): if arg == 'all': print name", "for lib in ['external', 'lib']: link = '%s/%s/%s' % (path, lib, rel_arch) dst", "in ['0', '1']: print_error('Please provide 0/1 for debug_http command') return print_info(\"Set HTTP debug", "'ia32': return 'OSX/ia32 is not supported in CMSSW' return 'no match' def get_apt_init(arch):", "config dataset=%s' % dataset2] cmd_list += ['du T3_US_Cornell', 'ls T3_US_Cornell'] cmd_list += ['ls", "return 'ok' output = [] for arch, status in 
get_release_arch(rel): if not status:", "to run crab command you must ' msg += 'run ' + msg_blue('cmsrel')", "'bootstrap.log', msg, debug, shell=True, call=True) cmd = 'source `find %s/%s/external/apt -name init.sh |", "local transfer jobs - site, which lists jobs at given site - dashboard,", "orig_arg run(cmd, shell=True) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_jobs(arg=None): \"\"\" cmssh", "raw_input('Your Email : ') if not email: msg = \"You did your email", "pat_site.match(arg): lookup(arg) else: cmd = 'du ' + arg cmd = cmd.strip() subprocess.call(cmd,", "'Fail to validate release name \"%s\"' % rel print_error(msg) msg = 'Please check", "execute(self, args=''): \"Execute given command in current shell environment\" cmd = '%s %s'", "'\\nInstalled releases: ' + msg_green(', '.join(releases)) print msg return cmd = \"eval `scramv1", "cmssh command to run ROOT within cmssh Examples: cmssh> root -l \"\"\" pcre_init", "subprocess.call(cmd, shell=True) def lookup(arg): \"\"\" Perform lookup of given query in CMS data-services.", "we use Magic(cmd).execute we don't need # to add scramv1 command in front", "'\\n' print msg while True: try: uinput = raw_input() if uinput.strip() == 'EOF':", "\"rm %s\" % arg run(cmd) else: if pat_lfn.match(arg.split(':')[-1]): status = rm_lfn(arg, verbose=debug) print_status(status)", "(self.cmd, args.strip()) run(cmd) def subprocess(self, args=''): \"Execute given command in original shell environment\"", "-1: arg, out = orig_arg.split('>', 1) out = out.strip() arg = arg.strip() else:", "Class to be used with ipython magic functions. 
It holds given command and", "cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\" arg = arg.strip() try: debug = get_ipython().debug except: debug", "pager None # set pager to nill \"\"\" arg = arg.strip() if arg:", "info /a/b/c cmssh> info run=160915 cmssh> info local_file.root Please note: to enable access", "command lookup given query in CMS data-services. Examples: cmssh> find dataset=/ZMM* cmssh> find", "break else: doc = 'Documentation is not available' else: doc = cms_help_msg() print", "except IndexError: msg = \"Given pattern '%s' does not exist on local filesystem", "pat_dataset.match(arg): reqmgr(arg.replace('dataset=', '')) def cms_lumi(arg): \"\"\" Return lumi info for a given dataset/file/block/lfn/run", "msg_blue('cmsrel <rel>\\n') releases = os.listdir(os.environ['CMSSW_RELEASES']) msg += '\\nInstalled releases: ' + msg_green(', '.join(releases))", "% sename, 'ls %s' % sename, 'rm file.root', 'cp %s file1.root &' %", "-v -i fwlite' % (script, rel) run(cmd) if rel.lower().find('patch') != -1: print \"Installing", "via %s function, e.g.\\n' \\ % msg_blue('results()') msg += ' find dataset=/*Zee*\\n' msg", "def cms_arch(arg=None): \"\"\" Show or set CMSSW architecture. Optional parameters either <all> or", "...' % arch # run bootstrap command in subprocess.call since it invokes #", "if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_jobs(arg=None): \"\"\" cmssh jobs command lists", "das_json dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx = 0 limit = 0 debug", "script def cms_install(rel): \"\"\" cmssh command to install given CMSSW release. Examples: cmssh>", "or os.path.isdir(orig_arg): cmd = 'ls ' + orig_arg run(cmd, shell=True) elif pat_se.match(arg): arg", "if not arg: return try: debug = get_ipython().debug except: debug = 0 fname", "info for a given dataset/file/block/lfn/run Examples: cmssh> lumi run=190704 cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh>", "e.g. 
CMSSW_X_Y_Z<_patchN>' print msg return # check if we have stand-alone installation if", "if not desc: msg = \"You did not provide bug description\" print_error(msg) return", "verbose=debug) print_status(status) else: if not os.path.exists(dst): print_error('File %s does not exists' % dst)", "commands (once you install any CMSSW release):\\n' msg += msg_green('releases ') \\ +", "string\"\"\" opts = [] for par in arg.split(): if len(par) > 0 and", "to cmssh one cmssh> cmsrel CMSSW_5_2_4 \"\"\" ipython = get_ipython() rel = rel.strip()", "in arg: cmd = 'apt%s' % arg else: msg = 'Not supported apt", "= '%s/src/DataFormats/FWLite/python' % path dst = '%s/DataFormats/FWLite' % idir os.symlink(link, dst) for lib", "elif pat_lfn.match(arg): arg = arg.replace('file=', '') arg = arg.replace('lfn=', '') res = file_info(arg,", "') \\ + ' search CMS meta-data (query DBS/Phedex/SiteDB)\\n' msg += msg_green('dbs_instance') \\", "for a in tc_architectures(arch_type)] return archs def cms_arch(arg=None): \"\"\" Show or set CMSSW", "rel_dir)): rel_arch = arch break if not rel_arch: msg = 'Release ' +", "try: debug = get_ipython().debug except: debug = 0 orig_arg = arg if orig_arg.find('|')", "lists jobs of given user Examples: cmssh> jobs cmssh> jobs list cmssh> jobs", "% root) path = '%s/%s/cms/cmssw/%s' % (base, rel_arch, rel) os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel)", "cmd_list += ['find lumi dataset=%s' % dataset, 'find lumi {\"190704\":[1,2,3]}', 'find lumi {190704:[1,2,3]}']", "rel os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel) if os.path.isdir(os.path.join(cmssw_dir, rel + '/src')): os.chdir(os.path.join(cmssw_dir, rel +", "os.remove(dst) else: shutil.rmtree(dst) os.symlink(rootsys, dst) # set edm utils for given release ipython", "command' raise Exception(msg) run(cmd) def cms_das(query): \"\"\" cmssh command which queries DAS data-service", "else: flt = None startswith = None entities = \\ ['se', 'site', 'lfn',", "os.symlink(link, dst) for lib 
in ['external', 'lib']: link = '%s/%s/%s' % (path, lib,", "Examples: cmssh> ls # UNIX command cmssh> ls -l local_file cmssh> ls T3_US_Cornell:/store/user/valya", "cmssh> cmshelp commands \"\"\" mdict = get_ipython().magics_manager.lsmagic() cmds = [k for k, v", "%s' % arg cmsexe(cmd) def cmsrun(arg): \"\"\" cmssh command to execute CMSSW cmsRun", "\"\"\" Show or set CMSSW architecture. Optional parameters either <all> or <list> Examples:", "msg += 'Use ' + msg_green('install %s' % rel) msg += ' command", "wget/curl and it can be spawned into serate process, therefore # subprocess.Popen will", "'new': msg = 'You can post new ticket via web interface at\\n' msg", "\"\"\" cmssh mkdir command creates directory on local filesystem or remote CMS storage", "Examples: cmssh> install CMSSW_5_2_4 \"\"\" rel = rel.strip() pat = pat_release if not", "debug=True, flt=flt) def cms_jobs(arg=None): \"\"\" cmssh jobs command lists local job queue or", "cms_xrdcp(arg): \"\"\" cmssh command to run ROOT xrdcp via cmssh shell Examples: cmssh>", "cmd_list += ['du T3_US_Cornell', 'ls T3_US_Cornell'] cmd_list += ['ls %s' % sename, 'mkdir", "pager to %s\" % arg else: val = os.environ.get('CMSSH_PAGER', None) msg = \"cmssh", "command provides information for given meta-data entity, e.g. dataset, block, file, run. 
Examples:", "in cms_architectures('all'): if arg == 'all': print name else: if check_os(name): print name", "prompt return # check if given release name is installed on user system", "\\ + ' switch to given CMSSW release and setup its environment\\n' msg", "check if given release name is installed on user system rel_arch = None", "CMSSW install area' print msg return # check if given release/architecture is in", "cmshelp command Examples: cmssh> cmshelp cmssh> cmshelp commands cmssh> cmshelp ls \"\"\" if", "split[-1] mgr.run_line_magic(cmd, args) def cms_info(arg): \"\"\" cmssh info command provides information for given", "if not arg: print \"Current architecture: %s\" % os.environ['SCRAM_ARCH'] archs = [] for", "cmssh> read config.txt \"\"\" try: debug = get_ipython().debug except: debug = 0 orig_arg", "+ '\\n' print msg while True: try: uinput = raw_input() if uinput.strip() ==", "% dataset] cmd_list += ['find dataset=/ZMM*', 'das dataset=/ZMM*', 'find dataset file=%s' % lfn]", "print_error, print_status, print_info from cmssh.filemover import copy_lfn, rm_lfn, mkdir, rmdir, list_se, dqueue from", "<list> Examples: cmssh> arch # show current and installed architecture(s) cmssh> arch all", "cmssh Examples: cmssh> root -l \"\"\" pcre_init = pkg_init('external/pcre') gcc_init = pkg_init('external/gcc') root_init", "in get_release_arch(rel): if not status: msg = '%s release is not officially supported", "'-': opts.append(par) return opts class Magic(object): \"\"\" Class to be used with ipython", "found arch=%s' \\ % (os.environ['SCRAM_ARCH'], arch) print_warning(msg) msg = '\\n%s/%s is not installed", "structure. 
Examples: cmssh> cmsrel # reset CMSSW environment to cmssh one cmssh> cmsrel", "current shell environment # old command for reference: # cmd = \"eval `scramv1", "\"eval `scramv1 runtime -sh`; %s\" % fname cmd = fname ipython.register_magic_function(Magic(cmd).execute, 'line', name)", "+= '\\nInstall python software: ' + \\ msg_blue('pip <search|(un)install> <package>') return msg def", "grep ^ROOTSYS=' stdout, stderr = execmd(cmd) if stderr: print \"While executing cmd=%s\" %", "== '&': background = True arg = arg.replace('&', '').strip() src, dst = arg.rsplit('", "'all': print 'CMSSW architectures:' else: print 'CMSSW architectures for %s:' \\ % os.uname()[0].replace('Darwin',", "dict): ticket = pprint.pformat(res) else: ticket = res to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') send_email(to_user, email,", "\\ % msg_blue('results()') msg += ' find dataset=/*Zee*\\n' msg += ' for r", "arg = arg.strip() if not arg or arg == 'list': print_info('Local data transfer')", "rm_lfn, mkdir, rmdir, list_se, dqueue from cmssh.utils import list_results, check_os, unsupported_linux, access2file from", "+= ' for r in results(): print r, type(r)\\n' msg += '\\nList cmssh", "a given dataset/file/block/lfn/run Examples: cmssh> lumi run=190704 cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find", "user=oliver', 'jobs list', 'jobs user=AikenOliver'] cmd_list += ['releases list', 'arch list', 'jobs', 'ls']", "to %s\" % arg) os.environ['HTTPDEBUG'] = arg else: print_info(\"HTTP debug level is %s\"", "['find config dataset=%s' % dataset2] cmd_list += ['du T3_US_Cornell', 'ls T3_US_Cornell'] cmd_list +=", "if ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent', fname) else: cms_ls(arg) def cms_cp(arg): \"\"\" cmssh cp command copies", "command you must ' msg += 'run ' + msg_blue('cmsrel') + ' command'", "0 limit = 0 debug = 0 res = 
das_client(host, query, idx, limit,", "arg: arg = arg.strip() if not arg or arg == 'list': print_info('Local data", "a shell \"\"\" def __init__(self, cmd): self.cmd = cmd def execute(self, args=''): \"Execute", "user_input(msg, default='N'): os.environ['SCRAM_ARCH'] = arch if not os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch) return 'ok'", "(aka SCRAM_ARCH)\" archs = [a for a in tc_architectures(arch_type)] return archs def cms_arch(arg=None):", "= 'apt%s' % arg else: msg = 'Not supported apt command' raise Exception(msg)", "\"\"\" cmssh command to show or set DBS instance Examples: cmssh> dbs_instance cmssh>", "') \\ + ' install CMSSW release, e.g. install CMSSW_5_0_0\\n' msg += msg_green('cmsrel", "pat_lfn.match(arg.split(':')[-1]): status = rm_lfn(arg, verbose=debug) print_status(status) else: if not os.path.exists(dst): print_error('File %s does", "debug, shell=True, call=True) cmd = 'source `find %s/%s/external/apt -name init.sh | tail -1`;", "arg.split(' ')[-1].strip() if last_arg == '&': background = True arg = arg.replace('&', '').strip()", "+= ['find dataset=/ZMM*', 'das dataset=/ZMM*', 'find dataset file=%s' % lfn] cmd_list += ['find", "True else: overwrite = False except: traceback.print_exc() return try: debug = get_ipython().debug except:", "CMS storage element. Examples: cmssh> mkdir foo cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg =", "for fname in ['file1.root', 'file2.root']: if os.path.isfile(fname): os.remove(fname) lfn = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2", "= '' msg = 'Type your problem, attach traceback, etc. 
Once done print", "not officially supported under %s' \\ % (rel, arch) print_warning(msg) if arch !=", "release architecture status: %s' % status print msg return print \"Searching for %s\"", "architecture for given CMSSW release\" args = {'release': rel} releases = get_data(tc_url('py_getReleaseArchitectures'), args)", "= '%s/src/FWCore/%s/python' % (path, pkg) dst = '%s/FWCore/%s' % (idir, pkg) os.symlink(link, dst)", "1) arg = arg.strip() else: flt = None startswith = None entities =", "%s\" % (src, dst)) else: try: status = copy_lfn(orig, dst, debug, background, overwrite)", "apt-get interactive feature if platform() == 'osx': idir = '%s/%s/cms/cmssw/%s' \\ % (os.environ['VO_CMS_SW_DIR'],", "level to %s\" % arg) os.environ['HTTPDEBUG'] = arg else: print_info(\"HTTP debug level is", "arch = os.environ['SCRAM_ARCH'] cmd = 'sh -x %s/bootstrap.sh setup -path %s -arch %s'", "repository ...' % arch run(cmd, sdir, msg=msg, debug=debug, shell=True) def get_release_arch(rel): \"Return architecture", "jobs at give site or for given user. It accepts the following list", "local_file.root Please note: to enable access to RunSummary service please ensure that your", "= os.environ.get('CMSSH_ROOT') path = os.path.join(root, 'cmssh/DEMO') with open(path, 'r') as demo_file: print demo_file.read()", "job queue or CMS jobs\\n' msg += msg_green('read ') \\ + ' read", "= os.environ.get('CMS_JSON') print_info('CMS JSON: %s' % fname) try: debug = get_ipython().debug except: debug", "status)) return output def check_release_arch(rel): \"Check release/architecture\" # check if given release name", "-f %s' % fname ipython.run_line_magic('edmFileUtil', cmd) if debug: if ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent', fname) else:", "% os.environ.get('HTTPDEBUG', 0)) def cms_find(arg): \"\"\" Perform lookup of given query in CMS", "' list file/LFN, e.g. 
ls local.file or ls /store/user/file.root\\n' msg += msg_green('rm ')", "= arg.strip() if pat_site.match(arg): lookup(arg) else: cmd = 'du ' + arg cmd", "'') res = block_info(arg, debug) elif pat_dataset.match(arg): arg = arg.replace('dataset=', '') try: res", "opts.append(par) return opts class Magic(object): \"\"\" Class to be used with ipython magic", "= 'sh -x %s/bootstrap.sh setup -path %s -arch %s' % (swdir, swdir, arch)", "user system rel_dir = '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return 'ok'", "all CMSSW architectures for given platform \"\"\" if not arg: print \"Current architecture:", "given command and provide a method to execute it in a shell \"\"\"", "to execute CMSSW cmsRun command. Requires cmsrel to setup CMSSW environment. \"\"\" cmd", "cmssh.cmsfs import CMSMGR, apply_filter, validate_dbs_instance from cmssh.cmsfs import release_info, run_lumi_info from cmssh.github import", "current shell environment\" cmd = '%s %s' % (self.cmd, args.strip()) run(cmd) def subprocess(self,", "sdir = os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW') debug = 0 msg = 'Bootstrap %s ...' %", "if stderr: print \"While executing cmd=%s\" % cmd print_warning(stderr) rootsys = stdout.replace('\\n', '').replace('ROOTSYS=',", "query in CMS data-services. 
cmssh find command lookup given query in CMS data-services.", "found in %s' % crab_dir print_warning(msg) msg = 'Would you like to create", "<search|(un)install> <package>') return msg def cms_help(arg=None): \"\"\" cmshelp command Examples: cmssh> cmshelp cmssh>", "cmd_list += ['find user=oliver', 'jobs list', 'jobs user=AikenOliver'] cmd_list += ['releases list', 'arch", "into serate process, therefore # subprocess.Popen will not catch it run(cmd, sdir, 'bootstrap.log',", "-voms cms:/cms -key %s -cert %s\" % (key, cert) run(cmd) userdn = os.environ.get('USER_DN',", "apply_filter, validate_dbs_instance from cmssh.cmsfs import release_info, run_lumi_info from cmssh.github import get_tickets, post_ticket from", "item in releases: rel_arch = item[0] status = item[1] if check_os(rel_arch): output.append((rel_arch, status))", "accessible via %s function, e.g.\\n' \\ % msg_blue('results()') msg += ' find dataset=/*Zee*\\n'", "identify CMSSW environment, please run first: ' msg = msg_red(msg) msg += msg_blue('cmsrel", "'-f': overwrite = True else: overwrite = False except: traceback.print_exc() return try: debug", "get_apt_init(os.environ['SCRAM_ARCH']) cmd = 'source %s; apt-cache search %s | grep -v -i fwlite'", "FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) # check if release version and work area are set", "else: msg = 'Not supported apt command' raise Exception(msg) run(cmd) def cms_das(query): \"\"\"", "lumi info for a given dataset/file/block/lfn/run Examples: cmssh> lumi run=190704 cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD", "= 'Unable to identify CMSSW environment, please run first: ' msg = msg_red(msg)", "' query DAS and return data in JSON format\\n' msg += msg_green('jobs ')", "'1']: print_error('Please provide 0/1 for debug_http command') return print_info(\"Set HTTP debug level to", "except: debug = 0 if not arg: print_error(\"Usage: rm <options> source_file\") dst =", "msg_green('cmsrel ') \\ + ' switch to 
given CMSSW release and setup its", "if dst.find('&') != -1: background = True dst = dst.replace('&', '').strip() if dst", "lfn, 'cp %s file2.root &' % lfn2, 'ls'] cmd_list += ['find user=oliver', 'jobs", "return opts class Magic(object): \"\"\" Class to be used with ipython magic functions.", "set CMSSW architecture. Optional parameters either <all> or <list> Examples: cmssh> arch #", "is not available' else: doc = cms_help_msg() print doc def cms_rm(arg): \"\"\" CMS", "'').replace('file=', '').replace('block=', '') arg = arg.replace('lfn=', '').replace('run=', '') res = run_lumi_info(arg, debug) def", "get_data(tc_url('py_getReleaseArchitectures'), args) output = [] for item in releases: rel_arch = item[0] status", "\"\"\" cmssh command to run ROOT xrdcp via cmssh shell Examples: cmssh> xrdcp", "import PEMMGR, working_pem from cmssh.cmssw_utils import crab_submit_remotely, crabconfig from cmssh.cern_html import read from", "cmscrab(arg): \"\"\" Execute CRAB command, help is available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\" msg =", "else: print_info(\"HTTP debug level is %s\" % os.environ.get('HTTPDEBUG', 0)) def cms_find(arg): \"\"\" Perform", "-sh`; env | grep ^ROOTSYS=' stdout, stderr = execmd(cmd) if stderr: print \"While", "cmd = '-e -f file:///%s' % fname else: cmd = '-e -f %s'", "-1`;' % pkg_dir if not os.path.isdir(pkg_dir): cmd = '' return cmd def cms_root(arg):", "vomsinit By default it applies the following options -rfc -voms cms:/cms -key <userkey.pem>", "rel print_error(msg) msg = 'Please check the you provide correct release name,' msg", "it will be posted as anonymous gist ticket' print_info(msg) if not user_input('Proceed', default='N'):", "stderr: print \"While executing cmd=%s\" % cmd print_warning(stderr) rootsys = stdout.replace('\\n', '').replace('ROOTSYS=', '')", "if not arg or arg == 'list': print_info('Local data transfer') dqueue(arg) elif arg", "command cmssh> ls -l local_file cmssh> ls 
T3_US_Cornell:/store/user/valya cmssh> ls run=160915 \"\"\" arg", "try: status = rmdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_mkdir(arg): \"\"\" cmssh mkdir", "try: last_arg = arg.split(' ')[-1].strip() if last_arg == '&': background = True arg", "os.environ['USER_DN'] = stdout.replace('\\n', '') def github_issues(arg=None): \"\"\" Retrieve information about cmssh tickets, e.g.", "'/': cmd = '-e -f file:///%s' % fname else: cmd = '-e -f", "msg += msg_blue('EOF') + ' and hit ' + msg_blue('Enter') + '\\n' print", "local filesystem or in DBS\" % arg print_error(msg) elif pat_run.match(arg): arg = arg.replace('run=',", "elif pat_site.match(arg): site = arg.replace('site=', '') print_info('Dashboard information, site=%s' % site) res =", "ipython = get_ipython() magic = ipython.find_line_magic('edmFileUtil') if magic: if arg[0] == '/': cmd", "'apt-get install external+fakesystem+1.0; ' cmd += 'apt-get update; ' msg = 'Initialize %s", "shell=True) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_jobs(arg=None): \"\"\" cmssh jobs command", "ready, cwd: %s\" % (rel, os.getcwd()) def cmsexe(cmd): \"\"\" Execute given command within", "you must ' msg += 'run ' + msg_blue('cmsrel') + ' command' print_error(msg)", "'': print_info(\"Verbose level is %s\" % ipth.debug) else: if arg == 0 or", "files/dirs or CMS storate elements. 
Examples: cmssh> cp file1 file2 cmssh> cp file.root", "return ', '.join(output) osname, osarch = osparameters() if osname == 'osx' and osarch", "rel = os.environ.get('CMSSW_VERSION', None) work_area = os.environ.get('CMSSW_WORKAREA', None) if not rel or not", "arg = arg.strip() if arg: if validate_dbs_instance(arg): os.environ['DBS_INSTANCE'] = arg print \"Switch to", "bootstrap(arch): \"Bootstrap new architecture\" swdir = os.environ['VO_CMS_SW_DIR'] arch = os.environ['SCRAM_ARCH'] cmd = 'sh", "in subprocess.call since it invokes # wget/curl and it can be spawned into", "cmssh.regex import pat_lfn, pat_run, pat_se, pat_user from cmssh.tagcollector import architectures as tc_architectures from", "level to %s\" % arg) # DEBUG.set(arg) # else: # print_info(\"Debug level is", "rel) fix_so(idir) print \"Create user area for %s release ...\" % rel cmsrel(rel)", "root -l \"\"\" pcre_init = pkg_init('external/pcre') gcc_init = pkg_init('external/gcc') root_init = pkg_init('lcg/root') pkgs_init", "doc def cms_rm(arg): \"\"\" CMS rm command works with local files/dirs and CMS", "= arg.strip() debug = get_ipython().debug args = arg.split('|') if len(args) == 1: #", "release name is installed on user system rel_dir = '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel)", "# show all known CMS releases, including online, tests, etc. \"\"\" if arg:", "+ ' install CMSSW release, e.g. install CMSSW_5_0_0\\n' msg += msg_green('cmsrel ') \\", "apt-get install cms+cmssw+%s' % (script, rel) subprocess.call(cmd, shell=True) # use subprocess due to", "\"\"\" cmssh command which queries DAS data-service with provided query. 
Examples: cmssh> das", "201706:[1,2,3,67]} \"\"\" try: debug = get_ipython().debug except: debug = 0 arg = arg.replace('dataset=',", "+= msg_green('root ') + ' invoke ROOT\\n' msg += msg_green('du ') \\ +", "idir = os.environ['CMSSH_INSTALL_DIR'] base = os.path.realpath('%s/CMSSW' % root) path = '%s/%s/cms/cmssw/%s' % (base,", "' command to install one' print msg def cms_read(arg): \"\"\" cmssh command to", "+ '/src')): os.chdir(os.path.join(cmssw_dir, rel + '/src')) else: os.chdir(cmssw_dir) cmd = \"scramv1 project CMSSW", "os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/cms/cmssw' % idir) if os.path.isdir(rdir): for rel in os.listdir(rdir): releases.append('%s/%s' %", "cmssh.results import RESMGR from cmssh.auth_utils import PEMMGR, working_pem from cmssh.cmssw_utils import crab_submit_remotely, crabconfig", "release installed on your system.\" msg += \"\\nPlease use \" + msg_green('install CMSSW_X_Y_Z')", "os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(link, dst) # switch to given release os.environ['CMSSW_VERSION'] =", "None if arg: arg = arg.strip() if not arg or arg == 'list':", "which lists local transfer jobs - site, which lists jobs at given site", "info dataset=/a/b/c cmssh> info /a/b/c cmssh> info run=160915 cmssh> info local_file.root Please note:", "crab_dir = os.path.join(work_area, 'crab') crab_cfg = os.path.join(crab_dir, 'crab.cfg') if not os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir)", "%s' % (swdir, swdir, arch) if unsupported_linux(): cmd += ' -unsupported_distribution_hack' sdir =", "if output: return ', '.join(output) osname, osarch = osparameters() if osname == 'osx'", "set for you' print_info(msg) def cmsrel(rel): \"\"\" cmssh release setup command, it setups", "cms_mkdir(arg): \"\"\" cmssh mkdir command creates directory on local filesystem or remote CMS", "install cms+cmssw-patch+%s' % (script, rel) else: print \"Installing cms+cmssw+%s ...\" % rel cmd", "arg: arg = arg.strip() read(arg, out, 
debug) def cms_releases(arg=None): \"\"\" List available CMS", "ipth = get_ipython() if arg == '': print_info(\"Verbose level is %s\" % ipth.debug)", "except: debug = 0 orig_arg = arg if orig_arg.find('>') != -1: arg, out", "based directory structure. Examples: cmssh> cmsrel # reset CMSSW environment to cmssh one", "installation if os.environ.get('CMSSH_CMSSW', None): msg = '\\nYou are not allowed to install new", "import traceback import subprocess # cmssh modules from cmssh.iprint import msg_red, msg_green, msg_blue", "'e.g. info run=160915\\n' msg += msg_green('das ') + ' query DAS service\\n' msg", "= get_apt_init(os.environ['SCRAM_ARCH']) cmd = 'source %s; apt-cache search %s | grep -v -i", "arch) cmd += 'apt-get install external+fakesystem+1.0; ' cmd += 'apt-get update; ' msg", "rmdir foo cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug = get_ipython().debug", "debug = 0 if not arg: print_error(\"Usage: mkdir <options> dir\") if arg.find(':') ==", "cmssh info command provides information for given meta-data entity, e.g. 
dataset, block, file,", "['find dataset=/ZMM*', 'das dataset=/ZMM*', 'find dataset file=%s' % lfn] cmd_list += ['find lumi", "from cmssh.cms_urls import dbs_instances, tc_url from cmssh.das import das_client from cmssh.url_utils import get_data,", "-identity\" stdout, stderr = execmd(cmd) os.environ['USER_DN'] = stdout.replace('\\n', '') def github_issues(arg=None): \"\"\" Retrieve", "'r') as cms_json: print cms_json.read() def integration_tests(_arg): \"Run series of integration tests for", "# list available CMSSW releases on given platform cmssh> releases all # show", "releases.sort() for rel in releases: print rel installed_releases() def pkg_init(pkg_dir): \"Create CMS command", "' show/set DBS instance, default is DBS global instance\\n' msg += msg_green('mkdir/rmdir ')", "if os.path.isfile(orig_arg) or os.path.isdir(orig_arg): cmd = 'ls ' + orig_arg run(cmd, shell=True) elif", "os.getcwd()) if not os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir) root = os.environ['CMSSH_ROOT'] idir = os.environ['CMSSH_INSTALL_DIR'] base =", "% user) res = jobsummary({'user': user}) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def", "return cmd def cms_root(arg): \"\"\" cmssh command to run ROOT within cmssh Examples:", "\"\"\" if not arg: return try: debug = get_ipython().debug except: debug = 0", "= arg.replace('run=', '') res = run_info(arg, debug) elif pat_release.match(arg): arg = arg.replace('release=', '')", "given platform cmssh> releases all # show all known CMS releases, including online,", "releases # show installed CMSSW releases cmssh> releases list # list available CMSSW", "apt_dir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/external/apt' % arch) dirs = os.listdir(apt_dir) dirs.sort() name =", "for item in releases: rel_arch = item[0] status = item[1] if check_os(rel_arch): output.append((rel_arch,", "for idir in os.listdir(os.environ['VO_CMS_SW_DIR']): if idir.find(osarch) != -1: rdir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], 
'%s/cms/cmssw'", "you provide correct release name,' msg += ' e.g. CMSSW_X_Y_Z<_patchN>' print msg return", "info (aka voms-proxy-info)\\n' msg += '\\nQuery results are accessible via %s function, e.g.\\n'", "architecture os.environ['SCRAM_ARCH'] = rel_arch # setup environment cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd()) if not", "your proxy (aka voms-proxy-init)\\n' msg += msg_green('vomsinfo ') \\ + ' show your", "jobs of given user Examples: cmssh> jobs cmssh> jobs list cmssh> jobs site=T2_US_UCSD", "SCRAM_ARCH=%s\" % arg os.environ['SCRAM_ARCH'] = arg def cms_apt(arg=''): \"Execute apt commands\" if '-cache'", "(rel, idir)) if releases: releases.sort() print \"\\nInstalled releases:\" for rel in releases: print", "args) def cms_info(arg): \"\"\" cmssh info command provides information for given meta-data entity,", "= True arg = arg.replace('&', '').strip() src, dst = arg.rsplit(' ', 1) if", "'no match' def get_apt_init(arch): \"Return proper apt init.sh for given architecture\" apt_dir =", "message\"\"\" msg = 'Available cmssh commands:\\n' msg += msg_green('find ') \\ + '", "from cmssh.tagcollector import architectures as tc_architectures from cmssh.results import RESMGR from cmssh.auth_utils import", "cmssh command to show or set DBS instance Examples: cmssh> dbs_instance cmssh> dbs_instance", "+= msg_green('vomsinfo ') \\ + ' show your proxy info (aka voms-proxy-info)\\n' msg", "= \"You did your email address\" print_error(msg) return desc = '' msg =", "releases list # list available CMSSW releases on given platform cmssh> releases all", "'-submit': crab_submit_remotely(rel, work_area) return cmd = 'source $CRAB_ROOT/crab.sh; crab %s' % arg cmsexe(cmd)", "`scramv1 runtime -sh`; %s\" % fname cmd = fname ipython.register_magic_function(Magic(cmd).execute, 'line', name) #", "check existence of crab.cfg crab_dir = os.path.join(work_area, 'crab') crab_cfg = os.path.join(crab_dir, 'crab.cfg') if", "command to source pkg environment\" pkg_dir = 
'%s/%s/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir)", "apt-cache search %s | grep -v -i fwlite' % (script, rel) run(cmd) if", "!= os.environ['SCRAM_ARCH']: msg = 'Your SCRAM_ARCH=%s, while found arch=%s' \\ % (os.environ['SCRAM_ARCH'], arch)", "% arg cmsexe(cmd) def cms_pager(arg=None): \"\"\" cmssh command to show or set internal", "run(\"rmdir %s\" % arg) else: try: status = rmdir(arg, verbose=debug) print_status(status) except: traceback.print_exc()", "allowed for %s look-up' % startswith print_error(msg) else: cmd = 'ls ' +", "elif pat_user.match(arg): user = arg.replace('user=', '') print_info('Dashboard information, user=%s' % user) res =", "JSON data format Examples: cmssh> das_json dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx =", "print '\\nAvailable DBS instances:' for inst in dbs_instances(): print inst def cms_help_msg(): \"\"\"cmsHelp", "'install/lib/release_%s' % idir) if os.path.islink(pdir): os.remove(pdir) if os.path.isdir(pdir): shutil.rmtree(pdir) os.makedirs(pdir) # Set cmssh", "JSON file\" if arg: if access2file(arg): os.environ['CMS_JSON'] = arg print_info('CMS_JSON: %s' % arg)", "debug, 'plain') def cms_das_json(query): \"\"\" cmssh command which queries DAS data-service with provided", "all known CMSSW architectures cmssh> arch list # show all CMSSW architectures for", "def get_release_arch(rel): \"Return architecture for given CMSSW release\" args = {'release': rel} releases", "get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: mkdir <options> dir\") if", "\"\"\" # arg = arg.strip() # if arg: # print_info(\"Set debug level to", "import RESMGR from cmssh.auth_utils import PEMMGR, working_pem from cmssh.cmssw_utils import crab_submit_remotely, crabconfig from", "msg = 'Please check the you provide correct release name,' msg += '", "status: %s' % status print msg return print \"Searching for %s\" % rel", "environment to cmssh one cmssh> cmsrel CMSSW_5_2_4 \"\"\" ipython = 
get_ipython() rel =", "if len(split) == 1: cmd = item args = '' else: cmd =", "%s\" % ipth.debug) else: if arg == 0 or arg == '0': ipth.debug", "does not exist on local filesystem or in DBS\" % arg print_error(msg) elif", "ls \"\"\" if arg: if arg.strip() == 'commands': cms_commands() return ipython = get_ipython()", "jobs of current user - user, which lists jobs of given user Examples:", "not in ['0', '1']: print_error('Please provide 0/1 for debug_http command') return print_info(\"Set HTTP", "\"\"\"Return results from recent query\"\"\" return RESMGR def cms_commands(_arg=None): \"\"\" cmssh command which", "import reqmgr from cmssh.cms_objects import get_dashboardname def options(arg): \"\"\"Extract options from given arg", "cmd += 'apt-get install external+fakesystem+1.0; ' cmd += 'apt-get update; ' msg =", "proper apt init.sh for given architecture\" apt_dir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/external/apt' % arch)", "CMSSW environment to cmssh one cmssh> cmsrel CMSSW_5_2_4 \"\"\" ipython = get_ipython() rel", "cmssh> cp file1 file2 cmssh> cp file.root T3_US_Cornell:/store/user/name cmssh> cp /store/mc/file.root T3_US_Cornell:/store/user/name cmssh>", "cmd = '%s root -l %s' % (pkgs_init, arg.strip()) run(cmd) def cms_xrdcp(arg): \"\"\"", "Set of UNIX commands, e.g. ls, cp, supported in cmssh. \"\"\" # system", "integration_tests(_arg): \"Run series of integration tests for cmssh\" for fname in ['file1.root', 'file2.root']:", "cmsexe(cmd) def cmsrun(arg): \"\"\" cmssh command to execute CMSSW cmsRun command. Requires cmsrel", "user \"\"\" lookup(arg) def cms_du(arg): \"\"\" cmssh disk utility cmssh command. 
Examples: cmssh>", "cmd_list += ['cp %s file.root' % lfn, 'ls', 'cp file.root %s' % sename,", "release/architecture\" # check if given release name is installed on user system rel_dir", "debug=debug, shell=True) def get_release_arch(rel): \"Return architecture for given CMSSW release\" args = {'release':", "#def debug(arg): # \"\"\" # debug shell command # \"\"\" # arg =", "else: os.environ['CMSSH_PAGER'] = arg print \"Set CMSSH pager to %s\" % arg else:", "= {key: {'content': desc}} res = post_ticket(key, files) if res.has_key('html_url'): print_status('New gist ticket", "== -1: # not a SE:dir pattern run(\"mkdir %s\" % arg) else: try:", "flt=flt) def cms_config(arg): \"\"\" Return configuration object for given dataset Examples: cmssh> config", "' \\ % (swdir, arch) cmd += 'apt-get install external+fakesystem+1.0; ' cmd +=", "(idir, pkg) os.symlink(link, dst) link = '%s/src/DataFormats/FWLite/python' % path dst = '%s/DataFormats/FWLite' %", "or <list> Examples: cmssh> arch # show current and installed architecture(s) cmssh> arch", "<options> source_file\") dst = arg.split()[-1] if os.path.exists(dst) or len(glob.glob(dst)): cmd = \"rm %s\"", "get_ipython().debug except: debug = 0 orig_arg = arg if orig_arg.find('>') != -1: arg,", "ticket id cmssh> ticket new # post new ticket from cmssh # or", "to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') send_email(to_user, email, title, ticket) else: res = get_tickets(arg) RESMGR.assign(res) pprint.pprint(res)", "pkgs_init = '%s %s %s' % (pcre_init, gcc_init, root_init) cmd = '%s root", "'ls'] cmd_list += ['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] mgr = get_ipython() for item in cmd_list: print_info(\"Execute", "cmssh commands in current shell. 
Examples: cmssh> cmshelp commands \"\"\" mdict = get_ipython().magics_manager.lsmagic()", "msg += msg_green('cmsrel ') \\ + ' switch to given CMSSW release and", "-rfc -voms cms:/cms -key <userkey.pem> -cert <usercert.pem> \"\"\" cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem') with", "'source `find %s/%s/external/apt -name init.sh | tail -1`; ' \\ % (swdir, arch)", "site, e.g. du T3_US_Cornell\\n' msg += '\\nAvailable CMSSW commands (once you install any", "Perform lookup of given query in CMS data-services. \"\"\" arg = arg.strip() debug", "storage element. Examples: cmssh> mkdir foo cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip()", "command, ' \\ + 'e.g. mkdir /path/foo or rmdir T3_US_Cornell:/store/user/foo\\n' msg += msg_green('ls", "print inst def cms_help_msg(): \"\"\"cmsHelp message\"\"\" msg = 'Available cmssh commands:\\n' msg +=", "with ipython magic functions. It holds given command and provide a method to", "if arg == '': print_info(\"Verbose level is %s\" % ipth.debug) else: if arg", "\"While executing cmd=%s\" % cmd print_warning(stderr) rootsys = stdout.replace('\\n', '').replace('ROOTSYS=', '') dst =", "'etc/profile.d/init.sh' script = os.path.join(os.path.join(apt_dir, dirs[-1]), name) return script def cms_install(rel): \"\"\" cmssh command", "local filesystem or remote CMS storage element. Examples: cmssh> mkdir foo cmssh> mkdir", "= run_lumi_info(arg, debug) def cms_json(arg): \"Print or set location of CMS JSON file\"", "get_release_arch(rel): if not status: msg = '%s release is not officially supported under", "out, debug) def cms_releases(arg=None): \"\"\" List available CMS releases. Optional parameters either <list>", "'Utilities'] for pkg in pkgs: link = '%s/src/FWCore/%s/python' % (path, pkg) dst =", "sename, 'ls %s' % sename, 'rm file.root', 'cp %s file1.root &' % lfn,", "+= msg_green('cp ') \\ + ' copy file/LFN, e.g. 
cp local.file or cp", "else: val = os.environ.get('CMSSH_PAGER', None) msg = \"cmssh pager is set to: %s\"", "print_info('Aborting your action') return key = '<KEY>' % time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime(time.time())) files =", "\"\"\"Extract options from given arg string\"\"\" opts = [] for par in arg.split():", "cmssh command to show or set internal pager Examples: cmssh> pager # shows", "an alias to: eval `scramv1 runtime -sh`' msg = 'Within cmssh it is", "+ ' CMSSW scram command\\n' msg += msg_green('cmsRun ') \\ + ' cmsRun", "with local files/dirs and CMS storate elements. Examples: cmssh> rm local_file cmssh> rm", "msg_green('rm ') + ' remove file/LFN, ' \\ + 'e.g. rm local.file or", "pat_release.match(arg): arg = arg.replace('release=', '') res = release_info(arg, debug) elif startswith: msg =", "'&': background = True arg = arg.replace('&', '').strip() src, dst = arg.rsplit(' ',", "cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh> read config.txt \"\"\" try: debug = get_ipython().debug except: debug", "0)) def cms_find(arg): \"\"\" Perform lookup of given query in CMS data-services. 
cmssh", "print \"Create user area for %s release ...\" % rel cmsrel(rel) def cmsenv(_arg):", "'.globus/usercert.pem') with working_pem(PEMMGR.pem) as key: run(\"voms-proxy-destroy\") cmd = \"voms-proxy-init -rfc -voms cms:/cms -key", "if os.uname()[0] == 'Darwin' and arg == '-submit': crab_submit_remotely(rel, work_area) return cmd =", "os.path.isfile(fname): # we use Magic(cmd).execute we don't need # to add scramv1 command", "cmssh> cmshelp cmssh> cmshelp commands cmssh> cmshelp ls \"\"\" if arg: if arg.strip()", "Execute given command within CMSSW environment \"\"\" vdir = os.environ.get('VO_CMS_SW_DIR', None) arch =", "= '%s/%s/cms/cmssw/%s' % (base, rel_arch, rel) os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel) os.environ['CMSSW_RELEASE_BASE'] = path", "\"%s\"' % rel print_error(msg) msg = 'Please check the you provide correct release", "else: overwrite = False except: traceback.print_exc() return try: debug = get_ipython().debug except: debug", "any CMSSW release):\\n' msg += msg_green('releases ') \\ + ' list available CMSSW", "== 'list': if arg == 'all': print 'CMSSW architectures:' else: print 'CMSSW architectures", "sdir, msg=msg, debug=debug, shell=True) def get_release_arch(rel): \"Return architecture for given CMSSW release\" args", "Examples: cmssh> dbs_instance cmssh> dbs_instance cms_dbs_prod_global \"\"\" arg = arg.strip() if arg: if", "queries DAS data-service with provided query. Examples: cmssh> das dataset=/ZMM* \"\"\" host =", "in releases: rel_arch = item[0] status = item[1] if check_os(rel_arch): output.append((rel_arch, status)) return", "CMS data-services. cmssh find command lookup given query in CMS data-services. 
Examples: cmssh>", "cms_json: print cms_json.read() def integration_tests(_arg): \"Run series of integration tests for cmssh\" for", "dataset2] cmd_list += ['du T3_US_Cornell', 'ls T3_US_Cornell'] cmd_list += ['ls %s' % sename,", "= '-e -f %s' % fname ipython.run_line_magic('edmFileUtil', cmd) if debug: if ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent',", "'clean']: path = os.environ['CMSSH_ROOT'] for idir in ['external', 'lib', 'root']: pdir = os.path.join(path,", "to: %s\" \\ % os.environ.get('DBS_INSTANCE', 'global') print msg print '\\nAvailable DBS instances:' for", "else: if pat_lfn.match(arg.split(':')[-1]): status = rm_lfn(arg, verbose=debug) print_status(status) else: if not os.path.exists(dst): print_error('File", "(swdir, arch) cmd += 'apt-get install external+fakesystem+1.0; ' cmd += 'apt-get update; '", "= [] try: debug = get_ipython().debug except: debug = 0 orig_arg = arg", "list_se(arg, debug) elif pat_site.match(arg): arg = arg.replace('site=', '') res = site_info(arg, debug) elif", "Magic(object): \"\"\" Class to be used with ipython magic functions. It holds given", "of given query in CMS data-services. 
\"\"\" arg = arg.strip() debug = get_ipython().debug", "= get_ipython() rel = rel.strip() if not rel or rel in ['reset', 'clear',", "`scramv1 runtime -sh`; %s\" % cmd run(cmd, shell=True, call=True) def cmscrab(arg): \"\"\" Execute", "jobs user=my_cms_user_name \"\"\" res = None try: debug = get_ipython().debug except: debug =", "if arg == 'new': msg = 'You can post new ticket via web", "% lfn] cmd_list += ['ls %s' % dataset, 'info %s' % dataset] cmd_list", "import glob import shutil import base64 import pprint import mimetypes import traceback import", "= rm_lfn(arg, verbose=debug) print_status(status) else: if not os.path.exists(dst): print_error('File %s does not exists'", "cmssh> ls run=160915 \"\"\" arg = arg.strip() res = [] try: debug =", "verbose(arg): \"\"\" Set/get verbosity level \"\"\" arg = arg.strip() ipth = get_ipython() if", "%s -cert %s\" % (key, cert) run(cmd) userdn = os.environ.get('USER_DN', '') if not", "cmssh> lumi run=190704 cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh>", "%s' % res['html_url']) title = 'cmssh gist %s' % res['html_url'] if isinstance(res, dict):", "level is %s\" % ipth.debug) else: if arg == 0 or arg ==", "root) path = '%s/%s/cms/cmssw/%s' % (base, rel_arch, rel) os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel) os.environ['CMSSW_RELEASE_BASE']", "pat_release, pat_site, pat_dataset, pat_block from cmssh.regex import pat_lfn, pat_run, pat_se, pat_user from cmssh.tagcollector", "ls /store/user/file.root\\n' msg += msg_green('rm ') + ' remove file/LFN, ' \\ +", "must ' msg += 'run ' + msg_blue('cmsrel') + ' command' print_error(msg) return", "architecture\" apt_dir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/external/apt' % arch) dirs = os.listdir(apt_dir) dirs.sort() name", "+ ' remove file/LFN, ' \\ 
+ 'e.g. rm local.file or rm T3_US_Cornell:/store/user/file.root\\n'", "-sh`; %s\" % cmd run(cmd, shell=True, call=True) def cmscrab(arg): \"\"\" Execute CRAB command,", "= arg.replace('lfn=', '').replace('run=', '') res = run_lumi_info(arg, debug) def cms_json(arg): \"Print or set", "status in get_release_arch(rel): if not status: msg = '%s release is not officially", "= os.path.join(crab_dir, 'crab.cfg') if not os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir) if not os.path.isfile(crab_cfg): msg =", "import get_tickets, post_ticket from cmssh.cms_urls import dbs_instances, tc_url from cmssh.das import das_client from", "'-get' in arg: cmd = 'apt%s' % arg else: msg = 'Not supported", "T3_US_Cornell'] cmd_list += ['ls %s' % sename, 'mkdir %s/foo' % sename, 'ls %s'", "behalf of the user Examples: cmssh> vomsinit By default it applies the following", "<options> source_file target_{file,directory}\") pat = pat_se orig = src.split(' ')[-1] if os.path.exists(orig) and", "get_dashboardname def options(arg): \"\"\"Extract options from given arg string\"\"\" opts = [] for", "'Your SCRAM_ARCH=%s, while found arch=%s' \\ % (os.environ['SCRAM_ARCH'], arch) print_warning(msg) msg = '\\n%s/%s", "elif arg == 'all' or arg == 'list': if arg == 'all': print", "releases: ' + msg_green(', '.join(releases)) print msg return cmd = \"eval `scramv1 runtime", "in a shell \"\"\" def __init__(self, cmd): self.cmd = cmd def execute(self, args=''):", "is ready, cwd: %s\" % (rel, os.getcwd()) def cmsexe(cmd): \"\"\" Execute given command", "it in a shell \"\"\" def __init__(self, cmd): self.cmd = cmd def execute(self,", "debug = get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: rmdir <options>", "crab_submit_remotely(rel, work_area) return cmd = 'source $CRAB_ROOT/crab.sh; crab %s' % arg cmsexe(cmd) def", "args) output = [] for item in releases: rel_arch = item[0] status =", "subprocess.Popen will not catch it run(cmd, sdir, 'bootstrap.log', msg, debug, 
shell=True, call=True) cmd", "command works with local files/dirs and CMS storate elements. Examples: cmssh> rm local_file", "%s file1.root &' % lfn, 'cp %s file2.root &' % lfn2, 'ls'] cmd_list", "cmssh tickets, e.g. Examples: cmssh> tickets # list all cmssh tickets cmssh> ticket", "if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path, 'lib') cmd = '%s/xrdcp %s' % (os.path.join(root_path, 'bin'),", "if user_input(msg, default='N'): os.environ['SCRAM_ARCH'] = arch if not os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch) return", "from cmssh.utils import osparameters, check_voms_proxy, run, user_input from cmssh.utils import execmd, touch, platform,", "type(r)\\n' msg += '\\nList cmssh commands : ' + msg_blue('commands') msg += '\\ncmssh", "== 0 and os.path.isfile(fname): # we use Magic(cmd).execute we don't need # to", "arg: print_error(\"Usage: cp <options> source_file target_{file,directory}\") pat = pat_se orig = src.split(' ')[-1]", "ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent', fname) else: cms_ls(arg) def cms_cp(arg): \"\"\" cmssh cp command copies local", "%s' % res['html_url'] if isinstance(res, dict): ticket = pprint.pformat(res) else: ticket = res", "architectures for %s:' \\ % os.uname()[0].replace('Darwin', 'OSX') for name in cms_architectures('all'): if arg", "'\\n%s/%s is not installed within cmssh, proceed' \\ % (rel, arch) if user_input(msg,", "= 'cp %s' % orig_arg subprocess.call(cmd, shell=True) else: run(\"cp %s %s\" % (src,", "results(): print r, type(r)\\n' msg += '\\nList cmssh commands : ' + msg_blue('commands')", "def cms_jobs(arg=None): \"\"\" cmssh jobs command lists local job queue or provides information", "osparameters() if osname == 'osx' and osarch == 'ia32': return 'OSX/ia32 is not", "cmd.strip() subprocess.call(cmd, shell=True) def lookup(arg): \"\"\" Perform lookup of given query in CMS", "% dataset, 'find lumi {\"190704\":[1,2,3]}', 
'find lumi {190704:[1,2,3]}'] cmd_list += ['find config dataset=%s'", "import crab_submit_remotely, crabconfig from cmssh.cern_html import read from cmssh.dashboard import jobsummary from cmssh.reqmgr", "'ParameterSet', 'PythonUtilities', 'Services', 'Utilities'] for pkg in pkgs: link = '%s/src/FWCore/%s/python' % (path,", "list_results(res, debug=True, flt=flt) def cms_jobs(arg=None): \"\"\" cmssh jobs command lists local job queue", "os.symlink(link, dst) link = '%s/src/DataFormats/FWLite/python' % path dst = '%s/DataFormats/FWLite' % idir os.symlink(link,", "site, dataset, block, run, release, file). Examples: cmssh> ls # UNIX command cmssh>", "def dbs_instance(arg=None): \"\"\" cmssh command to show or set DBS instance Examples: cmssh>", "msg += msg_green('vomsinfo ') \\ + ' show your proxy info (aka voms-proxy-info)\\n'", "T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug = get_ipython().debug except: debug = 0", "import re import sys import time import json import glob import shutil import", "lfn = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2 = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2 =", "run = 160915 sename = 'T3_US_Cornell:/store/user/valya' cmd_list = ['pager 0', 'debug_http 0'] cmd_list", "fname = os.path.join(reldir, name) if name.find('edm') == 0 and os.path.isfile(fname): # we use", "help : ' + msg_blue('cmshelp <command>') msg += '\\nInstall python software: ' +", "'ls T3_US_Cornell'] cmd_list += ['ls %s' % sename, 'mkdir %s/foo' % sename, 'ls", "check_release_arch(rel): \"Check release/architecture\" # check if given release name is installed on user", "'arch list', 'jobs', 'ls'] cmd_list += ['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS'] mgr = get_ipython() for item", "or post it at 
https://github.com/vkuznet/cmssh/issues/new \"\"\" if arg == 'new': msg = 'You", "= dataset_info(arg, debug) except IndexError: msg = \"Given pattern '%s' does not exist", "area' print msg return # check if given release/architecture is in place status", "Examples: cmssh> arch # show current and installed architecture(s) cmssh> arch all #", "`scramv1 runtime -sh`; env | grep ^ROOTSYS=' stdout, stderr = execmd(cmd) if stderr:", "msg_green('releases ') \\ + ' list available CMSSW releases, accepts <list|all> args\\n' msg", "provided HTML page (by default output dumps via pager) Examples: cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile", "show or set internal pager Examples: cmssh> pager # shows current setting cmssh>", "userdn: user = get_dashboardname(userdn) print_info('Dashboard information, user=%s' % user) res = jobsummary({'user': user})", "\"\"\" host = 'https://cmsweb.cern.ch' idx = 0 limit = 0 debug = 0", "dataset=%s' % dataset, 'ls run=%s' % run, 'ls file=%s' % lfn] cmd_list +=", "cmssh> info /a/b/c cmssh> info run=160915 cmssh> info local_file.root Please note: to enable", "run, user_input from cmssh.utils import execmd, touch, platform, fix_so from cmssh.cmsfs import dataset_info,", "default='N'): with open('crab.cfg', 'w') as config: config.write(crabconfig()) msg = 'Your crab.cfg has been", "+ ' mkdir/rmdir command, ' \\ + 'e.g. 
mkdir /path/foo or rmdir T3_US_Cornell:/store/user/foo\\n'", "cmssh command to read provided HTML page (by default output dumps via pager)", "'') res = run_info(arg, debug) elif pat_release.match(arg): arg = arg.replace('release=', '') res =", "arg = arg.strip() res = [] try: debug = get_ipython().debug except: debug =", "0 and os.path.isfile(fname): # we use Magic(cmd).execute we don't need # to add", "', 1) if dst.find('&') != -1: background = True dst = dst.replace('&', '').strip()", "= 0 limit = 0 debug = 0 res = das_client(host, query, idx,", "r in res] releases = list(set(releases)) releases.sort() for rel in releases: print rel", "% time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime(time.time())) files = {key: {'content': desc}} res = post_ticket(key, files)", "os.path.join(cmssw_dir, rel) os.environ['CMSSW_RELEASE_BASE'] = path for pkg in ['FWCore', 'DataFormats']: pdir = '%s/%s'", "traceback, etc. Once done print ' msg += msg_blue('EOF') + ' and hit", "\\ + ' list available CMSSW releases, accepts <list|all> args\\n' msg += msg_green('install", "= '-e -f file:///%s' % fname else: cmd = '-e -f %s' %", "cmd = 'ls ' + orig_arg run(cmd, shell=True) elif pat_se.match(arg): arg = arg.replace('site=',", "+ ' copy file/LFN, e.g. cp local.file or cp /store/user/file.root .\\n' msg +=", "arg == 0 or arg == '0': ipth.debug = False else: ipth.debug =", "CMSSW environment. 
\"\"\" cmd = 'cmsRun %s' % arg cmsexe(cmd) def cms_pager(arg=None): \"\"\"", "command which executes voms-proxy-init on behalf of the user Examples: cmssh> vomsinit By", "email address\" print_error(msg) return desc = '' msg = 'Type your problem, attach", "% lfn] cmd_list += ['find lumi dataset=%s' % dataset, 'find lumi {\"190704\":[1,2,3]}', 'find", "print_status(status) except: traceback.print_exc() def cms_mkdir(arg): \"\"\" cmssh mkdir command creates directory on local", "'https://github.com/vkuznet/cmssh/issues/new\\n' msg += 'otherwise it will be posted as anonymous gist ticket' print_info(msg)", "'').replace('ROOTSYS=', '') dst = '%s/install/lib/release_root' % root if os.path.exists(dst): if os.path.islink(dst): os.remove(dst) else:", "msg = 'No crab.cfg file found in %s' % crab_dir print_warning(msg) msg =", "os.environ['DYLD_LIBRARY_PATH'] = dyld_path #def debug(arg): # \"\"\" # debug shell command # \"\"\"", "run=190704 cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh> lumi run=190704", "in archs: print item elif arg == 'all' or arg == 'list': if", "\\ % os.uname()[0].replace('Darwin', 'OSX') for name in cms_architectures('all'): if arg == 'all': print", "file). Examples: cmssh> ls # UNIX command cmssh> ls -l local_file cmssh> ls", "= 0 debug = 0 res = das_client(host, query, idx, limit, debug, 'json')", "Perform lookup of given query in CMS data-services. cmssh find command lookup given", "is available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\" msg = \\ 'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) #", "cmssh command to execute CMSSW cmsRun command. 
Requires cmsrel to setup CMSSW environment.", "') \\ + ' read URL/local file content\\n' msg += msg_green('root ') +", "and hit ' + msg_blue('Enter') + '\\n' print msg while True: try: uinput", "= arg.replace('site=', '') res = site_info(arg, debug) elif pat_lfn.match(arg): arg = arg.replace('file=', '')", "False else: ipth.debug = True # CMSSW commands def bootstrap(arch): \"Bootstrap new architecture\"", "return data in JSON format\\n' msg += msg_green('jobs ') \\ + ' status", "arg == 'new': msg = 'You can post new ticket via web interface", "def cms_du(arg): \"\"\" cmssh disk utility cmssh command. Examples: cmssh> du # UNIX", "'No crab.cfg file found in %s' % crab_dir print_warning(msg) msg = 'Would you", "available' else: doc = cms_help_msg() print doc def cms_rm(arg): \"\"\" CMS rm command", "if arg: arg = arg.strip() read(arg, out, debug) def cms_releases(arg=None): \"\"\" List available", "lumi run=190704 cmssh> lumi {190704:[1,2,3,4], 201706:[1,2,3,67]} \"\"\" try: debug = get_ipython().debug except: debug", "and it can be spawned into serate process, therefore # subprocess.Popen will not", "'file'] for item in entities: if arg.startswith(item + '='): startswith = item if", "msg_green('root ') + ' invoke ROOT\\n' msg += msg_green('du ') \\ + '", "find run=160915 cmssh> find lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> find lumi run=190704 cmssh> find user=oliver", "setup -path %s -arch %s' % (swdir, swdir, arch) if unsupported_linux(): cmd +=", "= pat_release if not pat.match(rel): msg = 'Fail to validate release name \"%s\"'", "os.environ.get('CMSSH_ROOT') path = os.path.join(root, 'cmssh/DEMO') with open(path, 'r') as demo_file: print demo_file.read() def", "# set pager to nill \"\"\" arg = arg.strip() if arg: if arg", "Requires cmsrel to setup CMSSW environment. 
\"\"\" cmd = 'cmsRun %s' % arg", "init.sh | tail -1`; ' \\ % (swdir, arch) cmd += 'apt-get install", "in dbs_instances(): print inst def cms_help_msg(): \"\"\"cmsHelp message\"\"\" msg = 'Available cmssh commands:\\n'", "dataset=/ZMM*', 'das dataset=/ZMM*', 'find dataset file=%s' % lfn] cmd_list += ['find lumi dataset=%s'", "rel_dir = '%s/cms/cmssw/%s' % (arch, rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): rel_arch = arch break", "') \\ + ' cmsRun command for release in question\\n' msg += '\\nAvailable", "architecture status: %s' % status print msg return print \"Searching for %s\" %", "'(' and arg[-1] == ')': arg = arg[1:-1] for case in [arg, 'cms_'+arg,", "= os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/external/apt' % arch) dirs = os.listdir(apt_dir) dirs.sort() name = 'etc/profile.d/init.sh'", "print msg def dbs_instance(arg=None): \"\"\" cmssh command to show or set DBS instance", "returns results in JSON data format Examples: cmssh> das_json dataset=/ZMM* \"\"\" host =", "dbs_instances, tc_url from cmssh.das import das_client from cmssh.url_utils import get_data, send_email from cmssh.regex", "' read URL/local file content\\n' msg += msg_green('root ') + ' invoke ROOT\\n'", "command for release in question\\n' msg += '\\nAvailable GRID commands: <cmd> either grid", "not os.path.isfile(crab_cfg): msg = 'No crab.cfg file found in %s' % crab_dir print_warning(msg)", "to run ROOT within cmssh Examples: cmssh> root -l \"\"\" pcre_init = pkg_init('external/pcre')", "get_ipython() if arg[0] == '(' and arg[-1] == ')': arg = arg[1:-1] for", "return RESMGR def cms_commands(_arg=None): \"\"\" cmssh command which lists all registered cmssh commands", "\"\"\" # debug shell command # \"\"\" # arg = arg.strip() # if", "if func: doc = func.func_doc break else: doc = 'Documentation is not available'", "# old command for reference: # cmd = \"eval `scramv1 runtime -sh`; %s\"", "rel + '/src')) else: os.chdir(cmssw_dir) cmd = \"scramv1 project 
CMSSW %s\" % rel", "print_warning(msg) msg = 'Would you like to create one' if user_input(msg, default='N'): with", "look-up' % startswith print_error(msg) else: cmd = 'ls ' + orig_arg run(cmd, shell=True)", "if arg: if access2file(arg): os.environ['CMS_JSON'] = arg print_info('CMS_JSON: %s' % arg) else: fname", "\"\"\" cmssh command which queries DAS data-service with provided query and returns results", "\"\"\" mdict = get_ipython().magics_manager.lsmagic() cmds = [k for k, v in mdict['line'].items() if", "msg = 'Fail to validate release name \"%s\"' % rel print_error(msg) msg =", "if not arg: print_error(\"Usage: rm <options> source_file\") dst = arg.split()[-1] if os.path.exists(dst) or", "default='N'): print_info('Aborting your action') return key = '<KEY>' % time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime(time.time())) files", "if given release name is installed on user system rel_dir = '%s/cms/cmssw/%s' %", "command to list available releases.\\n' msg += 'Use ' + msg_green('install %s' %", "= stdout.replace('\\n', '').replace('ROOTSYS=', '') dst = '%s/install/lib/release_root' % root if os.path.exists(dst): if os.path.islink(dst):", "os.path.join(reldir, name) if name.find('edm') == 0 and os.path.isfile(fname): # we use Magic(cmd).execute we", "original shell environment\" cmd = '%s %s' % (self.cmd, args.strip()) subprocess.call(cmd, shell=True) def", "cmd += 'apt-get update; ' msg = 'Initialize %s apt repository ...' 
%", "shell Examples: cmssh> xrdcp /a/b/c.root file:////tmp.file.root \"\"\" dyld_path = os.environ.get('DYLD_LIBRARY_PATH', None) root_path =", "= arg.strip() if not arg or arg == 'list': print_info('Local data transfer') dqueue(arg)", "False orig_arg = arg arg = arg.strip() try: last_arg = arg.split(' ')[-1].strip() if", "cmssh.utils import execmd, touch, platform, fix_so from cmssh.cmsfs import dataset_info, block_info, file_info, site_info,", "= 'source `find %s -name init.sh | tail -1`;' % pkg_dir if not", "# arg = arg.strip() # if arg: # print_info(\"Set debug level to %s\"", "os.listdir(os.environ['CMSSW_RELEASES']) msg += '\\nInstalled releases: ' + msg_green(', '.join(releases)) print msg return cmd", "Examples: cmssh> rmdir foo cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug", "still has options and user asked for -f options = src.split(' ') if", "= 'source %s; apt-cache search %s | grep -v -i fwlite' % (script,", "and name.find('.') == -1: archs.append(name) if archs: print '\\nInstalled architectures:' for item in", "os.environ.get('HTTPDEBUG', 0)) def cms_find(arg): \"\"\" Perform lookup of given query in CMS data-services.", "\"\"\" # system modules import os import re import sys import time import", "dst) # set edm utils for given release ipython = get_ipython() rdir =", "yet') def cms_rmdir(arg): \"\"\" cmssh rmdir command removes directory from local file system", "du T3_US_Cornell \"\"\" arg = arg.strip() if pat_site.match(arg): lookup(arg) else: cmd = 'du", "lumi run=190704 cmssh> find user=oliver List of supported entities: dataset, block, file, run,", "= cms_architectures('all') if arg not in cms_archs: msg = 'Wrong architecture, please choose", "key: run(\"voms-proxy-destroy\") cmd = \"voms-proxy-init -rfc -voms cms:/cms -key %s -cert %s\" %", "(self.cmd, args.strip()) subprocess.call(cmd, shell=True) def installed_releases(): \"Print a list of releases installed on", "Examples: cmssh> lumi run=190704 
cmssh> lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root", "via web interface at\\n' msg += 'https://github.com/vkuznet/cmssh/issues/new\\n' msg += 'otherwise it will be", "desc = '' msg = 'Type your problem, attach traceback, etc. Once done", "user system rel_arch = None for arch in cms_architectures(): rel_dir = '%s/cms/cmssw/%s' %", "%s' % rel) msg += ' command to install given release.' print msg", "os.makedirs(cmssw_dir) root = os.environ['CMSSH_ROOT'] idir = os.environ['CMSSH_INSTALL_DIR'] base = os.path.realpath('%s/CMSSW' % root) path", "unsupported_linux(): cmd += ' -unsupported_distribution_hack' sdir = os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW') debug = 0 msg", "') \\ + ' provides detailed info about given CMS entity, ' \\", "def debug_http(arg): \"\"\" Show or set HTTP debug flag. Default is 0. \"\"\"", "user. It accepts the following list of options: - list, which lists local", "given user. 
It accepts the following list of options: - list, which lists", "if userdn: user = get_dashboardname(userdn) print_info('Dashboard information, user=%s' % user) res = jobsummary({'user':", "sdir, 'bootstrap.log', msg, debug, shell=True, call=True) cmd = 'source `find %s/%s/external/apt -name init.sh", "^ROOTSYS=' stdout, stderr = execmd(cmd) if stderr: print \"While executing cmd=%s\" % cmd", "os.path.isdir(pdir): shutil.rmtree(pdir) os.makedirs(pdir) # Set cmssh prompt prompt = 'cms-sh' ipython.prompt_manager.in_template = '%s|\\#>", "import sys import time import json import glob import shutil import base64 import", "msg_blue('Enter') + '\\n' print msg while True: try: uinput = raw_input() if uinput.strip()", "arg[-1] == ')': arg = arg[1:-1] for case in [arg, 'cms_'+arg, 'cms'+arg]: func", "= src.split(' ') if len(options) > 1 and options[0] == '-f': overwrite =", "\"\"\" cmssh command which executes voms-proxy-init on behalf of the user Examples: cmssh>", "\"\"\" arg = arg.strip() if pat_site.match(arg): lookup(arg) else: cmd = 'du ' +", "arg.split('|') if len(args) == 1: # no filter res = CMSMGR.lookup(arg) else: gen", "for given meta-data entity, e.g. dataset, block, file, run. 
Examples: cmssh> info dataset=/a/b/c", "cmssh.utils import osparameters, check_voms_proxy, run, user_input from cmssh.utils import execmd, touch, platform, fix_so", "\"\\nPlease use \" + msg_green('install CMSSW_X_Y_Z') \\ + ' command to install one'", "shell=True) # use subprocess due to apt-get interactive feature if platform() == 'osx':", "'osx': idir = '%s/%s/cms/cmssw/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel) fix_so(idir) print \"Create user", "debug and access2file(fname): with open(fname, 'r') as cms_json: print cms_json.read() def integration_tests(_arg): \"Run", "\"Return list of CMSSW architectures (aka SCRAM_ARCH)\" archs = [a for a in", "['reset', 'clear', 'clean']: path = os.environ['CMSSH_ROOT'] for idir in ['external', 'lib', 'root']: pdir", "cmssh release setup command, it setups CMSSW environment and creates user based directory", "find user=oliver List of supported entities: dataset, block, file, run, lumi, site, user", "arg = arg.strip() try: debug = get_ipython().debug except: debug = 0 if not", "% arg) else: try: status = mkdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_ls(arg):", "not required to use cmsenv\\n' msg += 'please use ' + msg_green('cmsrel') +", "system rel_arch = None for arch in cms_architectures(): rel_dir = '%s/cms/cmssw/%s' % (arch,", "= \"Given pattern '%s' does not exist on local filesystem or in DBS\"", "we have stand-alone installation if os.environ.get('CMSSH_CMSSW', None): msg = '\\nYou are not allowed", "Return lumi info for a given dataset/file/block/lfn/run Examples: cmssh> lumi run=190704 cmssh> lumi", "\"Switch to SCRAM_ARCH=%s\" % arg os.environ['SCRAM_ARCH'] = arg def cms_apt(arg=''): \"Execute apt commands\"", "yet CMSSW release installed on your system.\" msg += \"\\nPlease use \" +", "print_error(\"Usage: rm <options> source_file\") dst = arg.split()[-1] if os.path.exists(dst) or len(glob.glob(dst)): cmd =", "switch to given CMSSW architecture, 
accept <list|all> args\\n' msg += msg_green('scram ') +", "installed on your system.\\n' msg += 'Use ' + msg_green('releases') msg += '", "% (swdir, arch) cmd += 'apt-get install external+fakesystem+1.0; ' cmd += 'apt-get update;", "rel_arch = None for arch in cms_architectures(): rel_dir = '%s/cms/cmssw/%s' % (arch, rel)", "to given CMSSW architecture, accept <list|all> args\\n' msg += msg_green('scram ') + '", "cmssh> rm -rf local_dir cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\" arg = arg.strip() try: debug", "= \"rm %s\" % arg run(cmd) else: if pat_lfn.match(arg.split(':')[-1]): status = rm_lfn(arg, verbose=debug)", "set internal pager Examples: cmssh> pager # shows current setting cmssh> pager None", "= arg if orig_arg.find('|') != -1: arg, flt = orig_arg.split('|', 1) arg =", "rdir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/cms/cmssw' % idir) if os.path.isdir(rdir): for rel in os.listdir(rdir):", "location of CMS JSON file\" if arg: if access2file(arg): os.environ['CMS_JSON'] = arg print_info('CMS_JSON:", "'find dataset file=%s' % lfn] cmd_list += ['find lumi dataset=%s' % dataset, 'find", "cmd = 'apt%s' % arg else: msg = 'Not supported apt command' raise", "default output dumps via pager) Examples: cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh>", "copy file/LFN, e.g. cp local.file or cp /store/user/file.root .\\n' msg += msg_green('info ')", "if rel.lower().find('patch') != -1: print \"Installing cms+cmssw-patch+%s ...\" % rel cmd = 'source", "install CMSSW_5_0_0\\n' msg += msg_green('cmsrel ') \\ + ' switch to given CMSSW", "= arg.strip() if arg: if arg == '0' or arg == 'None' or", "creates directory on local filesystem or remote CMS storage element. 
Examples: cmssh> mkdir", "% fname else: cmd = '-e -f %s' % fname ipython.run_line_magic('edmFileUtil', cmd) if", "is %s\" % ipth.debug) else: if arg == 0 or arg == '0':", "shell \"\"\" def __init__(self, cmd): self.cmd = cmd def execute(self, args=''): \"Execute given", "['0', '1']: print_error('Please provide 0/1 for debug_http command') return print_info(\"Set HTTP debug level", "print_error(msg) return # check existence of crab.cfg crab_dir = os.path.join(work_area, 'crab') crab_cfg =", "given release ipython = get_ipython() rdir = '%s/bin/%s' % (rel_dir, rel_arch) reldir =", "please choose from the following list\\n' msg += ', '.join(cms_archs) raise Exception(msg) print", "or CMS storate elements. Examples: cmssh> cp file1 file2 cmssh> cp file.root T3_US_Cornell:/store/user/name", "= \"voms-proxy-init -rfc -voms cms:/cms -key %s -cert %s\" % (key, cert) run(cmd)", "bug description\" print_error(msg) return if not user_input('Send this ticket', default='N'): print_info('Aborting your action')", "msg return print \"Searching for %s\" % rel script = get_apt_init(os.environ['SCRAM_ARCH']) cmd =", "or CMS entities (se, site, dataset, block, run, release, file). 
Examples: cmssh> ls", "cmssh.dashboard import jobsummary from cmssh.reqmgr import reqmgr from cmssh.cms_objects import get_dashboardname def options(arg):", "read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh> read config.txt \"\"\" try: debug = get_ipython().debug", "if arg: print \"CMSSW releases for %s platform\" % platform() res = release_info(release=None,", "\"Return architecture for given CMSSW release\" args = {'release': rel} releases = get_data(tc_url('py_getReleaseArchitectures'),", "os.environ.get('CMSSH_PAGER', None) msg = \"cmssh pager is set to: %s\" % val print", "CMSSW release and setup its environment\\n' msg += msg_green('arch ') \\ + '", "%s' % orig_arg subprocess.call(cmd, shell=True) else: run(\"cp %s %s\" % (src, dst)) else:", "It accepts the following list of options: - list, which lists local transfer", "CMS meta-data (query DBS/Phedex/SiteDB)\\n' msg += msg_green('dbs_instance') \\ + ' show/set DBS instance,", "debug = get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: mkdir <options>", "# cmd = \"eval `scramv1 runtime -sh`; %s\" % fname cmd = fname", "Examples: cmssh> mkdir foo cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug", "1: cmd = item args = '' else: cmd = split[0] args =", "\"\"\" cmssh command to execute CMSSW cmsRun command. Requires cmsrel to setup CMSSW", "= 0 debug = 0 das_client(host, query, idx, limit, debug, 'plain') def cms_das_json(query):", "# get ROOT from run-time environment cmd = 'eval `scramv1 runtime -sh`; env", "args\\n' msg += msg_green('install ') \\ + ' install CMSSW release, e.g. install", "to show or set internal pager Examples: cmssh> pager # shows current setting", "orig_arg.split('>', 1) out = out.strip() arg = arg.strip() else: out = None if", "' provides detailed info about given CMS entity, ' \\ + 'e.g. 
info", "release name \"%s\"' % rel print_error(msg) msg = 'Please check the you provide", "HTTP debug flag. Default is 0. \"\"\" arg = arg.strip() if arg: if", "out = out.strip() arg = arg.strip() else: out = None if arg: arg", "= os.listdir(os.environ['CMSSW_RELEASES']) msg += '\\nInstalled releases: ' + msg_green(', '.join(releases)) print msg return", "msg = '\\nCheck release architecture status: %s' % status print msg return print", "ROOT from run-time environment cmd = 'eval `scramv1 runtime -sh`; env | grep", "level \"\"\" arg = arg.strip() ipth = get_ipython() if arg == '': print_info(\"Verbose", "except: traceback.print_exc() def cms_architectures(arch_type=None): \"Return list of CMSSW architectures (aka SCRAM_ARCH)\" archs =", "= 'Wrong architecture, please choose from the following list\\n' msg += ', '.join(cms_archs)", "def cms_root(arg): \"\"\" cmssh command to run ROOT within cmssh Examples: cmssh> root", "if not pat.match(rel): msg = 'Fail to validate release name \"%s\"' % rel", "cmssh one cmssh> cmsrel CMSSW_5_2_4 \"\"\" ipython = get_ipython() rel = rel.strip() if", "cms_find(arg): \"\"\" Perform lookup of given query in CMS data-services. cmssh find command", "(path, lib, rel_arch) dst = '%s/install/lib/release_%s' % (root, lib) if os.path.islink(dst): os.remove(dst) else:", "\"DBS instance is set to: %s\" \\ % os.environ.get('DBS_INSTANCE', 'global') print msg print", "post_ticket from cmssh.cms_urls import dbs_instances, tc_url from cmssh.das import das_client from cmssh.url_utils import", "find command lookup given query in CMS data-services. 
Examples: cmssh> find dataset=/ZMM* cmssh>", "DBS instances:' for inst in dbs_instances(): print inst def cms_help_msg(): \"\"\"cmsHelp message\"\"\" msg", "arg: print_error(\"Usage: rm <options> source_file\") dst = arg.split()[-1] if os.path.exists(dst) or len(glob.glob(dst)): cmd", "else: doc = cms_help_msg() print doc def cms_rm(arg): \"\"\" CMS rm command works", "= arg.replace('user=', '') print_info('Dashboard information, user=%s' % user) res = jobsummary({'user': user}) if", "= False orig_arg = arg arg = arg.strip() try: last_arg = arg.split(' ')[-1].strip()", "try: uinput = raw_input() if uinput.strip() == 'EOF': break desc += uinput +", "= 'etc/profile.d/init.sh' script = os.path.join(os.path.join(apt_dir, dirs[-1]), name) return script def cms_install(rel): \"\"\" cmssh", "in [arg, 'cms_'+arg, 'cms'+arg]: func = ipython.find_magic(case) if func: doc = func.func_doc break", "' % prompt return # check if given release name is installed on", "cmssh demo file\" root = os.environ.get('CMSSH_ROOT') path = os.path.join(root, 'cmssh/DEMO') with open(path, 'r')", "jobsummary from cmssh.reqmgr import reqmgr from cmssh.cms_objects import get_dashboardname def options(arg): \"\"\"Extract options", "arg.replace('&', '').strip() src, dst = arg.rsplit(' ', 1) if dst.find('&') != -1: background", "provided query. 
Examples: cmssh> das dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx = 0", "['file1.root', 'file2.root']: if os.path.isfile(fname): os.remove(fname) lfn = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2 = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root'", "\\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2 = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run", "cmssh> du # UNIX command cmssh> du T3_US_Cornell \"\"\" arg = arg.strip() if", "releases.\\n' msg += 'Use ' + msg_green('install %s' % rel) msg += '", "block_info(arg, debug) elif pat_dataset.match(arg): arg = arg.replace('dataset=', '') try: res = dataset_info(arg, debug)", "cmsrel CMSSW_5_2_4 \"\"\" ipython = get_ipython() rel = rel.strip() if not rel or", "arg == 'list': if arg == 'all': print 'CMSSW architectures:' else: print 'CMSSW", "user = get_dashboardname(userdn) print_info('Dashboard information, user=%s' % user) res = jobsummary({'user': user}) elif", "# show all known CMSSW architectures cmssh> arch list # show all CMSSW", "with provided query. Examples: cmssh> das dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx =", "rel) os.environ['CMSSW_RELEASE_BASE'] = path for pkg in ['FWCore', 'DataFormats']: pdir = '%s/%s' %", "' + msg_green('install %s' % rel) msg += ' command to install given", "'' msg = 'Type your problem, attach traceback, etc. 
Once done print '", "arg, flt = orig_arg.split('|', 1) arg = arg.strip() else: flt = None if", "CMS cmsenv is an alias to: eval `scramv1 runtime -sh`' msg = 'Within", "to given CMSSW release and setup its environment\\n' msg += msg_green('arch ') \\", "T3_US_Cornell:/store/user/name cmssh> cp /store/mc/file.root T3_US_Cornell:/store/user/name cmssh> cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha \"\"\" check_voms_proxy() background =", "pkg in pkgs: link = '%s/src/FWCore/%s/python' % (path, pkg) dst = '%s/FWCore/%s' %", "run(cmd) else: if pat_lfn.match(arg.split(':')[-1]): status = rm_lfn(arg, verbose=debug) print_status(status) else: if not os.path.exists(dst):", "orig_arg = arg if orig_arg.find('|') != -1: arg, flt = orig_arg.split('|', 1) arg", "os.path.join(root, 'cmssh/DEMO') with open(path, 'r') as demo_file: print demo_file.read() def results(): \"\"\"Return results", "tickets # list all cmssh tickets cmssh> ticket 14 # get details for", "# DEBUG.set(arg) # else: # print_info(\"Debug level is %s\" % DEBUG.level) def debug_http(arg):", "= mimetypes.guess_type(arg) if mtype[0]: print \"Mime type:\", mtype[0] ipython = get_ipython() magic =", "rm command works with local files/dirs and CMS storate elements. Examples: cmssh> rm", "of supported entities: dataset, block, file, run, lumi, site, user \"\"\" lookup(arg) def", "ls # UNIX command cmssh> ls -l local_file cmssh> ls T3_US_Cornell:/store/user/valya cmssh> ls", "storate elements. Examples: cmssh> rm local_file cmssh> rm -rf local_dir cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root", "results in JSON data format Examples: cmssh> das_json dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch'", "info command provides information for given meta-data entity, e.g. 
dataset, block, file, run.", "') \\ + ' status of job queue or CMS jobs\\n' msg +=", "if check_os(name) and name.find('.') == -1: archs.append(name) if archs: print '\\nInstalled architectures:' for", "\\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel) fix_so(idir) print \"Create user area for %s release", "commands : ' + msg_blue('commands') msg += '\\ncmssh command help : ' +", "will be set for you' print_info(msg) def cmsrel(rel): \"\"\" cmssh release setup command,", "run=160915 \"\"\" arg = arg.strip() res = [] try: debug = get_ipython().debug except:", "if validate_dbs_instance(arg): os.environ['DBS_INSTANCE'] = arg print \"Switch to %s DBS instance\" % arg", "list file/LFN, e.g. ls local.file or ls /store/user/file.root\\n' msg += msg_green('rm ') +", "1) if dst.find('&') != -1: background = True dst = dst.replace('&', '').strip() if", "Examples: cmssh> tickets # list all cmssh tickets cmssh> ticket 14 # get", "else: print \"Installing cms+cmssw+%s ...\" % rel cmd = 'source %s; apt-get install", "(swdir, swdir, arch) if unsupported_linux(): cmd += ' -unsupported_distribution_hack' sdir = os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW')", "shell environment\" cmd = '%s %s' % (self.cmd, args.strip()) run(cmd) def subprocess(self, args=''):", "== 'Darwin' and arg == '-submit': crab_submit_remotely(rel, work_area) return cmd = 'source $CRAB_ROOT/crab.sh;", "pat_run.match(arg): arg = arg.replace('run=', '') res = run_info(arg, debug) elif pat_release.match(arg): arg =", "as anonymous gist ticket' print_info(msg) if not user_input('Proceed', default='N'): return email = raw_input('Your", "os.remove(dst) else: shutil.rmtree(dst) os.symlink(link, dst) # switch to given release os.environ['CMSSW_VERSION'] = rel", "%s/foo' % sename, 'ls %s' % sename, ] cmd_list += ['cp %s file.root'", "'No pattern is allowed for %s look-up' % startswith print_error(msg) else: cmd =", "arch = os.environ.get('SCRAM_ARCH', None) if not vdir or not arch: msg = 'Unable", "if 
given release name is installed on user system rel_arch = None for", "print msg def cms_read(arg): \"\"\" cmssh command to read provided HTML page (by", "list_results, check_os, unsupported_linux, access2file from cmssh.utils import osparameters, check_voms_proxy, run, user_input from cmssh.utils", "rmdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug = get_ipython().debug except: debug =", "setting cmssh> pager None # set pager to nill \"\"\" arg = arg.strip()", "debug = get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: cp <options>", "in current shell environment # old command for reference: # cmd = \"eval", "in ['external', 'lib', 'root']: pdir = os.path.join(path, 'install/lib/release_%s' % idir) if os.path.islink(pdir): os.remove(pdir)", "ensure that your usercert.pem is mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\" if not arg: return", "for name in os.listdir(os.environ['VO_CMS_SW_DIR']): if check_os(name) and name.find('.') == -1: archs.append(name) if archs:", "= item[1] if check_os(rel_arch): output.append((rel_arch, status)) return output def check_release_arch(rel): \"Check release/architecture\" #", "raw_input() if uinput.strip() == 'EOF': break desc += uinput + '\\n' except KeyboardInterrupt:", "= get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: rmdir <options> dir\")", "= os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/cms/cmssw' % idir) if os.path.isdir(rdir): for rel in os.listdir(rdir): releases.append('%s/%s'", "CMS releases. 
Optional parameters either <list> or <all> Examples: cmssh> releases # show", "def subprocess(self, args=''): \"Execute given command in original shell environment\" cmd = '%s", "else: cmd = 'du ' + arg cmd = cmd.strip() subprocess.call(cmd, shell=True) def", "'clear', 'clean']: path = os.environ['CMSSH_ROOT'] for idir in ['external', 'lib', 'root']: pdir =", "link = '%s/%s/%s' % (path, lib, rel_arch) dst = '%s/install/lib/release_%s' % (root, lib)", "prompt prompt = 'cms-sh' ipython.prompt_manager.in_template = '%s|\\#> ' % prompt return # check", "msg_green('mkdir/rmdir ') + ' mkdir/rmdir command, ' \\ + 'e.g. mkdir /path/foo or", "for k, v in mdict['line'].items() if v.func_name.find('cms_')!=-1] cmds.sort() for key in cmds: print", "lfn] cmd_list += ['ls %s' % dataset, 'info %s' % dataset] cmd_list +=", "if uinput.strip() == 'EOF': break desc += uinput + '\\n' except KeyboardInterrupt: break", "msg_blue('commands') msg += '\\ncmssh command help : ' + msg_blue('cmshelp <command>') msg +=", "accepts the following list of options: - list, which lists local transfer jobs", "rel_arch = arch break if not rel_arch: msg = 'Release ' + msg_red(rel)", "import get_dashboardname def options(arg): \"\"\"Extract options from given arg string\"\"\" opts = []", "release name is installed on user system rel_arch = None for arch in", "crab command' print_info(msg) print \"cwd:\", os.getcwd() return if os.uname()[0] == 'Darwin' and arg", "'/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run = 160915 sename = 'T3_US_Cornell:/store/user/valya'", "= rel.strip() pat = pat_release if not pat.match(rel): msg = 'Fail to validate", "file.root %s' % sename, 'ls %s' % sename, 'rm %s/file.root' % sename, 'ls", "'eval `scramv1 runtime -sh`; env | grep ^ROOTSYS=' stdout, stderr = execmd(cmd) if", "+= ['find 
config dataset=%s' % dataset2] cmd_list += ['du T3_US_Cornell', 'ls T3_US_Cornell'] cmd_list", "'ok' output = [] for arch, status in get_release_arch(rel): if not status: msg", "order to run crab command you must ' msg += 'run ' +", "= '%s/install/lib/release_root' % root if os.path.exists(dst): if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(rootsys, dst)", "debug) def cms_releases(arg=None): \"\"\" List available CMS releases. Optional parameters either <list> or", "-arch %s' % (swdir, swdir, arch) if unsupported_linux(): cmd += ' -unsupported_distribution_hack' sdir", "msg += msg_green('vomsinit ') \\ + ' setup your proxy (aka voms-proxy-init)\\n' msg", "cmssh> pager None # set pager to nill \"\"\" arg = arg.strip() if", "\"Print a list of releases installed on a system\" _osname, osarch = osparameters()", "+ msg_green('cmsrel') + ' command and ' msg += 'CMS release environment will", "| tail -1`; ' \\ % (swdir, arch) cmd += 'apt-get install external+fakesystem+1.0;", "os.getcwd()) def cmsexe(cmd): \"\"\" Execute given command within CMSSW environment \"\"\" vdir =", "implemented yet') def cms_rmdir(arg): \"\"\" cmssh rmdir command removes directory from local file", "list_results(res, debug=True, flt=flt) def cms_config(arg): \"\"\" Return configuration object for given dataset Examples:", "msg, debug, shell=True, call=True) cmd = 'source `find %s/%s/external/apt -name init.sh | tail", "dyld_path: os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path, 'lib') cmd = '%s/xrdcp %s' % (os.path.join(root_path, 'bin'), arg.strip())", "msg_green('info ') \\ + ' provides detailed info about given CMS entity, '", "output = [] for arch, status in get_release_arch(rel): if not status: msg =", "if os.path.exists(dst): if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(rootsys, dst) # set edm utils", "\"\"\" dyld_path = os.environ.get('DYLD_LIBRARY_PATH', None) root_path = os.environ['DEFAULT_ROOT'] if dyld_path: 
os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path,", "\"%s is ready, cwd: %s\" % (rel, os.getcwd()) def cmsexe(cmd): \"\"\" Execute given", "msg += 'Use ' + msg_green('releases') msg += ' command to list available", "== '(' and arg[-1] == ')': arg = arg[1:-1] for case in [arg,", "res = jobsummary({'site': site}) elif pat_user.match(arg): user = arg.replace('user=', '') print_info('Dashboard information, user=%s'", "use \" + msg_green('install CMSSW_X_Y_Z') \\ + ' command to install one' print", "print_error(\"Usage: mkdir <options> dir\") if arg.find(':') == -1: # not a SE:dir pattern", "$CRAB_ROOT/crab.sh; crab %s' % arg cmsexe(cmd) def cmsrun(arg): \"\"\" cmssh command to execute", "for %s look-up' % startswith print_error(msg) else: cmd = 'ls ' + orig_arg", "not allowed to install new release, ' msg += 'since cmssh was installed", "info about given CMS entity, ' \\ + 'e.g. info run=160915\\n' msg +=", "osname, osarch = osparameters() if osname == 'osx' and osarch == 'ia32': return", "gist ticket' print_info(msg) if not user_input('Proceed', default='N'): return email = raw_input('Your Email :", "environment. 
\"\"\" cmd = 'cmsRun %s' % arg cmsexe(cmd) def cms_pager(arg=None): \"\"\" cmssh", "get_ipython().debug except: debug = 0 if debug and access2file(fname): with open(fname, 'r') as", "cmssh commands:\\n' msg += msg_green('find ') \\ + ' search CMS meta-data (query", "command to run ROOT within cmssh Examples: cmssh> root -l \"\"\" pcre_init =", "cmssh> jobs site=T2_US_UCSD cmssh> jobs dashboard cmssh> jobs user=my_cms_user_name \"\"\" res = None", "cmd = 'sh -x %s/bootstrap.sh setup -path %s -arch %s' % (swdir, swdir,", "cmsrel(rel): \"\"\" cmssh release setup command, it setups CMSSW environment and creates user", "if len(par) > 0 and par[0] == '-': opts.append(par) return opts class Magic(object):", "' CMSSW scram command\\n' msg += msg_green('cmsRun ') \\ + ' cmsRun command", "= get_data(tc_url('py_getReleaseArchitectures'), args) output = [] for item in releases: rel_arch = item[0]", "dashboard cmssh> jobs user=my_cms_user_name \"\"\" res = None try: debug = get_ipython().debug except:", "Examples: cmssh> vomsinit By default it applies the following options -rfc -voms cms:/cms", "else: cmd = '-e -f %s' % fname ipython.run_line_magic('edmFileUtil', cmd) if debug: if", "pcre_init = pkg_init('external/pcre') gcc_init = pkg_init('external/gcc') root_init = pkg_init('lcg/root') pkgs_init = '%s %s", "from cmssh.cern_html import read from cmssh.dashboard import jobsummary from cmssh.reqmgr import reqmgr from", "elif pat_block.match(arg): arg = arg.replace('block=', '') res = block_info(arg, debug) elif pat_dataset.match(arg): arg", "orig_arg run(cmd, shell=True) elif pat_se.match(arg): arg = arg.replace('site=', '') res = list_se(arg, debug)", "' for r in results(): print r, type(r)\\n' msg += '\\nList cmssh commands", "not email: msg = \"You did your email address\" print_error(msg) return desc =", "1) arg = arg.strip() else: flt = None if arg: arg = arg.strip()", "one cmssh> cmsrel CMSSW_5_2_4 \"\"\" ipython = get_ipython() rel = rel.strip() if not", "= item if 
os.path.isfile(orig_arg) or os.path.isdir(orig_arg): cmd = 'ls ' + orig_arg run(cmd,", "jobsummary({'user': user}) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_config(arg): \"\"\" Return configuration", "idir in ['external', 'lib', 'root']: pdir = os.path.join(path, 'install/lib/release_%s' % idir) if os.path.islink(pdir):", "tc_architectures from cmssh.results import RESMGR from cmssh.auth_utils import PEMMGR, working_pem from cmssh.cmssw_utils import", "prompt ipython.prompt_manager.in_template = '%s|\\#> ' % rel # final message print \"%s is", "def cms_rm(arg): \"\"\" CMS rm command works with local files/dirs and CMS storate", "cmd_list += ['ls', 'mkdir ttt', 'ls -l', 'rmdir ttt', 'ls'] cmd_list += ['ls", "(os.environ['SCRAM_ARCH'], rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return 'ok' output = [] for arch, status", "'run', 'release', 'file'] for item in entities: if arg.startswith(item + '='): startswith =", "CMS storate elements. Examples: cmssh> cp file1 file2 cmssh> cp file.root T3_US_Cornell:/store/user/name cmssh>", "import msg_red, msg_green, msg_blue from cmssh.iprint import print_warning, print_error, print_status, print_info from cmssh.filemover", "')[-1].strip() if last_arg == '&': background = True arg = arg.replace('&', '').strip() src,", "environment\" pkg_dir = '%s/%s/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir) cmd = 'source `find", "your problem, attach traceback, etc. 
Once done print ' msg += msg_blue('EOF') +", "%s\" % rel script = get_apt_init(os.environ['SCRAM_ARCH']) cmd = 'source %s; apt-cache search %s", "\"\"\" cmssh ls command lists local files/dirs/CMS storate elements or CMS entities (se,", "for idir in ['external', 'lib', 'root']: pdir = os.path.join(path, 'install/lib/release_%s' % idir) if", "post new ticket from cmssh # or post it at https://github.com/vkuznet/cmssh/issues/new \"\"\" if", "run(cmd, shell=True, call=True) def cmscrab(arg): \"\"\" Execute CRAB command, help is available at", "\" + msg_green('install CMSSW_X_Y_Z') \\ + ' command to install one' print msg", "= check_release_arch(rel) if status != 'ok': msg = '\\nCheck release architecture status: %s'", "file system or CMS storage element. Examples: cmssh> rmdir foo cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo", "flt in args[1:]: res = apply_filter(flt.strip(), gen) RESMGR.assign(res) list_results(res, debug) def verbose(arg): \"\"\"", "= item[0] status = item[1] if check_os(rel_arch): output.append((rel_arch, status)) return output def check_release_arch(rel):", "'/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2 = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run =", "'False': if os.environ.has_key('CMSSH_PAGER'): del os.environ['CMSSH_PAGER'] else: os.environ['CMSSH_PAGER'] = arg print \"Set CMSSH pager", "available CMS releases. 
Optional parameters either <list> or <all> Examples: cmssh> releases #", "mkdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug = get_ipython().debug except: debug =", "print \"Installing cms+cmssw-patch+%s ...\" % rel cmd = 'source %s; apt-get install cms+cmssw-patch+%s'", "%s' \\ % (rel, arch) print_warning(msg) if arch != os.environ['SCRAM_ARCH']: msg = 'Your", "'bin'), arg.strip()) run(cmd) if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = dyld_path #def debug(arg): # \"\"\" #", "dbs_instance cms_dbs_prod_global \"\"\" arg = arg.strip() if arg: if validate_dbs_instance(arg): os.environ['DBS_INSTANCE'] = arg", "'Bootstrap %s ...' % arch # run bootstrap command in subprocess.call since it", "DEBUG.level) def debug_http(arg): \"\"\" Show or set HTTP debug flag. Default is 0.", "= 'du ' + arg cmd = cmd.strip() subprocess.call(cmd, shell=True) def lookup(arg): \"\"\"", "= '%s/%s/%s' % (path, lib, rel_arch) dst = '%s/install/lib/release_%s' % (root, lib) if", "command to read provided HTML page (by default output dumps via pager) Examples:", "- site, which lists jobs at given site - dashboard, which lists jobs", "run(\"mkdir %s\" % arg) else: try: status = mkdir(arg, verbose=debug) print_status(status) except: traceback.print_exc()", "cms_architectures('all') if arg not in cms_archs: msg = 'Wrong architecture, please choose from", "lib) if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(link, dst) # switch to given release", "Examples: cmssh> rm local_file cmssh> rm -rf local_dir cmssh> rm T3_US_Cornell:/xrootdfs/cms/store/user/user_name/file.root \"\"\" arg", "queue or CMS jobs\\n' msg += msg_green('read ') \\ + ' read URL/local", "self.cmd = cmd def execute(self, args=''): \"Execute given command in current shell environment\"", "= get_ipython() if arg[0] == '(' and arg[-1] == ')': arg = arg[1:-1]", "= os.path.join(os.environ['VO_CMS_SW_DIR'], rdir) for name in os.listdir(reldir): fname = os.path.join(reldir, name) 
if name.find('edm')", "fname cmd = fname ipython.register_magic_function(Magic(cmd).execute, 'line', name) # Set cmssh prompt ipython.prompt_manager.in_template =", "'%s/%s/%s' % (path, lib, rel_arch) dst = '%s/install/lib/release_%s' % (root, lib) if os.path.islink(dst):", "= 'source %s; apt-get install cms+cmssw-patch+%s' % (script, rel) else: print \"Installing cms+cmssw+%s", "cmssh.filemover import copy_lfn, rm_lfn, mkdir, rmdir, list_se, dqueue from cmssh.utils import list_results, check_os,", "CMSSW release. Examples: cmssh> install CMSSW_5_2_4 \"\"\" rel = rel.strip() pat = pat_release", "or ls /store/user/file.root\\n' msg += msg_green('rm ') + ' remove file/LFN, ' \\", "' + msg_blue('commands') msg += '\\ncmssh command help : ' + msg_blue('cmshelp <command>')", "'root']: pdir = os.path.join(path, 'install/lib/release_%s' % idir) if os.path.islink(pdir): os.remove(pdir) if os.path.isdir(pdir): shutil.rmtree(pdir)", "rmdir <options> dir\") if os.path.exists(arg): run(\"rmdir %s\" % arg) else: try: status =", "%s' % status print msg return print \"Searching for %s\" % rel script", "which queries DAS data-service with provided query and returns results in JSON data", "proxy info (aka voms-proxy-info)\\n' msg += '\\nQuery results are accessible via %s function,", "userdn = os.environ.get('USER_DN', '') if not userdn: cmd = \"voms-proxy-info -identity\" stdout, stderr", "accept <list|all> args\\n' msg += msg_green('scram ') + ' CMSSW scram command\\n' msg", "it invokes # wget/curl and it can be spawned into serate process, therefore", "command to run ROOT xrdcp via cmssh shell Examples: cmssh> xrdcp /a/b/c.root file:////tmp.file.root", "if len(args) == 1: # no filter res = CMSMGR.lookup(arg) else: gen =", "<rel>\\n') releases = os.listdir(os.environ['CMSSW_RELEASES']) msg += '\\nInstalled releases: ' + msg_green(', '.join(releases)) print", "None) work_area = os.environ.get('CMSSW_WORKAREA', None) if not rel or not work_area: msg =", "+= msg_green('cmsrel ') \\ 
+ ' switch to given CMSSW release and setup", "local files/dirs and CMS storate elements. Examples: cmssh> rm local_file cmssh> rm -rf", "cms_arch(arg=None): \"\"\" Show or set CMSSW architecture. Optional parameters either <all> or <list>", "current setting cmssh> pager None # set pager to nill \"\"\" arg =", "inst in dbs_instances(): print inst def cms_help_msg(): \"\"\"cmsHelp message\"\"\" msg = 'Available cmssh", "% startswith print_error(msg) else: cmd = 'ls ' + orig_arg run(cmd, shell=True) if", "'\\nAvailable GRID commands: <cmd> either grid or voms\\n' msg += msg_green('vomsinit ') \\", "msg_green('cmsRun ') \\ + ' cmsRun command for release in question\\n' msg +=", "lumi {190704:[1,2,3,4], 201706:[1,2,3,67]} \"\"\" try: debug = get_ipython().debug except: debug = 0 arg", "note: to enable access to RunSummary service please ensure that your usercert.pem is", "<options> dir\") if os.path.exists(arg): run(\"rmdir %s\" % arg) else: try: status = rmdir(arg,", "# debug shell command # \"\"\" # arg = arg.strip() # if arg:", "ISO-8859-1 -*- #pylint: disable-msg=W0702 \"\"\" Set of UNIX commands, e.g. 
ls, cp, supported", "os.path.join(work_area, 'crab') crab_cfg = os.path.join(crab_dir, 'crab.cfg') if not os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir) if not", "execute it in a shell \"\"\" def __init__(self, cmd): self.cmd = cmd def", "+ ' and hit ' + msg_blue('Enter') + '\\n' print msg while True:", "is not yet installed on your system.\\n' msg += 'Use ' + msg_green('releases')", "CMSSW release):\\n' msg += msg_green('releases ') \\ + ' list available CMSSW releases,", "shutil.rmtree(dst) os.symlink(link, dst) # switch to given release os.environ['CMSSW_VERSION'] = rel os.environ['CMSSW_WORKAREA'] =", "' msg += 'appropriately and re-run crab command' print_info(msg) print \"cwd:\", os.getcwd() return", "print_error(msg) return if not user_input('Send this ticket', default='N'): print_info('Aborting your action') return key", "% arg run(cmd) else: if pat_lfn.match(arg.split(':')[-1]): status = rm_lfn(arg, verbose=debug) print_status(status) else: if", "= get_ipython().debug except: debug = 0 orig_arg = arg if orig_arg.find('|') != -1:", "it is not required to use cmsenv\\n' msg += 'please use ' +", "details for given ticket id cmssh> ticket new # post new ticket from", "lists jobs of current user - user, which lists jobs of given user", "arg = arg.strip() # if arg: # print_info(\"Set debug level to %s\" %", "None try: debug = get_ipython().debug except: debug = 0 orig_arg = arg if", "os.environ.get('VO_CMS_SW_DIR', None) arch = os.environ.get('SCRAM_ARCH', None) if not vdir or not arch: msg", "arg = arg.replace('run=', '') res = run_info(arg, debug) elif pat_release.match(arg): arg = arg.replace('release=',", "\"\"\" cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem') with working_pem(PEMMGR.pem) as key: run(\"voms-proxy-destroy\") cmd = \"voms-proxy-init", "os.environ.get('CMSSH_CMSSW', None): msg = '\\nYou are not allowed to install new release, '", "['FWCore', 'DataFormats']: pdir = '%s/%s' % (idir, pkg) if os.path.exists(pdir): 
shutil.rmtree(pdir) os.mkdir(pdir) touch(os.path.join(pdir,", ": ') if not email: msg = \"You did your email address\" print_error(msg)", "pat_site.match(arg): arg = arg.replace('site=', '') res = site_info(arg, debug) elif pat_lfn.match(arg): arg =", "os.environ.has_key('CMSSH_PAGER'): del os.environ['CMSSH_PAGER'] else: os.environ['CMSSH_PAGER'] = arg print \"Set CMSSH pager to %s\"", "architectures:' else: print 'CMSSW architectures for %s:' \\ % os.uname()[0].replace('Darwin', 'OSX') for name", "= arg.split(' ')[-1].strip() if last_arg == '&': background = True arg = arg.replace('&',", "', '.join(cms_archs) raise Exception(msg) print \"Switch to SCRAM_ARCH=%s\" % arg os.environ['SCRAM_ARCH'] = arg", "arg = arg.replace('site=', '') res = site_info(arg, debug) elif pat_lfn.match(arg): arg = arg.replace('file=',", "%s DBS instance\" % arg else: print \"Invalid DBS instance\" else: msg =", "+= msg_green('das ') + ' query DAS service\\n' msg += msg_green('das_json ') \\", "\"\"\" cmssh release setup command, it setups CMSSW environment and creates user based", "% cmd print_warning(stderr) rootsys = stdout.replace('\\n', '').replace('ROOTSYS=', '') dst = '%s/install/lib/release_root' % root", "\"Searching for %s\" % rel script = get_apt_init(os.environ['SCRAM_ARCH']) cmd = 'source %s; apt-cache", "msg += ' e.g. CMSSW_X_Y_Z<_patchN>' print msg return # check if we have", "'Available cmssh commands:\\n' msg += msg_green('find ') \\ + ' search CMS meta-data", "msg += msg_green('rm ') + ' remove file/LFN, ' \\ + 'e.g. 
rm", "arg = arg.replace('&', '').strip() src, dst = arg.rsplit(' ', 1) if dst.find('&') !=", "run-time environment cmd = 'eval `scramv1 runtime -sh`; env | grep ^ROOTSYS=' stdout,", "% os.environ.get('DBS_INSTANCE', 'global') print msg print '\\nAvailable DBS instances:' for inst in dbs_instances():", "msg_green('vomsinit ') \\ + ' setup your proxy (aka voms-proxy-init)\\n' msg += msg_green('vomsinfo", "os.getcwd() return if os.uname()[0] == 'Darwin' and arg == '-submit': crab_submit_remotely(rel, work_area) return", "= rel os.environ['CMSSW_WORKAREA'] = os.path.join(cmssw_dir, rel) if os.path.isdir(os.path.join(cmssw_dir, rel + '/src')): os.chdir(os.path.join(cmssw_dir, rel", "releases on given platform cmssh> releases all # show all known CMS releases,", "print_info('Dashboard information, site=%s' % site) res = jobsummary({'site': site}) elif pat_user.match(arg): user =", "'cms'+arg]: func = ipython.find_magic(case) if func: doc = func.func_doc break else: doc =", "site_info, run_info from cmssh.cmsfs import CMSMGR, apply_filter, validate_dbs_instance from cmssh.cmsfs import release_info, run_lumi_info", "import pat_release, pat_site, pat_dataset, pat_block from cmssh.regex import pat_lfn, pat_run, pat_se, pat_user from", "sename, 'rm %s/file.root' % sename, 'ls %s' % sename, 'rm file.root', 'cp %s", "if os.path.islink(dst): os.remove(dst) else: shutil.rmtree(dst) os.symlink(rootsys, dst) # set edm utils for given", "msg_blue('results()') msg += ' find dataset=/*Zee*\\n' msg += ' for r in results():", "= pprint.pformat(res) else: ticket = res to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') send_email(to_user, email, title, ticket)", "return print_info(\"Set HTTP debug level to %s\" % arg) os.environ['HTTPDEBUG'] = arg else:", "have yet CMSSW release installed on your system.\" msg += \"\\nPlease use \"", "cmssh> cmshelp ls \"\"\" if arg: if arg.strip() == 'commands': cms_commands() return ipython", "'du ' + arg cmd = cmd.strip() subprocess.call(cmd, 
shell=True) def lookup(arg): \"\"\" Perform", "msg += msg_green('dbs_instance') \\ + ' show/set DBS instance, default is DBS global", "subprocess.call(cmd, shell=True) def installed_releases(): \"Print a list of releases installed on a system\"", "% DEBUG.level) def debug_http(arg): \"\"\" Show or set HTTP debug flag. Default is", "msg += '\\nInstall python software: ' + \\ msg_blue('pip <search|(un)install> <package>') return msg", "not exists' % dst) else: raise Exception('Not implemented yet') def cms_rmdir(arg): \"\"\" cmssh", "' + orig_arg run(cmd, shell=True) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_jobs(arg=None):", "with working_pem(PEMMGR.pem) as key: run(\"voms-proxy-destroy\") cmd = \"voms-proxy-init -rfc -voms cms:/cms -key %s", "in releases: print rel installed_releases() def pkg_init(pkg_dir): \"Create CMS command to source pkg", "of options: - list, which lists local transfer jobs - site, which lists", "res = file_info(arg, debug) elif pat_block.match(arg): arg = arg.replace('block=', '') res = block_info(arg,", "'/src')) else: os.chdir(cmssw_dir) cmd = \"scramv1 project CMSSW %s\" % rel run(cmd) os.chdir(os.path.join(rel,", "\"Bootstrap new architecture\" swdir = os.environ['VO_CMS_SW_DIR'] arch = os.environ['SCRAM_ARCH'] cmd = 'sh -x", "cp <options> source_file target_{file,directory}\") pat = pat_se orig = src.split(' ')[-1] if os.path.exists(orig)", "% prompt return # check if given release name is installed on user", "shutil.rmtree(pdir) os.makedirs(pdir) # Set cmssh prompt prompt = 'cms-sh' ipython.prompt_manager.in_template = '%s|\\#> '", "or rmdir T3_US_Cornell:/store/user/foo\\n' msg += msg_green('ls ') \\ + ' list file/LFN, e.g.", "du T3_US_Cornell\\n' msg += '\\nAvailable CMSSW commands (once you install any CMSSW release):\\n'", "while found arch=%s' \\ % (os.environ['SCRAM_ARCH'], arch) print_warning(msg) msg = '\\n%s/%s is not", "arg.startswith(item + '='): startswith = item if os.path.isfile(orig_arg) 
or os.path.isdir(orig_arg): cmd = 'ls", "+ msg_blue('Enter') + '\\n' print msg while True: try: uinput = raw_input() if", "final message print \"%s is ready, cwd: %s\" % (rel, os.getcwd()) def cmsexe(cmd):", "'Darwin' and arg == '-submit': crab_submit_remotely(rel, work_area) return cmd = 'source $CRAB_ROOT/crab.sh; crab", "'cmsRun %s' % arg cmsexe(cmd) def cms_pager(arg=None): \"\"\" cmssh command to show or", "arg = arg.replace('lfn=', '') res = file_info(arg, debug) elif pat_block.match(arg): arg = arg.replace('block=',", "if os.environ.has_key('CMSSH_PAGER'): del os.environ['CMSSH_PAGER'] else: os.environ['CMSSH_PAGER'] = arg print \"Set CMSSH pager to", "user' % (rel, arch) output.append(msg) if output: return ', '.join(output) osname, osarch =", "val = os.environ.get('CMSSH_PAGER', None) msg = \"cmssh pager is set to: %s\" %", "print_info(\"HTTP debug level is %s\" % os.environ.get('HTTPDEBUG', 0)) def cms_find(arg): \"\"\" Perform lookup", "platform() == 'osx': idir = '%s/%s/cms/cmssw/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel) fix_so(idir) print", "dataset=%s' % dataset2] cmd_list += ['du T3_US_Cornell', 'ls T3_US_Cornell'] cmd_list += ['ls %s'", "default is DBS global instance\\n' msg += msg_green('mkdir/rmdir ') + ' mkdir/rmdir command,", "cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh> read config.txt \"\"\" try: debug =", "detailed info about given CMS entity, ' \\ + 'e.g. info run=160915\\n' msg", "cp file1 file2 cmssh> cp file.root T3_US_Cornell:/store/user/name cmssh> cp /store/mc/file.root T3_US_Cornell:/store/user/name cmssh> cp", "lists local transfer jobs - site, which lists jobs at given site -", "' \\ + 'e.g. 
mkdir /path/foo or rmdir T3_US_Cornell:/store/user/foo\\n' msg += msg_green('ls ')", "%s\" % arg) os.environ['HTTPDEBUG'] = arg else: print_info(\"HTTP debug level is %s\" %", "CMSSW_X_Y_Z<_patchN>' print msg return # check if we have stand-alone installation if os.environ.get('CMSSH_CMSSW',", "pkg) dst = '%s/FWCore/%s' % (idir, pkg) os.symlink(link, dst) link = '%s/src/DataFormats/FWLite/python' %", "cmssh was installed with system CMSSW install area' print msg return # check", "at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\" msg = \\ 'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) # check if", "https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\" if not arg: return try: debug = get_ipython().debug except: debug =", "= \"DBS instance is set to: %s\" \\ % os.environ.get('DBS_INSTANCE', 'global') print msg", "gcc_init = pkg_init('external/gcc') root_init = pkg_init('lcg/root') pkgs_init = '%s %s %s' % (pcre_init,", "and CMS storate elements. Examples: cmssh> rm local_file cmssh> rm -rf local_dir cmssh>", "%s\" % rel run(cmd) os.chdir(os.path.join(rel, 'src')) # get ROOT from run-time environment cmd", "for cmssh\" for fname in ['file1.root', 'file2.root']: if os.path.isfile(fname): os.remove(fname) lfn = \\", "') \\ + ' show your proxy info (aka voms-proxy-info)\\n' msg += '\\nQuery", "lookup(arg) def cms_du(arg): \"\"\" cmssh disk utility cmssh command. 
Examples: cmssh> du #", "dst) link = '%s/src/DataFormats/FWLite/python' % path dst = '%s/DataFormats/FWLite' % idir os.symlink(link, dst)", "method will run in current shell environment # old command for reference: #", "' msg = msg_red(msg) msg += msg_blue('cmsrel <rel>\\n') releases = os.listdir(os.environ['CMSSW_RELEASES']) msg +=", "%s' % dataset] cmd_list += ['find dataset=/ZMM*', 'das dataset=/ZMM*', 'find dataset file=%s' %", "CMSSW_X_Y_Z') \\ + ' command to install one' print msg def cms_read(arg): \"\"\"", "return desc = '' msg = 'Type your problem, attach traceback, etc. Once", "in CMS data-services. \"\"\" arg = arg.strip() debug = get_ipython().debug args = arg.split('|')", "msg_green('cp ') \\ + ' copy file/LFN, e.g. cp local.file or cp /store/user/file.root", "release. Examples: cmssh> install CMSSW_5_2_4 \"\"\" rel = rel.strip() pat = pat_release if", "cmssh> tickets # list all cmssh tickets cmssh> ticket 14 # get details", "import base64 import pprint import mimetypes import traceback import subprocess # cmssh modules", "' install CMSSW release, e.g. 
install CMSSW_5_0_0\\n' msg += msg_green('cmsrel ') \\ +", "Examples: cmssh> read https://cmsweb.cern.ch/couchdb/reqmgr_config_cache/7a2f69a2a0a6df3bf57ebd6586f184e1/configFile cmssh> read https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh> read config.txt \"\"\" try: debug", "msg = 'Not supported apt command' raise Exception(msg) run(cmd) def cms_das(query): \"\"\" cmssh", "you like to create one' if user_input(msg, default='N'): with open('crab.cfg', 'w') as config:", "') + ' CMSSW scram command\\n' msg += msg_green('cmsRun ') \\ + '", "system rel_dir = '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return 'ok' output", "installed on a system\" _osname, osarch = osparameters() releases = [] for idir", "to: eval `scramv1 runtime -sh`' msg = 'Within cmssh it is not required", "= 0 if not arg: print_error(\"Usage: mkdir <options> dir\") if arg.find(':') == -1:", "need # to add scramv1 command in front of edm one, since #", "cmssh> jobs list cmssh> jobs site=T2_US_UCSD cmssh> jobs dashboard cmssh> jobs user=my_cms_user_name \"\"\"", "dashboard, which lists jobs of current user - user, which lists jobs of", "user=my_cms_user_name \"\"\" res = None try: debug = get_ipython().debug except: debug = 0", "msg print '\\nAvailable DBS instances:' for inst in dbs_instances(): print inst def cms_help_msg():", "res.has_key('html_url'): print_status('New gist ticket %s' % res['html_url']) title = 'cmssh gist %s' %", "= None entities = \\ ['se', 'site', 'lfn', 'dataset', 'block', 'run', 'release', 'file']", "= rel.strip() if not rel or rel in ['reset', 'clear', 'clean']: path =", "check_os(rel_arch): output.append((rel_arch, status)) return output def check_release_arch(rel): \"Check release/architecture\" # check if given", "print \"cwd:\", os.getcwd() return if os.uname()[0] == 'Darwin' and arg == '-submit': crab_submit_remotely(rel,", "= os.path.join(os.path.join(apt_dir, 
dirs[-1]), name) return script def cms_install(rel): \"\"\" cmssh command to install", "orig_arg.split('|', 1) arg = arg.strip() else: flt = None if arg: arg =", "%s' % fname ipython.run_line_magic('edmFileUtil', cmd) if debug: if ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent', fname) else: cms_ls(arg)", "print demo_file.read() def results(): \"\"\"Return results from recent query\"\"\" return RESMGR def cms_commands(_arg=None):", "# subprocess.Popen will not catch it run(cmd, sdir, 'bootstrap.log', msg, debug, shell=True, call=True)", "command\\n' msg += msg_green('cmsRun ') \\ + ' cmsRun command for release in", "True: try: uinput = raw_input() if uinput.strip() == 'EOF': break desc += uinput", "or remote CMS storage element. Examples: cmssh> mkdir foo cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo \"\"\"", "# show all CMSSW architectures for given platform \"\"\" if not arg: print", "== 0 or arg == '0': ipth.debug = False else: ipth.debug = True", "(path, pkg) dst = '%s/FWCore/%s' % (idir, pkg) os.symlink(link, dst) link = '%s/src/DataFormats/FWLite/python'", "provides detailed info about given CMS entity, ' \\ + 'e.g. 
info run=160915\\n'", "\"voms-proxy-init -rfc -voms cms:/cms -key %s -cert %s\" % (key, cert) run(cmd) userdn", "if arg: # print_info(\"Set debug level to %s\" % arg) # DEBUG.set(arg) #", "usercert.pem is mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\" if not arg: return try: debug =", "arg = arg.replace('block=', '') res = block_info(arg, debug) elif pat_dataset.match(arg): arg = arg.replace('dataset=',", "= copy_lfn(orig, dst, debug, background, overwrite) print_status(status) except: traceback.print_exc() def cms_architectures(arch_type=None): \"Return list", "your system.\" msg += \"\\nPlease use \" + msg_green('install CMSSW_X_Y_Z') \\ + '", "% fname cmd = fname ipython.register_magic_function(Magic(cmd).execute, 'line', name) # Set cmssh prompt ipython.prompt_manager.in_template", "'%s %s' % (self.cmd, args.strip()) run(cmd) def subprocess(self, args=''): \"Execute given command in", "to SCRAM_ARCH=%s\" % arg os.environ['SCRAM_ARCH'] = arg def cms_apt(arg=''): \"Execute apt commands\" if", "except: debug = 0 arg = arg.replace('dataset=', '').replace('file=', '').replace('block=', '') arg = arg.replace('lfn=',", "# check if given release name is installed on user system rel_arch =", "= \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run = 160915 sename", "T3_US_Cornell:/store/user/name cmssh> cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha \"\"\" check_voms_proxy() background = False orig_arg = arg", "pkgs = ['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger', 'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities', 'Services', 'Utilities'] for", "cmssh.tagcollector import architectures as tc_architectures from cmssh.results import RESMGR from cmssh.auth_utils import PEMMGR,", "working_pem from cmssh.cmssw_utils import 
crab_submit_remotely, crabconfig from cmssh.cern_html import read from cmssh.dashboard import", "= arg[1:-1] for case in [arg, 'cms_'+arg, 'cms'+arg]: func = ipython.find_magic(case) if func:", "touch, platform, fix_so from cmssh.cmsfs import dataset_info, block_info, file_info, site_info, run_info from cmssh.cmsfs", "file1 file2 cmssh> cp file.root T3_US_Cornell:/store/user/name cmssh> cp /store/mc/file.root T3_US_Cornell:/store/user/name cmssh> cp T3_US_Cornell:/store/user/name/file.root", "else: cmd = split[0] args = split[-1] mgr.run_line_magic(cmd, args) def cms_info(arg): \"\"\" cmssh", "= pkg_init('external/gcc') root_init = pkg_init('lcg/root') pkgs_init = '%s %s %s' % (pcre_init, gcc_init,", "msg += ' command to install given release.' print msg return # set", "= res to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') send_email(to_user, email, title, ticket) else: res = get_tickets(arg)", "\"cmsenv command\" # in CMS cmsenv is an alias to: eval `scramv1 runtime", "cmd = cmd.strip() subprocess.call(cmd, shell=True) def lookup(arg): \"\"\" Perform lookup of given query", "del os.environ['CMSSH_PAGER'] else: os.environ['CMSSH_PAGER'] = arg print \"Set CMSSH pager to %s\" %", "arg.find(':') == -1: # not a SE:dir pattern run(\"mkdir %s\" % arg) else:", "msg_green('install CMSSW_X_Y_Z') \\ + ' command to install one' print msg def cms_read(arg):", "fix_so from cmssh.cmsfs import dataset_info, block_info, file_info, site_info, run_info from cmssh.cmsfs import CMSMGR,", "check_os, unsupported_linux, access2file from cmssh.utils import osparameters, check_voms_proxy, run, user_input from cmssh.utils import", "res = get_tickets(arg) RESMGR.assign(res) pprint.pprint(res) def demo(_arg=None): \"Show cmssh demo file\" root =", "== 'dashboard': userdn = os.environ.get('USER_DN', None) if userdn: user = get_dashboardname(userdn) print_info('Dashboard information,", "read URL/local file content\\n' msg += msg_green('root ') + ' invoke ROOT\\n' msg", 
"KeyboardInterrupt: break if not desc: msg = \"You did not provide bug description\"", "\\ + 'e.g. mkdir /path/foo or rmdir T3_US_Cornell:/store/user/foo\\n' msg += msg_green('ls ') \\", "within CMSSW environment \"\"\" vdir = os.environ.get('VO_CMS_SW_DIR', None) arch = os.environ.get('SCRAM_ARCH', None) if", "def cms_apt(arg=''): \"Execute apt commands\" if '-cache' in arg or '-get' in arg:", "time import json import glob import shutil import base64 import pprint import mimetypes", "arg.strip()) run(cmd) def cms_xrdcp(arg): \"\"\" cmssh command to run ROOT xrdcp via cmssh", "background = False orig_arg = arg arg = arg.strip() try: last_arg = arg.split('", "use ' + msg_green('cmsrel') + ' command and ' msg += 'CMS release", "[] try: debug = get_ipython().debug except: debug = 0 orig_arg = arg if", "given CMSSW release. Examples: cmssh> install CMSSW_5_2_4 \"\"\" rel = rel.strip() pat =", "print_info(\"Set HTTP debug level to %s\" % arg) os.environ['HTTPDEBUG'] = arg else: print_info(\"HTTP", "dataset Examples: cmssh> config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\" if arg: arg = arg.strip() if pat_dataset.match(arg):", "including online, tests, etc. \"\"\" if arg: print \"CMSSW releases for %s platform\"", "') \\ + ' query DAS and return data in JSON format\\n' msg", "was installed with system CMSSW install area' print msg return # check if", "msg_green('find ') \\ + ' search CMS meta-data (query DBS/Phedex/SiteDB)\\n' msg += msg_green('dbs_instance')", "for given site, e.g. 
du T3_US_Cornell\\n' msg += '\\nAvailable CMSSW commands (once you", "== 1: # no filter res = CMSMGR.lookup(arg) else: gen = CMSMGR.lookup(args[0].strip()) for", "cms_architectures(): rel_dir = '%s/cms/cmssw/%s' % (arch, rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): rel_arch = arch", "cmssh> install CMSSW_5_2_4 \"\"\" rel = rel.strip() pat = pat_release if not pat.match(rel):", "work_area) return cmd = 'source $CRAB_ROOT/crab.sh; crab %s' % arg cmsexe(cmd) def cmsrun(arg):", "'%s|\\#> ' % prompt return # check if given release name is installed", "- dashboard, which lists jobs of current user - user, which lists jobs", "choose from the following list\\n' msg += ', '.join(cms_archs) raise Exception(msg) print \"Switch", "voms-proxy-init)\\n' msg += msg_green('vomsinfo ') \\ + ' show your proxy info (aka", "msg += '\\ncmssh command help : ' + msg_blue('cmshelp <command>') msg += '\\nInstall", "arg cmd = cmd.strip() subprocess.call(cmd, shell=True) def lookup(arg): \"\"\" Perform lookup of given", "file.root' % lfn, 'ls', 'cp file.root %s' % sename, 'ls %s' % sename,", "lumi {190704:[1,2,3]}'] cmd_list += ['find config dataset=%s' % dataset2] cmd_list += ['du T3_US_Cornell',", "startswith = None entities = \\ ['se', 'site', 'lfn', 'dataset', 'block', 'run', 'release',", "install new release, ' msg += 'since cmssh was installed with system CMSSW", "not vdir or not arch: msg = 'Unable to identify CMSSW environment, please", "apt-get install cms+cmssw-patch+%s' % (script, rel) else: print \"Installing cms+cmssw+%s ...\" % rel", "None) if userdn: user = get_dashboardname(userdn) print_info('Dashboard information, user=%s' % user) res =", "dataset, 'ls run=%s' % run, 'ls file=%s' % lfn] cmd_list += ['ls %s'", "to %s\" % arg else: val = os.environ.get('CMSSH_PAGER', None) msg = \"cmssh pager", "-sh`' msg = 'Within cmssh it is not required to use cmsenv\\n' msg", "= 0 orig_arg = arg if orig_arg.find('|') != -1: arg, flt = orig_arg.split('|',", 
"new ticket via web interface at\\n' msg += 'https://github.com/vkuznet/cmssh/issues/new\\n' msg += 'otherwise it", "CMSSW releases on given platform cmssh> releases all # show all known CMS", "for debug_http command') return print_info(\"Set HTTP debug level to %s\" % arg) os.environ['HTTPDEBUG']", "arg) os.environ['HTTPDEBUG'] = arg else: print_info(\"HTTP debug level is %s\" % os.environ.get('HTTPDEBUG', 0))", "cmd = 'source %s; apt-get install cms+cmssw+%s' % (script, rel) subprocess.call(cmd, shell=True) #", "= [] for item in releases: rel_arch = item[0] status = item[1] if", "set pager to nill \"\"\" arg = arg.strip() if arg: if arg ==", "msg = 'No pattern is allowed for %s look-up' % startswith print_error(msg) else:", "') if len(options) > 1 and options[0] == '-f': overwrite = True else:", "0 if not arg: print_error(\"Usage: cp <options> source_file target_{file,directory}\") pat = pat_se orig", "cmd=%s\" % cmd print_warning(stderr) rootsys = stdout.replace('\\n', '').replace('ROOTSYS=', '') dst = '%s/install/lib/release_root' %", "ROOT\\n' msg += msg_green('du ') \\ + ' display disk usage for given", "\"\"\" cmssh rmdir command removes directory from local file system or CMS storage", "= '' else: cmd = split[0] args = split[-1] mgr.run_line_magic(cmd, args) def cms_info(arg):", "osname == 'osx' and osarch == 'ia32': return 'OSX/ia32 is not supported in", "if last_arg == '&': background = True arg = arg.replace('&', '').strip() src, dst", "def get_apt_init(arch): \"Return proper apt init.sh for given architecture\" apt_dir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'],", "== 'EOF': break desc += uinput + '\\n' except KeyboardInterrupt: break if not", "lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> lumi block=/Photon/Run2012A-29Jun2012-v1/AOD#3e33ce8e-c44d-11e1-9a26-003048f0e1c6find cmssh> lumi file=/store/data/Run2012A/Photon/AOD/29Jun2012-v1/0000/001B241C-ADC3-E111-BD1D-001E673971CA.root cmssh> lumi run=190704 cmssh> lumi", "import release_info, 
run_lumi_info from cmssh.github import get_tickets, post_ticket from cmssh.cms_urls import dbs_instances, tc_url", "status of job queue or CMS jobs\\n' msg += msg_green('read ') \\ +", "'%s/%s/cms/cmssw/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel) fix_so(idir) print \"Create user area for %s", "= os.environ.get('CMSSW_WORKAREA', None) if not rel or not work_area: msg = 'In order", "\"\"\" Set/get verbosity level \"\"\" arg = arg.strip() ipth = get_ipython() if arg", "is in place status = check_release_arch(rel) if status != 'ok': msg = '\\nCheck", "cmd = 'du ' + arg cmd = cmd.strip() subprocess.call(cmd, shell=True) def lookup(arg):", "return output def check_release_arch(rel): \"Check release/architecture\" # check if given release name is", "idir) if os.path.islink(pdir): os.remove(pdir) if os.path.isdir(pdir): shutil.rmtree(pdir) os.makedirs(pdir) # Set cmssh prompt prompt", "debug) elif pat_dataset.match(arg): arg = arg.replace('dataset=', '') try: res = dataset_info(arg, debug) except", "= '\\n%s/%s is not installed within cmssh, proceed' \\ % (rel, arch) if", "= 'cms-sh' ipython.prompt_manager.in_template = '%s|\\#> ' % prompt return # check if given", "else: cms_archs = cms_architectures('all') if arg not in cms_archs: msg = 'Wrong architecture,", "% arg) os.environ['HTTPDEBUG'] = arg else: print_info(\"HTTP debug level is %s\" % os.environ.get('HTTPDEBUG',", "%s' % (pcre_init, gcc_init, root_init) cmd = '%s root -l %s' % (pkgs_init,", "out = orig_arg.split('>', 1) out = out.strip() arg = arg.strip() else: out =", "<usercert.pem> \"\"\" cert = os.path.join(os.environ['HOME'], '.globus/usercert.pem') with working_pem(PEMMGR.pem) as key: run(\"voms-proxy-destroy\") cmd =", "try: debug = get_ipython().debug except: debug = 0 if debug and access2file(fname): with", "-f options = src.split(' ') if len(options) > 1 and options[0] == '-f':", "'')) def cms_lumi(arg): \"\"\" Return lumi info for a given dataset/file/block/lfn/run Examples: 
cmssh>", "filter res = CMSMGR.lookup(arg) else: gen = CMSMGR.lookup(args[0].strip()) for flt in args[1:]: res", "0 if not arg: print_error(\"Usage: mkdir <options> dir\") if arg.find(':') == -1: #", "at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\" if not arg: return try: debug = get_ipython().debug except: debug", "pprint.pprint(res) def demo(_arg=None): \"Show cmssh demo file\" root = os.environ.get('CMSSH_ROOT') path = os.path.join(root,", "CMS rm command works with local files/dirs and CMS storate elements. Examples: cmssh>", "os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel) os.environ['CMSSW_RELEASE_BASE'] = path for pkg in ['FWCore', 'DataFormats']: pdir", "os.remove(pdir) if os.path.isdir(pdir): shutil.rmtree(pdir) os.makedirs(pdir) # Set cmssh prompt prompt = 'cms-sh' ipython.prompt_manager.in_template", "idir = '%s/%s/cms/cmssw/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel) fix_so(idir) print \"Create user area", "os.path.exists(arg): run(\"rmdir %s\" % arg) else: try: status = rmdir(arg, verbose=debug) print_status(status) except:", "= rmdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_mkdir(arg): \"\"\" cmssh mkdir command creates", "remote CMS storage element. 
Examples: cmssh> mkdir foo cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg", "= \"You did not provide bug description\" print_error(msg) return if not user_input('Send this", "\"scramv1 project CMSSW %s\" % rel run(cmd) os.chdir(os.path.join(rel, 'src')) # get ROOT from", "\"\"\" Execute given command within CMSSW environment \"\"\" vdir = os.environ.get('VO_CMS_SW_DIR', None) arch", "' command' print_error(msg) return # check existence of crab.cfg crab_dir = os.path.join(work_area, 'crab')", "print msg return cmd = \"eval `scramv1 runtime -sh`; %s\" % cmd run(cmd,", "+ msg_green('install CMSSW_X_Y_Z') \\ + ' command to install one' print msg def", "msg += 'otherwise it will be posted as anonymous gist ticket' print_info(msg) if", "\"\"\" Return lumi info for a given dataset/file/block/lfn/run Examples: cmssh> lumi run=190704 cmssh>", "== 'commands': cms_commands() return ipython = get_ipython() if arg[0] == '(' and arg[-1]", "'Use ' + msg_green('releases') msg += ' command to list available releases.\\n' msg", "status != 'ok': msg = '\\nCheck release architecture status: %s' % status print", "for release in question\\n' msg += '\\nAvailable GRID commands: <cmd> either grid or", "ls command lists local files/dirs/CMS storate elements or CMS entities (se, site, dataset,", "+ msg_blue('cmshelp <command>') msg += '\\nInstall python software: ' + \\ msg_blue('pip <search|(un)install>", "= orig_arg.split('|', 1) arg = arg.strip() else: flt = None if arg: arg", "os.uname()[0].replace('Darwin', 'OSX') for name in cms_architectures('all'): if arg == 'all': print name else:", "debug = get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: rm <options>", "proceed' \\ % (rel, arch) if user_input(msg, default='N'): os.environ['SCRAM_ARCH'] = arch if not", "or arg == 'None' or arg == 'False': if os.environ.has_key('CMSSH_PAGER'): del os.environ['CMSSH_PAGER'] else:", "pat_se.match(arg): arg = arg.replace('site=', '') res = list_se(arg, debug) 
elif pat_site.match(arg): arg =", "and user asked for -f options = src.split(' ') if len(options) > 1", "msg += msg_green('cp ') \\ + ' copy file/LFN, e.g. cp local.file or", "not pat.match(dst): if background: cmd = 'cp %s' % orig_arg subprocess.call(cmd, shell=True) else:", "#!/usr/bin/env python #-*- coding: ISO-8859-1 -*- #pylint: disable-msg=W0702 \"\"\" Set of UNIX commands,", "orig_arg = arg if orig_arg.find('>') != -1: arg, out = orig_arg.split('>', 1) out", "/store/user/file.root .\\n' msg += msg_green('info ') \\ + ' provides detailed info about", "'src')) # get ROOT from run-time environment cmd = 'eval `scramv1 runtime -sh`;", "+ ' command to install one' print msg def cms_read(arg): \"\"\" cmssh command", "os.environ['DBS_INSTANCE'] = arg print \"Switch to %s DBS instance\" % arg else: print", ".\\n' msg += msg_green('info ') \\ + ' provides detailed info about given", "+ ' query DAS service\\n' msg += msg_green('das_json ') \\ + ' query", "site=T2_US_UCSD cmssh> jobs dashboard cmssh> jobs user=my_cms_user_name \"\"\" res = None try: debug", "['pager 0', 'debug_http 0'] cmd_list += ['ls', 'mkdir ttt', 'ls -l', 'rmdir ttt',", "# use subprocess due to apt-get interactive feature if platform() == 'osx': idir", "ttt', 'ls'] cmd_list += ['ls dataset=%s' % dataset, 'ls run=%s' % run, 'ls", "'cp %s file1.root &' % lfn, 'cp %s file2.root &' % lfn2, 'ls']", "'__init__.py')) pkgs = ['Framework', 'GuiBrowsers', 'Integration', 'MessageLogger', 'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities', 'Services', 'Utilities']", "+= '\\nAvailable GRID commands: <cmd> either grid or voms\\n' msg += msg_green('vomsinit ')", "dataset, 'find lumi {\"190704\":[1,2,3]}', 'find lumi {190704:[1,2,3]}'] cmd_list += ['find config dataset=%s' %", "crab_cfg = os.path.join(crab_dir, 'crab.cfg') if not os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir) if not os.path.isfile(crab_cfg): msg", "base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n') 
send_email(to_user, email, title, ticket) else: res = get_tickets(arg) RESMGR.assign(res) pprint.pprint(res) def demo(_arg=None):", "set HTTP debug flag. Default is 0. \"\"\" arg = arg.strip() if arg:", "0 orig_arg = arg if orig_arg.find('|') != -1: arg, flt = orig_arg.split('|', 1)", "out = None if arg: arg = arg.strip() read(arg, out, debug) def cms_releases(arg=None):", "be set for you' print_info(msg) def cmsrel(rel): \"\"\" cmssh release setup command, it", "arg == 'list': print_info('Local data transfer') dqueue(arg) elif arg == 'dashboard': userdn =", "release_info, run_lumi_info from cmssh.github import get_tickets, post_ticket from cmssh.cms_urls import dbs_instances, tc_url from", "' + arg cmd = cmd.strip() subprocess.call(cmd, shell=True) def lookup(arg): \"\"\" Perform lookup", "cmssh> ticket 14 # get details for given ticket id cmssh> ticket new", "cmssh> dbs_instance cms_dbs_prod_global \"\"\" arg = arg.strip() if arg: if validate_dbs_instance(arg): os.environ['DBS_INSTANCE'] =", "jobs at given site - dashboard, which lists jobs of current user -", "\\ + ' setup your proxy (aka voms-proxy-init)\\n' msg += msg_green('vomsinfo ') \\", "for arch in cms_architectures(): rel_dir = '%s/cms/cmssw/%s' % (arch, rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)):", "'\\n' except KeyboardInterrupt: break if not desc: msg = \"You did not provide", "available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\" msg = \\ 'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) # check", "%s -arch %s' % (swdir, swdir, arch) if unsupported_linux(): cmd += ' -unsupported_distribution_hack'", "CMSSW releases, accepts <list|all> args\\n' msg += msg_green('install ') \\ + ' install", "print_info from cmssh.filemover import copy_lfn, rm_lfn, mkdir, rmdir, list_se, dqueue from cmssh.utils import", "sename, 'ls %s' % sename, ] cmd_list += ['cp %s file.root' % lfn,", "install given CMSSW release. 
Examples: cmssh> install CMSSW_5_2_4 \"\"\" rel = rel.strip() pat", "reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir) for name in os.listdir(reldir): fname = os.path.join(reldir, name) if", "if arg == 'all': print 'CMSSW architectures:' else: print 'CMSSW architectures for %s:'", "= None for arch in cms_architectures(): rel_dir = '%s/cms/cmssw/%s' % (arch, rel) if", "from cmssh.iprint import print_warning, print_error, print_status, print_info from cmssh.filemover import copy_lfn, rm_lfn, mkdir,", "send_email from cmssh.regex import pat_release, pat_site, pat_dataset, pat_block from cmssh.regex import pat_lfn, pat_run,", "return script def cms_install(rel): \"\"\" cmssh command to install given CMSSW release. Examples:", "if not os.path.isfile(crab_cfg): msg = 'No crab.cfg file found in %s' % crab_dir", "%s\" % item) split = item.split(' ', 1) if len(split) == 1: cmd", "for name in cms_architectures('all'): if arg == 'all': print name else: if check_os(name):", "ticket) else: res = get_tickets(arg) RESMGR.assign(res) pprint.pprint(res) def demo(_arg=None): \"Show cmssh demo file\"", "mkdir /path/foo or rmdir T3_US_Cornell:/store/user/foo\\n' msg += msg_green('ls ') \\ + ' list", "\\ + ' show/set DBS instance, default is DBS global instance\\n' msg +=", "if not os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir) root = os.environ['CMSSH_ROOT'] idir = os.environ['CMSSH_INSTALL_DIR'] base = os.path.realpath('%s/CMSSW'", "+= 'Use ' + msg_green('releases') msg += ' command to list available releases.\\n'", "else: fname = os.environ.get('CMS_JSON') print_info('CMS JSON: %s' % fname) try: debug = get_ipython().debug", "architecture\" swdir = os.environ['VO_CMS_SW_DIR'] arch = os.environ['SCRAM_ARCH'] cmd = 'sh -x %s/bootstrap.sh setup", "not supported in CMSSW' return 'no match' def get_apt_init(arch): \"Return proper apt init.sh", "information, user=%s' % user) res = jobsummary({'user': user}) elif pat_site.match(arg): site = arg.replace('site=',", "arg = 
arg.strip() else: out = None if arg: arg = arg.strip() read(arg,", "json import glob import shutil import base64 import pprint import mimetypes import traceback", "CMS data-services. \"\"\" arg = arg.strip() debug = get_ipython().debug args = arg.split('|') if", "= CMSMGR.lookup(arg) else: gen = CMSMGR.lookup(args[0].strip()) for flt in args[1:]: res = apply_filter(flt.strip(),", "+= 'since cmssh was installed with system CMSSW install area' print msg return", "{'release': rel} releases = get_data(tc_url('py_getReleaseArchitectures'), args) output = [] for item in releases:", "run crab command you must ' msg += 'run ' + msg_blue('cmsrel') +", "= block_info(arg, debug) elif pat_dataset.match(arg): arg = arg.replace('dataset=', '') try: res = dataset_info(arg,", "= None try: debug = get_ipython().debug except: debug = 0 orig_arg = arg", "sename, ] cmd_list += ['cp %s file.root' % lfn, 'ls', 'cp file.root %s'", "cms_apt(arg=''): \"Execute apt commands\" if '-cache' in arg or '-get' in arg: cmd", "and setup its environment\\n' msg += msg_green('arch ') \\ + ' show or", "dyld_path #def debug(arg): # \"\"\" # debug shell command # \"\"\" # arg", "dataset=/Cosmics/CRUZET3-v1/RAW cmssh> find config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh> find run=160915 cmssh> find lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh>", "https://twiki.cern.ch/twiki/bin/view/CMSPublic/WorkBookFWLitePython cmssh> read config.txt \"\"\" try: debug = get_ipython().debug except: debug = 0", "% (script, rel) subprocess.call(cmd, shell=True) # use subprocess due to apt-get interactive feature", "= os.path.join(os.environ['CMSSH_ROOT'], 'CMSSW') debug = 0 msg = 'Bootstrap %s ...' 
% arch", "0', 'debug_http 0'] cmd_list += ['ls', 'mkdir ttt', 'ls -l', 'rmdir ttt', 'ls']", "len(split) == 1: cmd = item args = '' else: cmd = split[0]", "startswith: msg = 'No pattern is allowed for %s look-up' % startswith print_error(msg)", "cmssh, proceed' \\ % (rel, arch) if user_input(msg, default='N'): os.environ['SCRAM_ARCH'] = arch if", "print_warning, print_error, print_status, print_info from cmssh.filemover import copy_lfn, rm_lfn, mkdir, rmdir, list_se, dqueue", "print_info(msg) print \"cwd:\", os.getcwd() return if os.uname()[0] == 'Darwin' and arg == '-submit':", "arg arg = arg.strip() try: last_arg = arg.split(' ')[-1].strip() if last_arg == '&':", "if pat_site.match(arg): lookup(arg) else: cmd = 'du ' + arg cmd = cmd.strip()", "verbosity level \"\"\" arg = arg.strip() ipth = get_ipython() if arg == '':", "\"\\nInstalled releases:\" for rel in releases: print rel else: msg = \"\\nYou don't", "cmd_list += ['ls %s' % dataset, 'info %s' % dataset] cmd_list += ['find", "release setup command, it setups CMSSW environment and creates user based directory structure.", "msg += ' for r in results(): print r, type(r)\\n' msg += '\\nList", "os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): return 'ok' output = [] for arch, status in get_release_arch(rel): if", "\\ % (rel, arch) if user_input(msg, default='N'): os.environ['SCRAM_ARCH'] = arch if not os.path.isdir(\\", "set at cmsrel) rel = os.environ.get('CMSSW_VERSION', None) work_area = os.environ.get('CMSSW_WORKAREA', None) if not", "-name init.sh | tail -1`; ' \\ % (swdir, arch) cmd += 'apt-get", "Examples: cmssh> pager # shows current setting cmssh> pager None # set pager", "apt init.sh for given architecture\" apt_dir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/external/apt' % arch) dirs", "rel) run(cmd) if rel.lower().find('patch') != -1: print \"Installing cms+cmssw-patch+%s ...\" % rel cmd", "os.environ['VO_CMS_SW_DIR'], '%s/cms/cmssw' % idir) if os.path.isdir(rdir): for rel in 
os.listdir(rdir): releases.append('%s/%s' % (rel,", "msg = 'Your crab.cfg has been created, please edit it ' msg +=", "CMSSW release, e.g. install CMSSW_5_0_0\\n' msg += msg_green('cmsrel ') \\ + ' switch", "shell=True) else: run(\"cp %s %s\" % (src, dst)) else: try: status = copy_lfn(orig,", "and work area are set (should be set at cmsrel) rel = os.environ.get('CMSSW_VERSION',", "= stdout.replace('\\n', '') def github_issues(arg=None): \"\"\" Retrieve information about cmssh tickets, e.g. Examples:", "get_ipython() magic = ipython.find_line_magic('edmFileUtil') if magic: if arg[0] == '/': cmd = '-e", "'rm %s/file.root' % sename, 'ls %s' % sename, 'rm file.root', 'cp %s file1.root", "arg else: msg = 'Not supported apt command' raise Exception(msg) run(cmd) def cms_das(query):", "+= uinput + '\\n' except KeyboardInterrupt: break if not desc: msg = \"You", "\\ + ' show or switch to given CMSSW architecture, accept <list|all> args\\n'", "python software: ' + \\ msg_blue('pip <search|(un)install> <package>') return msg def cms_help(arg=None): \"\"\"", "print doc def cms_rm(arg): \"\"\" CMS rm command works with local files/dirs and", "= run_info(arg, debug) elif pat_release.match(arg): arg = arg.replace('release=', '') res = release_info(arg, debug)", "src.split(' ')[-1] if os.path.exists(orig) and not pat.match(dst): if background: cmd = 'cp %s'", "pkg in ['FWCore', 'DataFormats']: pdir = '%s/%s' % (idir, pkg) if os.path.exists(pdir): shutil.rmtree(pdir)", "'').replace('run=', '') res = run_lumi_info(arg, debug) def cms_json(arg): \"Print or set location of", "is installed on user system rel_dir = '%s/cms/cmssw/%s' % (os.environ['SCRAM_ARCH'], rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'],", "-cert %s\" % (key, cert) run(cmd) userdn = os.environ.get('USER_DN', '') if not userdn:", "= execmd(cmd) if stderr: print \"While executing cmd=%s\" % cmd print_warning(stderr) rootsys =", "command') return print_info(\"Set HTTP debug level to %s\" % arg) 
os.environ['HTTPDEBUG'] = arg", "# run bootstrap command in subprocess.call since it invokes # wget/curl and it", "validate_dbs_instance from cmssh.cmsfs import release_info, run_lumi_info from cmssh.github import get_tickets, post_ticket from cmssh.cms_urls", "CRAB command, help is available at https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\" msg = \\ 'CRAB FAQ:", "= os.path.realpath('%s/CMSSW' % root) path = '%s/%s/cms/cmssw/%s' % (base, rel_arch, rel) os.environ['CMSSW_BASE'] =", "return # check existence of crab.cfg crab_dir = os.path.join(work_area, 'crab') crab_cfg = os.path.join(crab_dir,", "install area' print msg return # check if given release/architecture is in place", "given dataset Examples: cmssh> config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\" if arg: arg = arg.strip() if", "def cms_ls(arg): \"\"\" cmssh ls command lists local files/dirs/CMS storate elements or CMS", "res = release_info(release=None, rfilter=arg) RESMGR.assign(res) releases = [str(r) for r in res] releases", "0 res = das_client(host, query, idx, limit, debug, 'json') RESMGR.assign([res]) pprint.pprint(res) def cms_vomsinit(_arg=None):", "setups CMSSW environment and creates user based directory structure. 
Examples: cmssh> cmsrel #", "os.path.exists(dst): print_error('File %s does not exists' % dst) else: raise Exception('Not implemented yet')", "os.environ['SCRAM_ARCH'] cmd = 'sh -x %s/bootstrap.sh setup -path %s -arch %s' % (swdir,", "0'] cmd_list += ['ls', 'mkdir ttt', 'ls -l', 'rmdir ttt', 'ls'] cmd_list +=", "== 'all' or arg == 'list': if arg == 'all': print 'CMSSW architectures:'", "= arg.replace('release=', '') res = release_info(arg, debug) elif startswith: msg = 'No pattern", "\"\"\" cmssh command to run ROOT within cmssh Examples: cmssh> root -l \"\"\"", "options = src.split(' ') if len(options) > 1 and options[0] == '-f': overwrite", "CMSSW architectures (aka SCRAM_ARCH)\" archs = [a for a in tc_architectures(arch_type)] return archs", "= os.path.join(root, 'cmssh/DEMO') with open(path, 'r') as demo_file: print demo_file.read() def results(): \"\"\"Return", "' search CMS meta-data (query DBS/Phedex/SiteDB)\\n' msg += msg_green('dbs_instance') \\ + ' show/set", "% val print msg def dbs_instance(arg=None): \"\"\" cmssh command to show or set", "{\"190704\":[1,2,3]}', 'find lumi {190704:[1,2,3]}'] cmd_list += ['find config dataset=%s' % dataset2] cmd_list +=", "if not os.path.isdir(pkg_dir): cmd = '' return cmd def cms_root(arg): \"\"\" cmssh command", "is %s\" % DEBUG.level) def debug_http(arg): \"\"\" Show or set HTTP debug flag.", "execute CMSSW cmsRun command. Requires cmsrel to setup CMSSW environment. 
\"\"\" cmd =", "rel_arch # setup environment cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd()) if not os.path.isdir(cmssw_dir): os.makedirs(cmssw_dir) root", "site}) elif pat_user.match(arg): user = arg.replace('user=', '') print_info('Dashboard information, user=%s' % user) res", "= 'Not supported apt command' raise Exception(msg) run(cmd) def cms_das(query): \"\"\" cmssh command", "= '%s root -l %s' % (pkgs_init, arg.strip()) run(cmd) def cms_xrdcp(arg): \"\"\" cmssh", "cmd = 'source `find %s/%s/external/apt -name init.sh | tail -1`; ' \\ %", "# wget/curl and it can be spawned into serate process, therefore # subprocess.Popen", "= get_ipython() rdir = '%s/bin/%s' % (rel_dir, rel_arch) reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir) for", "else: ipth.debug = True # CMSSW commands def bootstrap(arch): \"Bootstrap new architecture\" swdir", "msg_blue('EOF') + ' and hit ' + msg_blue('Enter') + '\\n' print msg while", "arch in cms_architectures(): rel_dir = '%s/cms/cmssw/%s' % (arch, rel) if os.path.isdir(os.path.join(os.environ['VO_CMS_SW_DIR'], rel_dir)): rel_arch", "'find lumi {190704:[1,2,3]}'] cmd_list += ['find config dataset=%s' % dataset2] cmd_list += ['du", "name = 'etc/profile.d/init.sh' script = os.path.join(os.path.join(apt_dir, dirs[-1]), name) return script def cms_install(rel): \"\"\"", "cmssh> ls T3_US_Cornell:/store/user/valya cmssh> ls run=160915 \"\"\" arg = arg.strip() res = []", "% idir) if os.path.isdir(rdir): for rel in os.listdir(rdir): releases.append('%s/%s' % (rel, idir)) if", "msg_green('read ') \\ + ' read URL/local file content\\n' msg += msg_green('root ')", "except: traceback.print_exc() def cms_ls(arg): \"\"\" cmssh ls command lists local files/dirs/CMS storate elements", "with provided query and returns results in JSON data format Examples: cmssh> das_json", "email: msg = \"You did your email address\" print_error(msg) return desc = ''", "id cmssh> ticket new # post new ticket from cmssh # or post", "block_info, 
file_info, site_info, run_info from cmssh.cmsfs import CMSMGR, apply_filter, validate_dbs_instance from cmssh.cmsfs import", "lookup(arg) else: cmd = 'du ' + arg cmd = cmd.strip() subprocess.call(cmd, shell=True)", "pattern '%s' does not exist on local filesystem or in DBS\" % arg", "applies the following options -rfc -voms cms:/cms -key <userkey.pem> -cert <usercert.pem> \"\"\" cert", "status = rm_lfn(arg, verbose=debug) print_status(status) else: if not os.path.exists(dst): print_error('File %s does not", "os.makedirs(crab_dir) os.chdir(crab_dir) if not os.path.isfile(crab_cfg): msg = 'No crab.cfg file found in %s'", "get_ipython().magics_manager.lsmagic() cmds = [k for k, v in mdict['line'].items() if v.func_name.find('cms_')!=-1] cmds.sort() for", "releases: print rel else: msg = \"\\nYou don't have yet CMSSW release installed", "if debug and access2file(fname): with open(fname, 'r') as cms_json: print cms_json.read() def integration_tests(_arg):", "elif pat_run.match(arg): arg = arg.replace('run=', '') res = run_info(arg, debug) elif pat_release.match(arg): arg", "print \"Switch to %s DBS instance\" % arg else: print \"Invalid DBS instance\"", "arg[0] == '/': cmd = '-e -f file:///%s' % fname else: cmd =", "dir\") if os.path.exists(arg): run(\"rmdir %s\" % arg) else: try: status = rmdir(arg, verbose=debug)", "os.environ['VO_CMS_SW_DIR'] arch = os.environ['SCRAM_ARCH'] cmd = 'sh -x %s/bootstrap.sh setup -path %s -arch", "(base, rel_arch, rel) os.environ['CMSSW_BASE'] = os.path.join(cmssw_dir, rel) os.environ['CMSSW_RELEASE_BASE'] = path for pkg in", "% (path, lib, rel_arch) dst = '%s/install/lib/release_%s' % (root, lib) if os.path.islink(dst): os.remove(dst)", "pat_run, pat_se, pat_user from cmssh.tagcollector import architectures as tc_architectures from cmssh.results import RESMGR", "os.environ['CMSSH_ROOT'] for idir in ['external', 'lib', 'root']: pdir = os.path.join(path, 'install/lib/release_%s' % idir)", "arch break if not rel_arch: msg = 'Release ' + 
msg_red(rel) msg +=", "os.path.join(crab_dir, 'crab.cfg') if not os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir) if not os.path.isfile(crab_cfg): msg = 'No", "+ orig_arg run(cmd, shell=True) elif pat_se.match(arg): arg = arg.replace('site=', '') res = list_se(arg,", "+ msg_green(', '.join(releases)) print msg return cmd = \"eval `scramv1 runtime -sh`; %s\"", "run, release, file). Examples: cmssh> ls # UNIX command cmssh> ls -l local_file", "run_info(arg, debug) elif pat_release.match(arg): arg = arg.replace('release=', '') res = release_info(arg, debug) elif", "run=160915 cmssh> info local_file.root Please note: to enable access to RunSummary service please", "local files/dirs or CMS storate elements. Examples: cmssh> cp file1 file2 cmssh> cp", "parameters either <list> or <all> Examples: cmssh> releases # show installed CMSSW releases", "-*- #pylint: disable-msg=W0702 \"\"\" Set of UNIX commands, e.g. ls, cp, supported in", "= arch if not os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch) return 'ok' else: msg =", "# post new ticket from cmssh # or post it at https://github.com/vkuznet/cmssh/issues/new \"\"\"", "= os.environ.get('USER_DN', None) if userdn: user = get_dashboardname(userdn) print_info('Dashboard information, user=%s' % user)", "'all': print name else: if check_os(name): print name else: cms_archs = cms_architectures('all') if", "command' print_error(msg) return # check existence of crab.cfg crab_dir = os.path.join(work_area, 'crab') crab_cfg", "SE:dir pattern run(\"mkdir %s\" % arg) else: try: status = mkdir(arg, verbose=debug) print_status(status)", "not arg or arg == 'list': print_info('Local data transfer') dqueue(arg) elif arg ==", "user=%s' % user) res = jobsummary({'user': user}) elif pat_site.match(arg): site = arg.replace('site=', '')", "list_se, dqueue from cmssh.utils import list_results, check_os, unsupported_linux, access2file from cmssh.utils import osparameters,", "crab_submit_remotely, 
crabconfig from cmssh.cern_html import read from cmssh.dashboard import jobsummary from cmssh.reqmgr import", "cmssh> cp T3_US_Cornell:/store/user/name/file.root T3_US_Omaha \"\"\" check_voms_proxy() background = False orig_arg = arg arg", "'global') print msg print '\\nAvailable DBS instances:' for inst in dbs_instances(): print inst", "available releases.\\n' msg += 'Use ' + msg_green('install %s' % rel) msg +=", "recent query\"\"\" return RESMGR def cms_commands(_arg=None): \"\"\" cmssh command which lists all registered", "as demo_file: print demo_file.read() def results(): \"\"\"Return results from recent query\"\"\" return RESMGR", "import get_data, send_email from cmssh.regex import pat_release, pat_site, pat_dataset, pat_block from cmssh.regex import", "for inst in dbs_instances(): print inst def cms_help_msg(): \"\"\"cmsHelp message\"\"\" msg = 'Available", "arg.replace('lfn=', '').replace('run=', '') res = run_lumi_info(arg, debug) def cms_json(arg): \"Print or set location", "print_status(status) else: if not os.path.exists(dst): print_error('File %s does not exists' % dst) else:", "command' print_info(msg) print \"cwd:\", os.getcwd() return if os.uname()[0] == 'Darwin' and arg ==", "'MessageService', 'Modules', 'ParameterSet', 'PythonUtilities', 'Services', 'Utilities'] for pkg in pkgs: link = '%s/src/FWCore/%s/python'", "be spawned into serate process, therefore # subprocess.Popen will not catch it run(cmd,", "system CMSSW install area' print msg return # check if given release/architecture is", "name.find('edm') == 0 and os.path.isfile(fname): # we use Magic(cmd).execute we don't need #", "\\ + ' read URL/local file content\\n' msg += msg_green('root ') + '", "+ ' list file/LFN, e.g. 
ls local.file or ls /store/user/file.root\\n' msg += msg_green('rm", "architectures for given platform \"\"\" if not arg: print \"Current architecture: %s\" %", "% sename, 'ls %s' % sename, ] cmd_list += ['cp %s file.root' %", "uinput + '\\n' except KeyboardInterrupt: break if not desc: msg = \"You did", "from cmssh.cms_objects import get_dashboardname def options(arg): \"\"\"Extract options from given arg string\"\"\" opts", "= arg arg = arg.strip() try: last_arg = arg.split(' ')[-1].strip() if last_arg ==", "transfer') dqueue(arg) elif arg == 'dashboard': userdn = os.environ.get('USER_DN', None) if userdn: user", "% (swdir, swdir, arch) if unsupported_linux(): cmd += ' -unsupported_distribution_hack' sdir = os.path.join(os.environ['CMSSH_ROOT'],", "debug(arg): # \"\"\" # debug shell command # \"\"\" # arg = arg.strip()", "\"\"\" Perform lookup of given query in CMS data-services. cmssh find command lookup", "\"\"\" cmssh command to install given CMSSW release. Examples: cmssh> install CMSSW_5_2_4 \"\"\"", "arg = arg[1:-1] for case in [arg, 'cms_'+arg, 'cms'+arg]: func = ipython.find_magic(case) if", "os.environ['CMSSH_ROOT'] idir = os.environ['CMSSH_INSTALL_DIR'] base = os.path.realpath('%s/CMSSW' % root) path = '%s/%s/cms/cmssw/%s' %", "be used with ipython magic functions. 
It holds given command and provide a", "= None if arg: arg = arg.strip() read(arg, out, debug) def cms_releases(arg=None): \"\"\"", "% (path, pkg) dst = '%s/FWCore/%s' % (idir, pkg) os.symlink(link, dst) link =", "in res] releases = list(set(releases)) releases.sort() for rel in releases: print rel installed_releases()", "given architecture\" apt_dir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], '%s/external/apt' % arch) dirs = os.listdir(apt_dir) dirs.sort()", "status = item[1] if check_os(rel_arch): output.append((rel_arch, status)) return output def check_release_arch(rel): \"Check release/architecture\"", "release/architecture is in place status = check_release_arch(rel) if status != 'ok': msg =", "'cp %s' % orig_arg subprocess.call(cmd, shell=True) else: run(\"cp %s %s\" % (src, dst))", "ipth.debug) else: if arg == 0 or arg == '0': ipth.debug = False", "os.listdir(rdir): releases.append('%s/%s' % (rel, idir)) if releases: releases.sort() print \"\\nInstalled releases:\" for rel", "in CMS cmsenv is an alias to: eval `scramv1 runtime -sh`' msg =", "or arg == 'list': print_info('Local data transfer') dqueue(arg) elif arg == 'dashboard': userdn", "cmssh> find run=160915 cmssh> find lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> find lumi run=190704 cmssh> find", "from cmssh.regex import pat_lfn, pat_run, pat_se, pat_user from cmssh.tagcollector import architectures as tc_architectures", "cmd = 'ls ' + orig_arg run(cmd, shell=True) if res: RESMGR.assign(res) list_results(res, debug=True,", "for given platform \"\"\" if not arg: print \"Current architecture: %s\" % os.environ['SCRAM_ARCH']", "directory structure. Examples: cmssh> cmsrel # reset CMSSW environment to cmssh one cmssh>", "+ ' setup your proxy (aka voms-proxy-init)\\n' msg += msg_green('vomsinfo ') \\ +", "cmssh.das import das_client from cmssh.url_utils import get_data, send_email from cmssh.regex import pat_release, pat_site,", "\"\"\" Class to be used with ipython magic functions. 
It holds given command", "for %s platform\" % platform() res = release_info(release=None, rfilter=arg) RESMGR.assign(res) releases = [str(r)", "release is not officially supported under %s' \\ % (rel, arch) print_warning(msg) if", "-rfc -voms cms:/cms -key %s -cert %s\" % (key, cert) run(cmd) userdn =", "'/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run = 160915 sename = 'T3_US_Cornell:/store/user/valya' cmd_list = ['pager 0', 'debug_http 0']", "Show or set HTTP debug flag. Default is 0. \"\"\" arg = arg.strip()", "limit, debug, 'plain') def cms_das_json(query): \"\"\" cmssh command which queries DAS data-service with", "def cms_json(arg): \"Print or set location of CMS JSON file\" if arg: if", "cmssh> jobs cmssh> jobs list cmssh> jobs site=T2_US_UCSD cmssh> jobs dashboard cmssh> jobs", "in ['file1.root', 'file2.root']: if os.path.isfile(fname): os.remove(fname) lfn = \\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/193/686/58802521-EF9A-E111-9EE7-BCAEC518FF50.root' lfn2 = \\", "stdout.replace('\\n', '') def github_issues(arg=None): \"\"\" Retrieve information about cmssh tickets, e.g. Examples: cmssh>", "du # UNIX command cmssh> du T3_US_Cornell \"\"\" arg = arg.strip() if pat_site.match(arg):", "= get_ipython().debug except: debug = 0 if debug and access2file(fname): with open(fname, 'r')", "installed with system CMSSW install area' print msg return # check if given", "os.environ['DEFAULT_ROOT'] if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path, 'lib') cmd = '%s/xrdcp %s' % (os.path.join(root_path,", "command # \"\"\" # arg = arg.strip() # if arg: # print_info(\"Set debug", "on local filesystem or in DBS\" % arg print_error(msg) elif pat_run.match(arg): arg =", "info run=160915 cmssh> info local_file.root Please note: to enable access to RunSummary service", "and options[0] == '-f': overwrite = True else: overwrite = False except: traceback.print_exc()", "which queries DAS data-service with provided query. 
Examples: cmssh> das dataset=/ZMM* \"\"\" host", "system modules import os import re import sys import time import json import", "created, please edit it ' msg += 'appropriately and re-run crab command' print_info(msg)", "'cp file.root %s' % sename, 'ls %s' % sename, 'rm %s/file.root' % sename,", "CMSSW architecture. Optional parameters either <all> or <list> Examples: cmssh> arch # show", "architectures as tc_architectures from cmssh.results import RESMGR from cmssh.auth_utils import PEMMGR, working_pem from", "' list available CMSSW releases, accepts <list|all> args\\n' msg += msg_green('install ') \\", "+ '='): startswith = item if os.path.isfile(orig_arg) or os.path.isdir(orig_arg): cmd = 'ls '", "function, e.g.\\n' \\ % msg_blue('results()') msg += ' find dataset=/*Zee*\\n' msg += '", "sename, 'rmdir %s/foo' % sename, 'ls %s' % sename, ] cmd_list += ['cp", "asked for -f options = src.split(' ') if len(options) > 1 and options[0]", "architectures:' for item in archs: print item elif arg == 'all' or arg", "cms_architectures(arch_type=None): \"Return list of CMSSW architectures (aka SCRAM_ARCH)\" archs = [a for a", "officially supported under %s' \\ % (rel, arch) print_warning(msg) if arch != os.environ['SCRAM_ARCH']:", "{key: {'content': desc}} res = post_ticket(key, files) if res.has_key('html_url'): print_status('New gist ticket %s'", "of UNIX commands, e.g. ls, cp, supported in cmssh. 
\"\"\" # system modules", "\"\"\" ipython = get_ipython() rel = rel.strip() if not rel or rel in", "= 'https://cmsweb.cern.ch' idx = 0 limit = 0 debug = 0 das_client(host, query,", "environment will be set for you' print_info(msg) def cmsrel(rel): \"\"\" cmssh release setup", "msg_green('das_json ') \\ + ' query DAS and return data in JSON format\\n'", "arg if orig_arg.find('|') != -1: arg, flt = orig_arg.split('|', 1) arg = arg.strip()", "path for pkg in ['FWCore', 'DataFormats']: pdir = '%s/%s' % (idir, pkg) if", "get_ipython() rdir = '%s/bin/%s' % (rel_dir, rel_arch) reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir) for name", "msg += msg_green('read ') \\ + ' read URL/local file content\\n' msg +=", "arg or '-get' in arg: cmd = 'apt%s' % arg else: msg =", "arg = arg.replace('dataset=', '').replace('file=', '').replace('block=', '') arg = arg.replace('lfn=', '').replace('run=', '') res =", "rel script = get_apt_init(os.environ['SCRAM_ARCH']) cmd = 'source %s; apt-cache search %s | grep", "cmssh> find config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM cmssh> find run=160915 cmssh> find lumi dataset=/Photon/Run2012A-29Jun2012-v1/AOD cmssh> find", "to be used with ipython magic functions. 
It holds given command and provide", "cms+cmssw+%s ...\" % rel cmd = 'source %s; apt-get install cms+cmssw+%s' % (script,", "cmssh shell Examples: cmssh> xrdcp /a/b/c.root file:////tmp.file.root \"\"\" dyld_path = os.environ.get('DYLD_LIBRARY_PATH', None) root_path", "Examples: cmssh> cmsrel # reset CMSSW environment to cmssh one cmssh> cmsrel CMSSW_5_2_4", "release architecture os.environ['SCRAM_ARCH'] = rel_arch # setup environment cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd()) if", "% idir os.symlink(link, dst) for lib in ['external', 'lib']: link = '%s/%s/%s' %", "execmd(cmd) if stderr: print \"While executing cmd=%s\" % cmd print_warning(stderr) rootsys = stdout.replace('\\n',", "msg = \"DBS instance is set to: %s\" \\ % os.environ.get('DBS_INSTANCE', 'global') print", "\"\"\" cmshelp command Examples: cmssh> cmshelp cmssh> cmshelp commands cmssh> cmshelp ls \"\"\"", "crab.cfg crab_dir = os.path.join(work_area, 'crab') crab_cfg = os.path.join(crab_dir, 'crab.cfg') if not os.path.isdir(crab_dir): os.makedirs(crab_dir)", "= os.environ.get('DYLD_LIBRARY_PATH', None) root_path = os.environ['DEFAULT_ROOT'] if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = os.path.join(root_path, 'lib') cmd", "ls run=160915 \"\"\" arg = arg.strip() res = [] try: debug = get_ipython().debug", "cmssh> lumi {190704:[1,2,3,4], 201706:[1,2,3,67]} \"\"\" try: debug = get_ipython().debug except: debug = 0", "= item args = '' else: cmd = split[0] args = split[-1] mgr.run_line_magic(cmd,", "to source pkg environment\" pkg_dir = '%s/%s/%s' \\ % (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], pkg_dir) cmd", "path = os.environ['CMSSH_ROOT'] for idir in ['external', 'lib', 'root']: pdir = os.path.join(path, 'install/lib/release_%s'", "\"Create CMS command to source pkg environment\" pkg_dir = '%s/%s/%s' \\ % (os.environ['VO_CMS_SW_DIR'],", "idir in os.listdir(os.environ['VO_CMS_SW_DIR']): if idir.find(osarch) != -1: rdir = os.path.join(\\ os.environ['VO_CMS_SW_DIR'], 
'%s/cms/cmssw' %", "file2.root &' % lfn2, 'ls'] cmd_list += ['find user=oliver', 'jobs list', 'jobs user=AikenOliver']", "msg += ' command to list available releases.\\n' msg += 'Use ' +", "'cms-sh' ipython.prompt_manager.in_template = '%s|\\#> ' % prompt return # check if given release", "mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\" if not arg: return try: debug = get_ipython().debug except:", "Examples: cmssh> cp file1 file2 cmssh> cp file.root T3_US_Cornell:/store/user/name cmssh> cp /store/mc/file.root T3_US_Cornell:/store/user/name", "which executes voms-proxy-init on behalf of the user Examples: cmssh> vomsinit By default", "link = '%s/src/DataFormats/FWLite/python' % path dst = '%s/DataFormats/FWLite' % idir os.symlink(link, dst) for", "please ensure that your usercert.pem is mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\" if not arg:", "cmd = '%s %s' % (self.cmd, args.strip()) run(cmd) def subprocess(self, args=''): \"Execute given", "arg: arg = arg.strip() if pat_dataset.match(arg): reqmgr(arg.replace('dataset=', '')) def cms_lumi(arg): \"\"\" Return lumi", "element. 
Examples: cmssh> rmdir foo cmssh> rmdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try:", "Optional parameters either <list> or <all> Examples: cmssh> releases # show installed CMSSW", "msg_blue('cmsrel') + ' command' print_error(msg) return # check existence of crab.cfg crab_dir =", "set release architecture os.environ['SCRAM_ARCH'] = rel_arch # setup environment cmssw_dir = os.environ.get('CMSSW_RELEASES', os.getcwd())", "= os.environ['CMSSH_INSTALL_DIR'] base = os.path.realpath('%s/CMSSW' % root) path = '%s/%s/cms/cmssw/%s' % (base, rel_arch,", "= post_ticket(key, files) if res.has_key('html_url'): print_status('New gist ticket %s' % res['html_url']) title =", "= {'release': rel} releases = get_data(tc_url('py_getReleaseArchitectures'), args) output = [] for item in", "'apt-get update; ' msg = 'Initialize %s apt repository ...' % arch run(cmd,", "'='): startswith = item if os.path.isfile(orig_arg) or os.path.isdir(orig_arg): cmd = 'ls ' +", "arg = arg.strip() if pat_site.match(arg): lookup(arg) else: cmd = 'du ' + arg", "%s' % sename, 'rm file.root', 'cp %s file1.root &' % lfn, 'cp %s", "run(cmd) if dyld_path: os.environ['DYLD_LIBRARY_PATH'] = dyld_path #def debug(arg): # \"\"\" # debug shell", "% (self.cmd, args.strip()) subprocess.call(cmd, shell=True) def installed_releases(): \"Print a list of releases installed", "flt = None if arg: arg = arg.strip() if not arg or arg", "query, idx, limit, debug, 'json') RESMGR.assign([res]) pprint.pprint(res) def cms_vomsinit(_arg=None): \"\"\" cmssh command which", "description\" print_error(msg) return if not user_input('Send this ticket', default='N'): print_info('Aborting your action') return", "pkg_dir if not os.path.isdir(pkg_dir): cmd = '' return cmd def cms_root(arg): \"\"\" cmssh", "\"\"\" arg = arg.strip() if arg: if arg not in ['0', '1']: print_error('Please", "{'content': desc}} res = post_ticket(key, files) if res.has_key('html_url'): print_status('New gist ticket %s' %", "provide correct 
release name,' msg += ' e.g. CMSSW_X_Y_Z<_patchN>' print msg return #", "known CMS releases, including online, tests, etc. \"\"\" if arg: print \"CMSSW releases", "within cmssh, proceed' \\ % (rel, arch) if user_input(msg, default='N'): os.environ['SCRAM_ARCH'] = arch", "your usercert.pem is mapped at https://ca.cern.ch/ca/Certificates/MapCertificate.aspx \"\"\" if not arg: return try: debug", "not in cms_archs: msg = 'Wrong architecture, please choose from the following list\\n'", "if isinstance(res, dict): ticket = pprint.pformat(res) else: ticket = res to_user = base64.decodestring('dmt1em5ldEBnbWFpbC5jb20=\\n')", "pat_block.match(arg): arg = arg.replace('block=', '') res = block_info(arg, debug) elif pat_dataset.match(arg): arg =", "rel = rel.strip() pat = pat_release if not pat.match(rel): msg = 'Fail to", "= list(set(releases)) releases.sort() for rel in releases: print rel installed_releases() def pkg_init(pkg_dir): \"Create", "dataset_info(arg, debug) except IndexError: msg = \"Given pattern '%s' does not exist on", "default='N'): os.environ['SCRAM_ARCH'] = arch if not os.path.isdir(\\ os.path.join(os.environ['VO_CMS_SW_DIR'], arch)): bootstrap(arch) return 'ok' else:", "installed on user system rel_arch = None for arch in cms_architectures(): rel_dir =", "jobs list cmssh> jobs site=T2_US_UCSD cmssh> jobs dashboard cmssh> jobs user=my_cms_user_name \"\"\" res", "site, which lists jobs at given site - dashboard, which lists jobs of", "user=AikenOliver'] cmd_list += ['releases list', 'arch list', 'jobs', 'ls'] cmd_list += ['read https://twiki.cern.ch/twiki/bin/viewauth/CMS/SWGuideLHEtoEOS']", "if os.path.exists(dst) or len(glob.glob(dst)): cmd = \"rm %s\" % arg run(cmd) else: if", "files/dirs and CMS storate elements. 
Examples: cmssh> rm local_file cmssh> rm -rf local_dir", "user_input(msg, default='N'): with open('crab.cfg', 'w') as config: config.write(crabconfig()) msg = 'Your crab.cfg has", "dst = '%s/FWCore/%s' % (idir, pkg) os.symlink(link, dst) link = '%s/src/DataFormats/FWLite/python' % path", "file\" if arg: if access2file(arg): os.environ['CMS_JSON'] = arg print_info('CMS_JSON: %s' % arg) else:", "will be posted as anonymous gist ticket' print_info(msg) if not user_input('Proceed', default='N'): return", "is not installed within cmssh, proceed' \\ % (rel, arch) if user_input(msg, default='N'):", "is allowed for %s look-up' % startswith print_error(msg) else: cmd = 'ls '", "given user Examples: cmssh> jobs cmssh> jobs list cmssh> jobs site=T2_US_UCSD cmssh> jobs", "pkgs: link = '%s/src/FWCore/%s/python' % (path, pkg) dst = '%s/FWCore/%s' % (idir, pkg)", "limit = 0 debug = 0 das_client(host, query, idx, limit, debug, 'plain') def", "os import re import sys import time import json import glob import shutil", "dyld_path: os.environ['DYLD_LIBRARY_PATH'] = dyld_path #def debug(arg): # \"\"\" # debug shell command #", "# in CMS cmsenv is an alias to: eval `scramv1 runtime -sh`' msg", "front of edm one, since # execute method will run in current shell", "% fname ipython.run_line_magic('edmFileUtil', cmd) if debug: if ipython.find_line_magic('edmDumpEventContent'): ipython.run_line_magic('edmDumpEventContent', fname) else: cms_ls(arg) def", "msg = 'Available cmssh commands:\\n' msg += msg_green('find ') \\ + ' search", "if os.path.isdir(pdir): shutil.rmtree(pdir) os.makedirs(pdir) # Set cmssh prompt prompt = 'cms-sh' ipython.prompt_manager.in_template =", "res] releases = list(set(releases)) releases.sort() for rel in releases: print rel installed_releases() def", "userdn: cmd = \"voms-proxy-info -identity\" stdout, stderr = execmd(cmd) os.environ['USER_DN'] = stdout.replace('\\n', '')", "did not provide bug description\" print_error(msg) return if not user_input('Send this 
ticket', default='N'):", "given query in CMS data-services. cmssh find command lookup given query in CMS", "from recent query\"\"\" return RESMGR def cms_commands(_arg=None): \"\"\" cmssh command which lists all", "elif pat_release.match(arg): arg = arg.replace('release=', '') res = release_info(arg, debug) elif startswith: msg", "cmssh command. Examples: cmssh> du # UNIX command cmssh> du T3_US_Cornell \"\"\" arg", "| grep -v -i fwlite' % (script, rel) run(cmd) if rel.lower().find('patch') != -1:", "user=%s' % user) res = jobsummary({'user': user}) if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt)", "= 'Please check the you provide correct release name,' msg += ' e.g.", "pat_dataset.match(arg): arg = arg.replace('dataset=', '') try: res = dataset_info(arg, debug) except IndexError: msg", "= arg.strip() if arg: if validate_dbs_instance(arg): os.environ['DBS_INSTANCE'] = arg print \"Switch to %s", "0 orig_arg = arg if orig_arg.find('>') != -1: arg, out = orig_arg.split('>', 1)", "cmssh> info local_file.root Please note: to enable access to RunSummary service please ensure", "\"\"\" CMS rm command works with local files/dirs and CMS storate elements. Examples:", "job queue or provides information about jobs at give site or for given", "system\" _osname, osarch = osparameters() releases = [] for idir in os.listdir(os.environ['VO_CMS_SW_DIR']): if", "not rel or not work_area: msg = 'In order to run crab command", "Retrieve information about cmssh tickets, e.g. 
Examples: cmssh> tickets # list all cmssh", "xrdcp via cmssh shell Examples: cmssh> xrdcp /a/b/c.root file:////tmp.file.root \"\"\" dyld_path = os.environ.get('DYLD_LIBRARY_PATH',", "%s\" % arg) else: try: status = mkdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def", "root = os.environ.get('CMSSH_ROOT') path = os.path.join(root, 'cmssh/DEMO') with open(path, 'r') as demo_file: print", "it at https://github.com/vkuznet/cmssh/issues/new \"\"\" if arg == 'new': msg = 'You can post", ": ' + msg_blue('commands') msg += '\\ncmssh command help : ' + msg_blue('cmshelp", "user, which lists jobs of given user Examples: cmssh> jobs cmssh> jobs list", "releases: print rel installed_releases() def pkg_init(pkg_dir): \"Create CMS command to source pkg environment\"", "to enable access to RunSummary service please ensure that your usercert.pem is mapped", "= get_ipython().magics_manager.lsmagic() cmds = [k for k, v in mdict['line'].items() if v.func_name.find('cms_')!=-1] cmds.sort()", "'ls'] cmd_list += ['find user=oliver', 'jobs list', 'jobs user=AikenOliver'] cmd_list += ['releases list',", "files) if res.has_key('html_url'): print_status('New gist ticket %s' % res['html_url']) title = 'cmssh gist", "if arg: if arg not in ['0', '1']: print_error('Please provide 0/1 for debug_http", "is DBS global instance\\n' msg += msg_green('mkdir/rmdir ') + ' mkdir/rmdir command, '", "action') return key = '<KEY>' % time.strftime(\"%Y-%m-%d %H:%M:%S\", time.gmtime(time.time())) files = {key: {'content':", "debug = 0 orig_arg = arg if orig_arg.find('|') != -1: arg, flt =", "(rel, os.getcwd()) def cmsexe(cmd): \"\"\" Execute given command within CMSSW environment \"\"\" vdir", "crab %s' % arg cmsexe(cmd) def cmsrun(arg): \"\"\" cmssh command to execute CMSSW", "user Examples: cmssh> vomsinit By default it applies the following options -rfc -voms", "etc. 
\"\"\" if arg: print \"CMSSW releases for %s platform\" % platform() res", "msg_green('ls ') \\ + ' list file/LFN, e.g. ls local.file or ls /store/user/file.root\\n'", "dataset, block, run, release, file). Examples: cmssh> ls # UNIX command cmssh> ls", "rel_dir)): return 'ok' output = [] for arch, status in get_release_arch(rel): if not", "directory on local filesystem or remote CMS storage element. Examples: cmssh> mkdir foo", "subprocess.call since it invokes # wget/curl and it can be spawned into serate", "% (os.environ['VO_CMS_SW_DIR'], os.environ['SCRAM_ARCH'], rel) fix_so(idir) print \"Create user area for %s release ...\"", "= get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: cp <options> source_file", "subprocess.call(cmd, shell=True) else: run(\"cp %s %s\" % (src, dst)) else: try: status =", "CMSSH pager to %s\" % arg else: val = os.environ.get('CMSSH_PAGER', None) msg =", "if res: RESMGR.assign(res) list_results(res, debug=True, flt=flt) def cms_config(arg): \"\"\" Return configuration object for", "get_data, send_email from cmssh.regex import pat_release, pat_site, pat_dataset, pat_block from cmssh.regex import pat_lfn,", "\"CMSSW releases for %s platform\" % platform() res = release_info(release=None, rfilter=arg) RESMGR.assign(res) releases", "\"Execute apt commands\" if '-cache' in arg or '-get' in arg: cmd =", "\"\"\" try: debug = get_ipython().debug except: debug = 0 arg = arg.replace('dataset=', '').replace('file=',", "os.environ['CMSSH_PAGER'] = arg print \"Set CMSSH pager to %s\" % arg else: val", "debug_http command') return print_info(\"Set HTTP debug level to %s\" % arg) os.environ['HTTPDEBUG'] =", "'%s/src/FWCore/%s/python' % (path, pkg) dst = '%s/FWCore/%s' % (idir, pkg) os.symlink(link, dst) link", "os.path.isdir(crab_dir): os.makedirs(crab_dir) os.chdir(crab_dir) if not os.path.isfile(crab_cfg): msg = 'No crab.cfg file found in", "flt = orig_arg.split('|', 1) arg = arg.strip() else: flt = None if arg:", 
"output.append((rel_arch, status)) return output def check_release_arch(rel): \"Check release/architecture\" # check if given release", "and return data in JSON format\\n' msg += msg_green('jobs ') \\ + '", "= os.listdir(apt_dir) dirs.sort() name = 'etc/profile.d/init.sh' script = os.path.join(os.path.join(apt_dir, dirs[-1]), name) return script", "series of integration tests for cmssh\" for fname in ['file1.root', 'file2.root']: if os.path.isfile(fname):", "' + msg_blue('Enter') + '\\n' print msg while True: try: uinput = raw_input()", "if arg.strip() == 'commands': cms_commands() return ipython = get_ipython() if arg[0] == '('", "options from given arg string\"\"\" opts = [] for par in arg.split(): if", "-f file:///%s' % fname else: cmd = '-e -f %s' % fname ipython.run_line_magic('edmFileUtil',", "...\" % rel cmd = 'source %s; apt-get install cms+cmssw-patch+%s' % (script, rel)", "if len(options) > 1 and options[0] == '-f': overwrite = True else: overwrite", "try: debug = get_ipython().debug except: debug = 0 if not arg: print_error(\"Usage: rmdir", "cms_read(arg): \"\"\" cmssh command to read provided HTML page (by default output dumps", "run(cmd, sdir, msg=msg, debug=debug, shell=True) def get_release_arch(rel): \"Return architecture for given CMSSW release\"", "= arg.strip() # if arg: # print_info(\"Set debug level to %s\" % arg)", "DAS and return data in JSON format\\n' msg += msg_green('jobs ') \\ +", "send_email(to_user, email, title, ticket) else: res = get_tickets(arg) RESMGR.assign(res) pprint.pprint(res) def demo(_arg=None): \"Show", "https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq \"\"\" msg = \\ 'CRAB FAQ: https://twiki.cern.ch/twiki/bin/view/CMSPublic/SWGuideCrabFaq' print_info(msg) # check if release", "verbose=debug) print_status(status) except: traceback.print_exc() def cms_mkdir(arg): \"\"\" cmssh mkdir command creates directory on", "base = os.path.realpath('%s/CMSSW' % root) path = '%s/%s/cms/cmssw/%s' % (base, rel_arch, rel) 
os.environ['CMSSW_BASE']", "= 'source $CRAB_ROOT/crab.sh; crab %s' % arg cmsexe(cmd) def cmsrun(arg): \"\"\" cmssh command", "% arg else: val = os.environ.get('CMSSH_PAGER', None) msg = \"cmssh pager is set", "commands\" if '-cache' in arg or '-get' in arg: cmd = 'apt%s' %", "#pylint: disable-msg=W0702 \"\"\" Set of UNIX commands, e.g. ls, cp, supported in cmssh.", "args.strip()) subprocess.call(cmd, shell=True) def installed_releases(): \"Print a list of releases installed on a", "= pkg_init('lcg/root') pkgs_init = '%s %s %s' % (pcre_init, gcc_init, root_init) cmd =", "= '\\nCheck release architecture status: %s' % status print msg return print \"Searching", "status = mkdir(arg, verbose=debug) print_status(status) except: traceback.print_exc() def cms_ls(arg): \"\"\" cmssh ls command", "+ arg cmd = cmd.strip() subprocess.call(cmd, shell=True) def lookup(arg): \"\"\" Perform lookup of", "\"\"\" cmssh jobs command lists local job queue or provides information about jobs", "subprocess(self, args=''): \"Execute given command in original shell environment\" cmd = '%s %s'", "new ticket from cmssh # or post it at https://github.com/vkuznet/cmssh/issues/new \"\"\" if arg", "supported apt command' raise Exception(msg) run(cmd) def cms_das(query): \"\"\" cmssh command which queries", "on behalf of the user Examples: cmssh> vomsinit By default it applies the", "rel} releases = get_data(tc_url('py_getReleaseArchitectures'), args) output = [] for item in releases: rel_arch", "rel + '/src')): os.chdir(os.path.join(cmssw_dir, rel + '/src')) else: os.chdir(cmssw_dir) cmd = \"scramv1 project", "config dataset=/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM \"\"\" if arg: arg = arg.strip() if pat_dataset.match(arg): reqmgr(arg.replace('dataset=', '')) def", "pkg_init(pkg_dir): \"Create CMS command to source pkg environment\" pkg_dir = '%s/%s/%s' \\ %", "CMSSW %s\" % rel run(cmd) os.chdir(os.path.join(rel, 'src')) # get ROOT from run-time environment", "cmssh command 
to install given CMSSW release. Examples: cmssh> install CMSSW_5_2_4 \"\"\" rel", "= jobsummary({'site': site}) elif pat_user.match(arg): user = arg.replace('user=', '') print_info('Dashboard information, user=%s' %", "msg = 'Release ' + msg_red(rel) msg += ' is not yet installed", "cms+cmssw-patch+%s' % (script, rel) else: print \"Installing cms+cmssw+%s ...\" % rel cmd =", "run_lumi_info(arg, debug) def cms_json(arg): \"Print or set location of CMS JSON file\" if", "will not catch it run(cmd, sdir, 'bootstrap.log', msg, debug, shell=True, call=True) cmd =", "platform\" % platform() res = release_info(release=None, rfilter=arg) RESMGR.assign(res) releases = [str(r) for r", "lists local job queue or provides information about jobs at give site or", "from cmssh.das import das_client from cmssh.url_utils import get_data, send_email from cmssh.regex import pat_release,", "cmds = [k for k, v in mdict['line'].items() if v.func_name.find('cms_')!=-1] cmds.sort() for key", "from given arg string\"\"\" opts = [] for par in arg.split(): if len(par)", "def cms_pager(arg=None): \"\"\" cmssh command to show or set internal pager Examples: cmssh>", "access2file from cmssh.utils import osparameters, check_voms_proxy, run, user_input from cmssh.utils import execmd, touch,", "cmssh> mkdir foo cmssh> mkdir T3_US_Cornell:/store/user/user_name/foo \"\"\" arg = arg.strip() try: debug =", "% rel script = get_apt_init(os.environ['SCRAM_ARCH']) cmd = 'source %s; apt-cache search %s |", "'').strip() if dst == '.': dst = os.getcwd() # check if src still", "environment and creates user based directory structure. 
Examples: cmssh> cmsrel # reset CMSSW", "= False except: traceback.print_exc() return try: debug = get_ipython().debug except: debug = 0", "/store/user/file.root\\n' msg += msg_green('rm ') + ' remove file/LFN, ' \\ + 'e.g.", "os.path.join(os.environ['HOME'], '.globus/usercert.pem') with working_pem(PEMMGR.pem) as key: run(\"voms-proxy-destroy\") cmd = \"voms-proxy-init -rfc -voms cms:/cms", "= cmd def execute(self, args=''): \"Execute given command in current shell environment\" cmd", "shell=True, call=True) cmd = 'source `find %s/%s/external/apt -name init.sh | tail -1`; '", "pat_se, pat_user from cmssh.tagcollector import architectures as tc_architectures from cmssh.results import RESMGR from", "releases.sort() print \"\\nInstalled releases:\" for rel in releases: print rel else: msg =", "get ROOT from run-time environment cmd = 'eval `scramv1 runtime -sh`; env |", "elif pat_dataset.match(arg): arg = arg.replace('dataset=', '') try: res = dataset_info(arg, debug) except IndexError:", "check if given release/architecture is in place status = check_release_arch(rel) if status !=", "arg.replace('file=', '') if arg and os.path.isfile(fname): mtype = mimetypes.guess_type(arg) if mtype[0]: print \"Mime", "tail -1`; ' \\ % (swdir, arch) cmd += 'apt-get install external+fakesystem+1.0; '", "% (key, cert) run(cmd) userdn = os.environ.get('USER_DN', '') if not userdn: cmd =", "ticket from cmssh # or post it at https://github.com/vkuznet/cmssh/issues/new \"\"\" if arg ==", "\\ '/store/data/Run2012A/ElectronHad/AOD/PromptReco-v1/000/190/450/84087548-ED80-E111-A737-0025901D5D80.root' dataset = '/PhotonHad/Run2011A-PromptReco-v1/RECO' dataset2 = '/SUSY_LM9_sftsht_8TeV-pythia6/Summer12-START50_V13-v1/GEN-SIM' run = 160915 sename =", "arg[0] == '(' and arg[-1] == ')': arg = arg[1:-1] for case in", "'\\nYou are not allowed to install new release, ' msg += 'since cmssh", "cmssh> das dataset=/ZMM* \"\"\" host = 'https://cmsweb.cern.ch' idx = 0 limit = 0", "os.uname()[0] == 
'Darwin' and arg == '-submit': crab_submit_remotely(rel, work_area) return cmd = 'source", "and osarch == 'ia32': return 'OSX/ia32 is not supported in CMSSW' return 'no", "import dbs_instances, tc_url from cmssh.das import das_client from cmssh.url_utils import get_data, send_email from", "re-run crab command' print_info(msg) print \"cwd:\", os.getcwd() return if os.uname()[0] == 'Darwin' and", "one, since # execute method will run in current shell environment # old", "name) # Set cmssh prompt ipython.prompt_manager.in_template = '%s|\\#> ' % rel # final", "ipython.find_magic(case) if func: doc = func.func_doc break else: doc = 'Documentation is not", "of current user - user, which lists jobs of given user Examples: cmssh>", "\\ % (os.environ['SCRAM_ARCH'], arch) print_warning(msg) msg = '\\n%s/%s is not installed within cmssh,", "RESMGR.assign(res) releases = [str(r) for r in res] releases = list(set(releases)) releases.sort() for", "arg else: val = os.environ.get('CMSSH_PAGER', None) msg = \"cmssh pager is set to:", "'Unable to identify CMSSW environment, please run first: ' msg = msg_red(msg) msg", "print_error(\"Usage: cp <options> source_file target_{file,directory}\") pat = pat_se orig = src.split(' ')[-1] if", "given ticket id cmssh> ticket new # post new ticket from cmssh #", "if arg not in ['0', '1']: print_error('Please provide 0/1 for debug_http command') return", "cmssh> find lumi run=190704 cmssh> find user=oliver List of supported entities: dataset, block,", "') + ' invoke ROOT\\n' msg += msg_green('du ') \\ + ' display", "= arg.strip() read(arg, out, debug) def cms_releases(arg=None): \"\"\" List available CMS releases. 
Optional", "'r') as demo_file: print demo_file.read() def results(): \"\"\"Return results from recent query\"\"\" return", "swdir = os.environ['VO_CMS_SW_DIR'] arch = os.environ['SCRAM_ARCH'] cmd = 'sh -x %s/bootstrap.sh setup -path", "'%s/bin/%s' % (rel_dir, rel_arch) reldir = os.path.join(os.environ['VO_CMS_SW_DIR'], rdir) for name in os.listdir(reldir): fname", "# CMSSW commands def bootstrap(arch): \"Bootstrap new architecture\" swdir = os.environ['VO_CMS_SW_DIR'] arch =", "bootstrap command in subprocess.call since it invokes # wget/curl and it can be", "\"\"\" lookup(arg) def cms_du(arg): \"\"\" cmssh disk utility cmssh command. Examples: cmssh> du", "is not officially supported under %s' \\ % (rel, arch) print_warning(msg) if arch", "method to execute it in a shell \"\"\" def __init__(self, cmd): self.cmd =", "# Set cmssh prompt ipython.prompt_manager.in_template = '%s|\\#> ' % rel # final message", "= \\ ['se', 'site', 'lfn', 'dataset', 'block', 'run', 'release', 'file'] for item in", "= None startswith = None entities = \\ ['se', 'site', 'lfn', 'dataset', 'block',", "traceback import subprocess # cmssh modules from cmssh.iprint import msg_red, msg_green, msg_blue from", "about given CMS entity, ' \\ + 'e.g. info run=160915\\n' msg += msg_green('das" ]
[ "more, read - https://arxiv.org/pdf/1812.02091.pdf \"\"\" def __init__(self, histograms, use_gpu=True, cost_matrix=None): self.use_gpu = use_gpu", "the distance must be calculated self.histograms = histograms # this is the cost", "ACT_EMD: \"\"\" EMD stands for Earth Mover's Distance - Mallows distance or 1st", "distance must be calculated self.histograms = histograms # this is the cost matrix", "of iterations of ACT approaches infinity, ACT becomes the same as ICT. For", "unit of 'dirt' from coordinate i to j self.cost_matrix = cost_matrix def act(self):", "distance between two probability distributions. ACT or Approximate Constrained Transfers is a linear", "__init__(self, histograms, use_gpu=True, cost_matrix=None): self.use_gpu = use_gpu # these are the histograms to", "which the distance must be calculated self.histograms = histograms # this is the", "the cost matrix showing the cost of # transporting one unit of 'dirt'", "two distributions, is a measure of the distance between two probability distributions. ACT", "is a measure of the distance between two probability distributions. ACT or Approximate", "For more, read - https://arxiv.org/pdf/1812.02091.pdf \"\"\" def __init__(self, histograms, use_gpu=True, cost_matrix=None): self.use_gpu =", "the cost of # transporting one unit of 'dirt' from coordinate i to", "ACT or Approximate Constrained Transfers is a linear compelixty approximation of the ICT", "to which the distance must be calculated self.histograms = histograms # this is", "distributions, is a measure of the distance between two probability distributions. ACT or", "histograms, use_gpu=True, cost_matrix=None): self.use_gpu = use_gpu # these are the histograms to which", "a symmetric lower bound approximation of the Eath Mover's Distance Note that as", "a measure of the distance between two probability distributions. 
ACT or Approximate Constrained", "read - https://arxiv.org/pdf/1812.02091.pdf \"\"\" def __init__(self, histograms, use_gpu=True, cost_matrix=None): self.use_gpu = use_gpu #", "probability distributions. ACT or Approximate Constrained Transfers is a linear compelixty approximation of", "which is a symmetric lower bound approximation of the Eath Mover's Distance Note", "Approximate Constrained Transfers is a linear compelixty approximation of the ICT or Iterative", "cost matrix showing the cost of # transporting one unit of 'dirt' from", "use_gpu # these are the histograms to which the distance must be calculated", "Mover's Distance Note that as the number of iterations of ACT approaches infinity,", "EMD stands for Earth Mover's Distance - Mallows distance or 1st Wasserstein distance", "lower bound approximation of the Eath Mover's Distance Note that as the number", "Distance - Mallows distance or 1st Wasserstein distance between the two distributions, is", "of ACT approaches infinity, ACT becomes the same as ICT. For more, read", "compelixty approximation of the ICT or Iterative Constrained Transfers, which is a symmetric", "'dirt' from coordinate i to j self.cost_matrix = cost_matrix def act(self): # rename?", "class ACT_EMD: \"\"\" EMD stands for Earth Mover's Distance - Mallows distance or", "approximation of the ICT or Iterative Constrained Transfers, which is a symmetric lower", "Earth Mover's Distance - Mallows distance or 1st Wasserstein distance between the two", "Wasserstein distance between the two distributions, is a measure of the distance between", "ACT approaches infinity, ACT becomes the same as ICT. For more, read -", "infinity, ACT becomes the same as ICT. 
For more, read - https://arxiv.org/pdf/1812.02091.pdf \"\"\"", "\"\"\" def __init__(self, histograms, use_gpu=True, cost_matrix=None): self.use_gpu = use_gpu # these are the", "Transfers, which is a symmetric lower bound approximation of the Eath Mover's Distance", "or Approximate Constrained Transfers is a linear compelixty approximation of the ICT or", "1st Wasserstein distance between the two distributions, is a measure of the distance", "https://arxiv.org/pdf/1812.02091.pdf \"\"\" def __init__(self, histograms, use_gpu=True, cost_matrix=None): self.use_gpu = use_gpu # these are", "the distance between two probability distributions. ACT or Approximate Constrained Transfers is a", "cost_matrix=None): self.use_gpu = use_gpu # these are the histograms to which the distance", "\"\"\" EMD stands for Earth Mover's Distance - Mallows distance or 1st Wasserstein", "Note that as the number of iterations of ACT approaches infinity, ACT becomes", "of the ICT or Iterative Constrained Transfers, which is a symmetric lower bound", "use_gpu=True, cost_matrix=None): self.use_gpu = use_gpu # these are the histograms to which the", "same as ICT. For more, read - https://arxiv.org/pdf/1812.02091.pdf \"\"\" def __init__(self, histograms, use_gpu=True,", "the ICT or Iterative Constrained Transfers, which is a symmetric lower bound approximation", "between two probability distributions. ACT or Approximate Constrained Transfers is a linear compelixty", "ACT becomes the same as ICT. For more, read - https://arxiv.org/pdf/1812.02091.pdf \"\"\" def", "# transporting one unit of 'dirt' from coordinate i to j self.cost_matrix =", "Distance Note that as the number of iterations of ACT approaches infinity, ACT", "distributions. 
ACT or Approximate Constrained Transfers is a linear compelixty approximation of the", "calculated self.histograms = histograms # this is the cost matrix showing the cost", "= use_gpu # these are the histograms to which the distance must be", "the two distributions, is a measure of the distance between two probability distributions.", "two probability distributions. ACT or Approximate Constrained Transfers is a linear compelixty approximation", "of # transporting one unit of 'dirt' from coordinate i to j self.cost_matrix", "ICT. For more, read - https://arxiv.org/pdf/1812.02091.pdf \"\"\" def __init__(self, histograms, use_gpu=True, cost_matrix=None): self.use_gpu", "- https://arxiv.org/pdf/1812.02091.pdf \"\"\" def __init__(self, histograms, use_gpu=True, cost_matrix=None): self.use_gpu = use_gpu # these", "# this is the cost matrix showing the cost of # transporting one", "this is the cost matrix showing the cost of # transporting one unit", "is a linear compelixty approximation of the ICT or Iterative Constrained Transfers, which", "Iterative Constrained Transfers, which is a symmetric lower bound approximation of the Eath", "= histograms # this is the cost matrix showing the cost of #", "showing the cost of # transporting one unit of 'dirt' from coordinate i", "iterations of ACT approaches infinity, ACT becomes the same as ICT. 
For more,", "are the histograms to which the distance must be calculated self.histograms = histograms", "or Iterative Constrained Transfers, which is a symmetric lower bound approximation of the", "the histograms to which the distance must be calculated self.histograms = histograms #", "torch class ACT_EMD: \"\"\" EMD stands for Earth Mover's Distance - Mallows distance", "import torch class ACT_EMD: \"\"\" EMD stands for Earth Mover's Distance - Mallows", "Mallows distance or 1st Wasserstein distance between the two distributions, is a measure", "be calculated self.histograms = histograms # this is the cost matrix showing the", "the Eath Mover's Distance Note that as the number of iterations of ACT", "of 'dirt' from coordinate i to j self.cost_matrix = cost_matrix def act(self): #", "stands for Earth Mover's Distance - Mallows distance or 1st Wasserstein distance between", "ICT or Iterative Constrained Transfers, which is a symmetric lower bound approximation of", "self.use_gpu = use_gpu # these are the histograms to which the distance must", "distance or 1st Wasserstein distance between the two distributions, is a measure of", "Eath Mover's Distance Note that as the number of iterations of ACT approaches", "symmetric lower bound approximation of the Eath Mover's Distance Note that as the", "Mover's Distance - Mallows distance or 1st Wasserstein distance between the two distributions,", "# these are the histograms to which the distance must be calculated self.histograms", "Transfers is a linear compelixty approximation of the ICT or Iterative Constrained Transfers,", "of the distance between two probability distributions. ACT or Approximate Constrained Transfers is", "between the two distributions, is a measure of the distance between two probability", "measure of the distance between two probability distributions. 
ACT or Approximate Constrained Transfers", "approximation of the Eath Mover's Distance Note that as the number of iterations", "histograms # this is the cost matrix showing the cost of # transporting", "from coordinate i to j self.cost_matrix = cost_matrix def act(self): # rename? pass", "must be calculated self.histograms = histograms # this is the cost matrix showing", "that as the number of iterations of ACT approaches infinity, ACT becomes the", "a linear compelixty approximation of the ICT or Iterative Constrained Transfers, which is", "the number of iterations of ACT approaches infinity, ACT becomes the same as", "bound approximation of the Eath Mover's Distance Note that as the number of", "as the number of iterations of ACT approaches infinity, ACT becomes the same", "linear compelixty approximation of the ICT or Iterative Constrained Transfers, which is a", "distance between the two distributions, is a measure of the distance between two", "one unit of 'dirt' from coordinate i to j self.cost_matrix = cost_matrix def", "Constrained Transfers, which is a symmetric lower bound approximation of the Eath Mover's", "histograms to which the distance must be calculated self.histograms = histograms # this", "self.histograms = histograms # this is the cost matrix showing the cost of", "is the cost matrix showing the cost of # transporting one unit of", "the same as ICT. For more, read - https://arxiv.org/pdf/1812.02091.pdf \"\"\" def __init__(self, histograms,", "of the Eath Mover's Distance Note that as the number of iterations of", "is a symmetric lower bound approximation of the Eath Mover's Distance Note that", "number of iterations of ACT approaches infinity, ACT becomes the same as ICT.", "approaches infinity, ACT becomes the same as ICT. 
For more, read - https://arxiv.org/pdf/1812.02091.pdf", "matrix showing the cost of # transporting one unit of 'dirt' from coordinate", "Constrained Transfers is a linear compelixty approximation of the ICT or Iterative Constrained", "transporting one unit of 'dirt' from coordinate i to j self.cost_matrix = cost_matrix", "or 1st Wasserstein distance between the two distributions, is a measure of the", "- Mallows distance or 1st Wasserstein distance between the two distributions, is a", "these are the histograms to which the distance must be calculated self.histograms =", "as ICT. For more, read - https://arxiv.org/pdf/1812.02091.pdf \"\"\" def __init__(self, histograms, use_gpu=True, cost_matrix=None):", "cost of # transporting one unit of 'dirt' from coordinate i to j", "for Earth Mover's Distance - Mallows distance or 1st Wasserstein distance between the", "def __init__(self, histograms, use_gpu=True, cost_matrix=None): self.use_gpu = use_gpu # these are the histograms", "becomes the same as ICT. For more, read - https://arxiv.org/pdf/1812.02091.pdf \"\"\" def __init__(self," ]
[ "License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "writing, software # distributed under the License is distributed on an \"AS IS\"", "Unless required by applicable law or agreed to in writing, software # distributed", "collections.namedtuple('RouteRule', 'dest_cidr, next_hop') def retry(fn, args=None, kwargs=None, exceptions=None, limit=1, delay=0): args = args", "[] kwargs = kwargs or {} while limit > 0: try: return fn(*args,", "See the # License for the specific language governing permissions and limitations #", "\"License\"); you may # not use this file except in compliance with the", "Apache License, Version 2.0 (the \"License\"); you may # not use this file", "the License. You may obtain # a copy of the License at #", "law or agreed to in writing, software # distributed under the License is", "Reserved. # # Licensed under the Apache License, Version 2.0 (the \"License\"); you", "may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the Apache License, Version 2.0 (the \"License\"); you may # not use this", "System, Inc. # All Rights Reserved. # # Licensed under the Apache License,", "express or implied. See the # License for the specific language governing permissions", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "CONDITIONS OF ANY KIND, either express or implied. See the # License for", "not use this file except in compliance with the License. You may obtain", "as e: if not exceptions or not isinstance(e, exceptions): raise if delay: greenthread.sleep(delay)", "exceptions or not isinstance(e, exceptions): raise if delay: greenthread.sleep(delay) limit -= 1 raise", "under the License. 
import collections from eventlet import greenthread RouteRule = collections.namedtuple('RouteRule', 'dest_cidr,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "with the License. You may obtain # a copy of the License at", "for the specific language governing permissions and limitations # under the License. import", "greenthread RouteRule = collections.namedtuple('RouteRule', 'dest_cidr, next_hop') def retry(fn, args=None, kwargs=None, exceptions=None, limit=1, delay=0):", "delay=0): args = args or [] kwargs = kwargs or {} while limit", "limit > 0: try: return fn(*args, **kwargs) except Exception as e: if not", "Licensed under the Apache License, Version 2.0 (the \"License\"); you may # not", "{} while limit > 0: try: return fn(*args, **kwargs) except Exception as e:", "License for the specific language governing permissions and limitations # under the License.", "governing permissions and limitations # under the License. import collections from eventlet import", "except Exception as e: if not exceptions or not isinstance(e, exceptions): raise if", "2.0 (the \"License\"); you may # not use this file except in compliance", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "fn(*args, **kwargs) except Exception as e: if not exceptions or not isinstance(e, exceptions):", "kwargs or {} while limit > 0: try: return fn(*args, **kwargs) except Exception", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "and limitations # under the License. import collections from eventlet import greenthread RouteRule", "use this file except in compliance with the License. 
You may obtain #", "# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT", "collections from eventlet import greenthread RouteRule = collections.namedtuple('RouteRule', 'dest_cidr, next_hop') def retry(fn, args=None,", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the #", "Inc. # All Rights Reserved. # # Licensed under the Apache License, Version", "compliance with the License. You may obtain # a copy of the License", "or {} while limit > 0: try: return fn(*args, **kwargs) except Exception as", "while limit > 0: try: return fn(*args, **kwargs) except Exception as e: if", "License. import collections from eventlet import greenthread RouteRule = collections.namedtuple('RouteRule', 'dest_cidr, next_hop') def", "License, Version 2.0 (the \"License\"); you may # not use this file except", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "= kwargs or {} while limit > 0: try: return fn(*args, **kwargs) except", "def retry(fn, args=None, kwargs=None, exceptions=None, limit=1, delay=0): args = args or [] kwargs", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "implied. See the # License for the specific language governing permissions and limitations", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "OF ANY KIND, either express or implied. See the # License for the", "2015 Brocade Communications System, Inc. # All Rights Reserved. # # Licensed under", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the", "e: if not exceptions or not isinstance(e, exceptions): raise if delay: greenthread.sleep(delay) limit", "you may # not use this file except in compliance with the License.", "agreed to in writing, software # distributed under the License is distributed on", "permissions and limitations # under the License. import collections from eventlet import greenthread", "RouteRule = collections.namedtuple('RouteRule', 'dest_cidr, next_hop') def retry(fn, args=None, kwargs=None, exceptions=None, limit=1, delay=0): args", "Communications System, Inc. # All Rights Reserved. # # Licensed under the Apache", "(the \"License\"); you may # not use this file except in compliance with", "# under the License. import collections from eventlet import greenthread RouteRule = collections.namedtuple('RouteRule',", "the specific language governing permissions and limitations # under the License. import collections", "0: try: return fn(*args, **kwargs) except Exception as e: if not exceptions or", "may # not use this file except in compliance with the License. You", "KIND, either express or implied. See the # License for the specific language", "eventlet import greenthread RouteRule = collections.namedtuple('RouteRule', 'dest_cidr, next_hop') def retry(fn, args=None, kwargs=None, exceptions=None,", "either express or implied. See the # License for the specific language governing", "args=None, kwargs=None, exceptions=None, limit=1, delay=0): args = args or [] kwargs = kwargs", "kwargs = kwargs or {} while limit > 0: try: return fn(*args, **kwargs)", "# # Unless required by applicable law or agreed to in writing, software", "file except in compliance with the License. You may obtain # a copy", "import greenthread RouteRule = collections.namedtuple('RouteRule', 'dest_cidr, next_hop') def retry(fn, args=None, kwargs=None, exceptions=None, limit=1,", "this file except in compliance with the License. 
You may obtain # a", "not exceptions or not isinstance(e, exceptions): raise if delay: greenthread.sleep(delay) limit -= 1", "# Unless required by applicable law or agreed to in writing, software #", "Copyright 2015 Brocade Communications System, Inc. # All Rights Reserved. # # Licensed", "limitations # under the License. import collections from eventlet import greenthread RouteRule =", "by applicable law or agreed to in writing, software # distributed under the", "All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "the License. import collections from eventlet import greenthread RouteRule = collections.namedtuple('RouteRule', 'dest_cidr, next_hop')", "under the License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "or implied. See the # License for the specific language governing permissions and", "args = args or [] kwargs = kwargs or {} while limit >", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "License. You may obtain # a copy of the License at # #", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "language governing permissions and limitations # under the License. 
import collections from eventlet", "the License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "> 0: try: return fn(*args, **kwargs) except Exception as e: if not exceptions", "retry(fn, args=None, kwargs=None, exceptions=None, limit=1, delay=0): args = args or [] kwargs =", "**kwargs) except Exception as e: if not exceptions or not isinstance(e, exceptions): raise", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "exceptions=None, limit=1, delay=0): args = args or [] kwargs = kwargs or {}", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); you may", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "ANY KIND, either express or implied. See the # License for the specific", "the # License for the specific language governing permissions and limitations # under", "except in compliance with the License. You may obtain # a copy of", "next_hop') def retry(fn, args=None, kwargs=None, exceptions=None, limit=1, delay=0): args = args or []", "= args or [] kwargs = kwargs or {} while limit > 0:", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "to in writing, software # distributed under the License is distributed on an", "You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "specific language governing permissions and limitations # under the License. import collections from", "# Copyright 2015 Brocade Communications System, Inc. # All Rights Reserved. # #", "or [] kwargs = kwargs or {} while limit > 0: try: return", "Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the \"License\");", "required by applicable law or agreed to in writing, software # distributed under", "'dest_cidr, next_hop') def retry(fn, args=None, kwargs=None, exceptions=None, limit=1, delay=0): args = args or", "applicable law or agreed to in writing, software # distributed under the License", "distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT #", "OR CONDITIONS OF ANY KIND, either express or implied. See the # License", "obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "if not exceptions or not isinstance(e, exceptions): raise if delay: greenthread.sleep(delay) limit -=", "args or [] kwargs = kwargs or {} while limit > 0: try:", "from eventlet import greenthread RouteRule = collections.namedtuple('RouteRule', 'dest_cidr, next_hop') def retry(fn, args=None, kwargs=None,", "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may #", "in compliance with the License. You may obtain # a copy of the", "kwargs=None, exceptions=None, limit=1, delay=0): args = args or [] kwargs = kwargs or", "# not use this file except in compliance with the License. You may", "or agreed to in writing, software # distributed under the License is distributed", "# License for the specific language governing permissions and limitations # under the", "= collections.namedtuple('RouteRule', 'dest_cidr, next_hop') def retry(fn, args=None, kwargs=None, exceptions=None, limit=1, delay=0): args =", "Brocade Communications System, Inc. # All Rights Reserved. # # Licensed under the", "return fn(*args, **kwargs) except Exception as e: if not exceptions or not isinstance(e,", "# All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0", "under the Apache License, Version 2.0 (the \"License\"); you may # not use", "Exception as e: if not exceptions or not isinstance(e, exceptions): raise if delay:", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See", "import collections from eventlet import greenthread RouteRule = collections.namedtuple('RouteRule', 'dest_cidr, next_hop') def retry(fn,", "try: return fn(*args, **kwargs) except Exception as e: if not exceptions or not", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "limit=1, delay=0): args = args or [] kwargs = kwargs or {} while", "in writing, software # distributed under the License is distributed on an \"AS", "Version 2.0 (the \"License\"); you may # not use this file except in" ]
[ "len(expected) line = bytearray() modi_timeout = self.Timeout(self._timeout) while True: c = self.read(1) if", "== 0x2FDE and port.pid == 0x0003) modi_ports = [port for port in stl.comports()", "ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser else:", "line = bytearray() modi_timeout = self.Timeout(self._timeout) while True: c = self.read(1) if c:", "expected: break if size is not None and len(line) >= size: break else:", "hasattr(time, 'monotonic'): # Timeout implementation with time.monotonic(). This function is only # supported", "time.time() recv = serialport.read_until(b\"}\") dt = time.time() - init if recv == None:", "modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths path_list = list_modi_winusb_paths() for index, value in enumerate(path_list): info_list.append(value) return", "for index, value in enumerate(path_list): info_list.append(value) return info_list class ModiSerialPort(): SERIAL_MODE_COMPORT = 1", "waiting = self.serial_port.inWaiting() return waiting @property def port(self): return self._port @port.setter def port(self,", "def expired(self): \"\"\"Return a boolean, telling if the timeout has expired\"\"\" return self.target_time", ">= size: break else: break if modi_timeout.expired(): break return bytes(line) def read_all(self): if", "opened\") if size == None and self.type == self.SERIAL_MODE_COMPORT: size = 1 return", "__init__(self, duration): \"\"\"Initialize a timeout with given duration\"\"\" self.is_infinite = (duration is None)", "are set accordingly. \"\"\" if hasattr(time, 'monotonic'): # Timeout implementation with time.monotonic(). 
This", "baudrate=self._baudrate, timeout=self._timeout) self.serial_port = winusb else: ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout,", "break if modi_timeout.expired(): break return bytes(line) def read_all(self): if not self.is_open: raise Exception(\"serialport", "else: return False class Timeout(object): \"\"\"\\ Abstraction for timeout operations. Using time.monotonic() if", "return (port.vid == 0x2FDE and port.pid == 0x0003) modi_ports = [port for port", "# supported by Python 3.3 and above. It returns a time in seconds", "timeout operations. Using time.monotonic() if available or time.time() in all other cases. The", "self._port is not None: self.open(self._port) def open(self, port): self._port = port if sys.platform.startswith(\"win\"):", "> self.duration: # clock jumped, recalculate self.target_time = self.TIME() + self.duration return self.duration", "= winusb else: ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port", "the program, enter 'exit'.\") while not stop: input_data = input() if input_data ==", "list_modi_winusb_paths if port in list_modi_winusb_paths(): self.type = self.SERIAL_MODI_WINUSB winusb = ModiWinUsbComPort(path = self._port,", "break else: break if modi_timeout.expired(): break return bytes(line) def read_all(self): if not self.is_open:", "raise Exception(\"serialport is not opened\") if type(data) is str: data = data.encode(\"utf8\") self.serial_port.write(data)", "self.is_open: raise Exception(\"serialport is not opened\") lenterm = len(expected) line = bytearray() modi_timeout", "return max(0, delta) def restart(self, duration): \"\"\"\\ Restart a timeout, only supported if", "size is not None and len(line) >= size: break else: break if modi_timeout.expired():", "def handle_received(serialport): global stop while not stop: init = time.time() recv = serialport.read_until(b\"}\")", 
"self.serial_port.read(size) def read_until(self, expected=b\"\\x0A\", size=None): if not self.is_open: raise Exception(\"serialport is not opened\")", "size == None and self.type == self.SERIAL_MODE_COMPORT: size = 1 return self.serial_port.read(size) def", "@property def baudrate(self): return self._baudrate @baudrate.setter def baudrate(self, value): self._baudrate = value self.serial_port.baudrate", "self.target_time = self.TIME() + duration else: self.target_time = None def expired(self): \"\"\"Return a", "break return bytes(line) def read_all(self): if not self.is_open: raise Exception(\"serialport is not opened\")", "self.Timeout(self._timeout) while True: c = self.read(1) if c: line += c if line[-lenterm:]", "the timeout expires\"\"\" if self.is_non_blocking: return 0 elif self.is_infinite: return None else: delta", "exclusive=True) self.serial_port = ser self.is_open = True def close(self): if self.is_open: self.serial_port.close() def", "This is compatible with all # Python versions but has issues if the", "stop: input_data = input() if input_data == \"exit\": stop = True break serialport.close()", "duration # main if __name__ == \"__main__\": stop = False def handle_received(serialport): global", "not self.is_open: raise Exception(\"serialport is not opened\") return self.serial_port.read_all() def flush(self): if not", "self.serial_port.read_all() def flush(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flush() def", "= time.time def __init__(self, duration): \"\"\"Initialize a timeout with given duration\"\"\" self.is_infinite =", "<= 0 def time_left(self): \"\"\"Return how many seconds are left until the timeout", "if self.type == self.SERIAL_MODE_COMPORT: return self.serial_port.dtr else: return False class Timeout(object): \"\"\"\\ Abstraction", "is not affected by system clock # adjustments. TIME = time.monotonic else: #", "but is not affected by system clock # adjustments. 
TIME = time.monotonic else:", "if size is not None and len(line) >= size: break else: break if", "value): self._port = value self.serial_port.port = value @property def baudrate(self): return self._baudrate @baudrate.setter", "921600, timeout = 0.2, write_timeout = None): self.type = self.SERIAL_MODE_COMPORT self._port = port", "self.serial_port.baudrate = value @property def timeout(self): return self._timeout @timeout.setter def timeout(self, value): self._timeout", "bytearray() modi_timeout = self.Timeout(self._timeout) while True: c = self.read(1) if c: line +=", "self.target_time = self.TIME() + self.duration return self.duration else: return max(0, delta) def restart(self,", "def write_timeout(self, value): self._write_timeout = value self.serial_port.write_timeout = value @property def dtr(self): if", "global stop while not stop: init = time.time() recv = serialport.read_until(b\"}\") dt =", "list_modi_winusb_paths path_list = list_modi_winusb_paths() for index, value in enumerate(path_list): info_list.append(value) return info_list class", "Exception(\"serialport is not opened\") if type(data) is str: data = data.encode(\"utf8\") self.serial_port.write(data) def", "{int(dt * 1000.0)}ms - {recv}\") time.sleep(0.001) serialport.close() import threading info_list = list_modi_serialports() if", "is None) self.is_non_blocking = (duration == 0) self.duration = duration if duration is", "write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser self.is_open = True def close(self): if self.is_open: self.serial_port.close()", "delta) def restart(self, duration): \"\"\"\\ Restart a timeout, only supported if a timeout", "= self.TIME() + duration # main if __name__ == \"__main__\": stop = False", "Timeout(object): \"\"\"\\ Abstraction for timeout operations. 
Using time.monotonic() if available or time.time() in", "in list_modi_winusb_paths(): self.type = self.SERIAL_MODI_WINUSB winusb = ModiWinUsbComPort(path = self._port, baudrate=self._baudrate, timeout=self._timeout) self.serial_port", "issues if the clock is adjusted while the # timeout is running. TIME", "= time.monotonic else: # Timeout implementation with time.time(). This is compatible with all", "opened\") self.serial_port.flushInput() def flushOutput(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushOutput()", "telling if the timeout has expired\"\"\" return self.target_time is not None and self.time_left()", "self._port = port if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort, list_modi_winusb_paths if port in", "modi_port in modi_ports: info_list.append(modi_port.device) if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths path_list = list_modi_winusb_paths()", "\"\"\"\\ Restart a timeout, only supported if a timeout was already set up", "True: c = self.read(1) if c: line += c if line[-lenterm:] == expected:", "= self.TIME() + self.duration return self.duration else: return max(0, delta) def restart(self, duration):", "= list_modi_winusb_paths() for index, value in enumerate(path_list): info_list.append(value) return info_list class ModiSerialPort(): SERIAL_MODE_COMPORT", "== self.SERIAL_MODE_COMPORT: waiting = self.serial_port.inWaiting() return waiting @property def port(self): return self._port @port.setter", "self.target_time is not None and self.time_left() <= 0 def time_left(self): \"\"\"Return how many", "duration): \"\"\"Initialize a timeout with given duration\"\"\" self.is_infinite = (duration is None) self.is_non_blocking", "self._port, baudrate=self._baudrate, timeout=self._timeout) self.serial_port = winusb else: ser = serial.Serial(port = self._port, 
baudrate=self._baudrate,", "= serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser self.is_open =", "info_list class ModiSerialPort(): SERIAL_MODE_COMPORT = 1 SERIAL_MODI_WINUSB = 2 def __init__(self, port =", "baudrate = 921600, timeout = 0.2, write_timeout = None): self.type = self.SERIAL_MODE_COMPORT self._port", "c: line += c if line[-lenterm:] == expected: break if size is not", "size: break else: break if modi_timeout.expired(): break return bytes(line) def read_all(self): if not", "time.time(), but is not affected by system clock # adjustments. TIME = time.monotonic", "stop: init = time.time() recv = serialport.read_until(b\"}\") dt = time.time() - init if", "from modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths path_list = list_modi_winusb_paths() for index, value in enumerate(path_list): info_list.append(value)", "value): self._write_timeout = value self.serial_port.write_timeout = value @property def dtr(self): if self.type ==", "connected\") serialport = ModiSerialPort(info_list[0]) threading.Thread(target=handle_received, daemon=True, args=(serialport, )).start() print(\"To exit the program, enter", "@baudrate.setter def baudrate(self, value): self._baudrate = value self.serial_port.baudrate = value @property def timeout(self):", "+ duration else: self.target_time = None def expired(self): \"\"\"Return a boolean, telling if", "def read(self, size=1): if not self.is_open: raise Exception(\"serialport is not opened\") if size", "self.duration else: return max(0, delta) def restart(self, duration): \"\"\"\\ Restart a timeout, only", "True break print(f\"dt: {int(dt * 1000.0)}ms - {recv}\") time.sleep(0.001) serialport.close() import threading info_list", "threading info_list = list_modi_serialports() if not info_list: raise Exception(\"No MODI+ is connected\") serialport", "self.time_left() <= 0 def time_left(self): 
\"\"\"Return how many seconds are left until the", "import sys import time import serial import serial.tools.list_ports as stl def list_modi_serialports(): info_list", "timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser else: ser = serial.Serial(port = self._port, baudrate=self._baudrate,", "= ser self.is_open = True def close(self): if self.is_open: self.serial_port.close() def write(self, data):", "== 0) self.duration = duration if duration is not None: self.target_time = self.TIME()", "None else: delta = self.target_time - self.TIME() if delta > self.duration: # clock", "ModiSerialPort(): SERIAL_MODE_COMPORT = 1 SERIAL_MODI_WINUSB = 2 def __init__(self, port = None, baudrate", "is running. TIME = time.time def __init__(self, duration): \"\"\"Initialize a timeout with given", "info_list.append(value) return info_list class ModiSerialPort(): SERIAL_MODE_COMPORT = 1 SERIAL_MODI_WINUSB = 2 def __init__(self,", "Exception(\"serialport is not opened\") waiting = None if self.type == self.SERIAL_MODE_COMPORT: waiting =", "if self.type == self.SERIAL_MODE_COMPORT: waiting = self.serial_port.inWaiting() return waiting @property def port(self): return", "args=(serialport, )).start() print(\"To exit the program, enter 'exit'.\") while not stop: input_data =", "+= c if line[-lenterm:] == expected: break if size is not None and", "self.is_open: raise Exception(\"serialport is not opened\") waiting = None if self.type == self.SERIAL_MODE_COMPORT:", "return self._port @port.setter def port(self, value): self._port = value self.serial_port.port = value @property", "import serial import serial.tools.list_ports as stl def list_modi_serialports(): info_list = [] def __is_modi_port(port):", "raise Exception(\"serialport is not opened\") waiting = None if self.type == self.SERIAL_MODE_COMPORT: waiting", "if available or time.time() in all other cases. 
The class can also be", "Exception(\"serialport is not opened\") self.serial_port.setDTR(state) def setRTS(self, state): if not self.is_open: raise Exception(\"serialport", "def setDTR(self, state): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setDTR(state) def", "not self.is_open: raise Exception(\"serialport is not opened\") if type(data) is str: data =", "import time import serial import serial.tools.list_ports as stl def list_modi_serialports(): info_list = []", "+ self.duration return self.duration else: return max(0, delta) def restart(self, duration): \"\"\"\\ Restart", "duration self.target_time = self.TIME() + duration # main if __name__ == \"__main__\": stop", "read(self, size=1): if not self.is_open: raise Exception(\"serialport is not opened\") if size ==", "is_infinite are set accordingly. \"\"\" if hasattr(time, 'monotonic'): # Timeout implementation with time.monotonic().", "is not None: self.target_time = self.TIME() + duration else: self.target_time = None def", "opened\") if type(data) is str: data = data.encode(\"utf8\") self.serial_port.write(data) def read(self, size=1): if", "already set up before. \"\"\" self.duration = duration self.target_time = self.TIME() + duration", "boolean, telling if the timeout has expired\"\"\" return self.target_time is not None and", "0 def time_left(self): \"\"\"Return how many seconds are left until the timeout expires\"\"\"", "function is only # supported by Python 3.3 and above. 
It returns a", "and len(line) >= size: break else: break if modi_timeout.expired(): break return bytes(line) def", "a time in seconds # (float) just as time.time(), but is not affected", "{recv}\") time.sleep(0.001) serialport.close() import threading info_list = list_modi_serialports() if not info_list: raise Exception(\"No", "Exception(\"serialport is not opened\") self.serial_port.flushOutput() def setDTR(self, state): if not self.is_open: raise Exception(\"serialport", "while the # timeout is running. TIME = time.time def __init__(self, duration): \"\"\"Initialize", "== expected: break if size is not None and len(line) >= size: break", "serial.tools.list_ports as stl def list_modi_serialports(): info_list = [] def __is_modi_port(port): return (port.vid ==", "== self.SERIAL_MODE_COMPORT: return self.serial_port.dtr else: return False class Timeout(object): \"\"\"\\ Abstraction for timeout", "serialport.close() import threading info_list = list_modi_serialports() if not info_list: raise Exception(\"No MODI+ is", "def inWaiting(self): if not self.is_open: raise Exception(\"serialport is not opened\") waiting = None", "= True def close(self): if self.is_open: self.serial_port.close() def write(self, data): if not self.is_open:", "init if recv == None: print(\"disconnected\") stop = True break print(f\"dt: {int(dt *", "serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser self.is_open = True", "= value @property def baudrate(self): return self._baudrate @baudrate.setter def baudrate(self, value): self._baudrate =", "is not None: self.open(self._port) def open(self, port): self._port = port if sys.platform.startswith(\"win\"): from", "not opened\") self.serial_port.flush() def flushInput(self): if not self.is_open: raise Exception(\"serialport is not opened\")", "is not opened\") waiting = None if self.type == self.SERIAL_MODE_COMPORT: waiting = 
self.serial_port.inWaiting()", "if self.is_non_blocking: return 0 elif self.is_infinite: return None else: delta = self.target_time -", "handle_received(serialport): global stop while not stop: init = time.time() recv = serialport.read_until(b\"}\") dt", "def timeout(self, value): self._timeout = value self.serial_port.timeout = value @property def write_timeout(self): return", "compatible with all # Python versions but has issues if the clock is", "is adjusted while the # timeout is running. TIME = time.time def __init__(self,", "MODI+ is connected\") serialport = ModiSerialPort(info_list[0]) threading.Thread(target=handle_received, daemon=True, args=(serialport, )).start() print(\"To exit the", "data = data.encode(\"utf8\") self.serial_port.write(data) def read(self, size=1): if not self.is_open: raise Exception(\"serialport is", "not stop: init = time.time() recv = serialport.read_until(b\"}\") dt = time.time() - init", "= self.Timeout(self._timeout) while True: c = self.read(1) if c: line += c if", "= 1 return self.serial_port.read(size) def read_until(self, expected=b\"\\x0A\", size=None): if not self.is_open: raise Exception(\"serialport", "0 or None, in order to support non-blocking and fully blocking I/O operations.", "the # timeout is running. TIME = time.time def __init__(self, duration): \"\"\"Initialize a", "= value @property def write_timeout(self): return self._write_timeout @write_timeout.setter def write_timeout(self, value): self._write_timeout =", "\"\"\" self.duration = duration self.target_time = self.TIME() + duration # main if __name__", "- init if recv == None: print(\"disconnected\") stop = True break print(f\"dt: {int(dt", "I/O operations. The attributes is_non_blocking and is_infinite are set accordingly. \"\"\" if hasattr(time,", "if the clock is adjusted while the # timeout is running. 
TIME =", "time.sleep(0.001) serialport.close() import threading info_list = list_modi_serialports() if not info_list: raise Exception(\"No MODI+", "while not stop: init = time.time() recv = serialport.read_until(b\"}\") dt = time.time() -", "if a timeout was already set up before. \"\"\" self.duration = duration self.target_time", "if not self.is_open: raise Exception(\"serialport is not opened\") return self.serial_port.read_all() def flush(self): if", "not self.is_open: raise Exception(\"serialport is not opened\") waiting = None if self.type ==", "implementation with time.time(). This is compatible with all # Python versions but has", "def __init__(self, port = None, baudrate = 921600, timeout = 0.2, write_timeout =", "== self.SERIAL_MODE_COMPORT: size = 1 return self.serial_port.read(size) def read_until(self, expected=b\"\\x0A\", size=None): if not", "Timeout implementation with time.time(). This is compatible with all # Python versions but", "ModiSerialPort(info_list[0]) threading.Thread(target=handle_received, daemon=True, args=(serialport, )).start() print(\"To exit the program, enter 'exit'.\") while not", "self._baudrate @baudrate.setter def baudrate(self, value): self._baudrate = value self.serial_port.baudrate = value @property def", "Abstraction for timeout operations. Using time.monotonic() if available or time.time() in all other", "return self._timeout @timeout.setter def timeout(self, value): self._timeout = value self.serial_port.timeout = value @property", "ser else: ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port =", "is only # supported by Python 3.3 and above. 
It returns a time", "= 1 SERIAL_MODI_WINUSB = 2 def __init__(self, port = None, baudrate = 921600,", "sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort, list_modi_winusb_paths if port in list_modi_winusb_paths(): self.type = self.SERIAL_MODI_WINUSB", "can also be initialized with 0 or None, in order to support non-blocking", "else: delta = self.target_time - self.TIME() if delta > self.duration: # clock jumped,", "until the timeout expires\"\"\" if self.is_non_blocking: return 0 elif self.is_infinite: return None else:", "if modi_timeout.expired(): break return bytes(line) def read_all(self): if not self.is_open: raise Exception(\"serialport is", "if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths path_list = list_modi_winusb_paths() for index, value in", "= False def handle_received(serialport): global stop while not stop: init = time.time() recv", "timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser self.is_open = True def close(self): if self.is_open:", "self.SERIAL_MODI_WINUSB winusb = ModiWinUsbComPort(path = self._port, baudrate=self._baudrate, timeout=self._timeout) self.serial_port = winusb else: ser", "raise Exception(\"serialport is not opened\") if size == None and self.type == self.SERIAL_MODE_COMPORT:", "c if line[-lenterm:] == expected: break if size is not None and len(line)", "size = 1 return self.serial_port.read(size) def read_until(self, expected=b\"\\x0A\", size=None): if not self.is_open: raise", "setDTR(self, state): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setDTR(state) def setRTS(self,", "size=1): if not self.is_open: raise Exception(\"serialport is not opened\") if size == None", "raise Exception(\"serialport is not opened\") lenterm = len(expected) line = bytearray() modi_timeout =", "self.is_non_blocking: return 0 
elif self.is_infinite: return None else: delta = self.target_time - self.TIME()", "\"\"\"\\ Abstraction for timeout operations. Using time.monotonic() if available or time.time() in all", "modi_ports = [port for port in stl.comports() if __is_modi_port(port)] for modi_port in modi_ports:", "0.2, write_timeout = None): self.type = self.SERIAL_MODE_COMPORT self._port = port self._baudrate = baudrate", "0) self.duration = duration if duration is not None: self.target_time = self.TIME() +", "baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser self.is_open = True def close(self): if", "while True: c = self.read(1) if c: line += c if line[-lenterm:] ==", "def read_until(self, expected=b\"\\x0A\", size=None): if not self.is_open: raise Exception(\"serialport is not opened\") lenterm", "@timeout.setter def timeout(self, value): self._timeout = value self.serial_port.timeout = value @property def write_timeout(self):", "import serial.tools.list_ports as stl def list_modi_serialports(): info_list = [] def __is_modi_port(port): return (port.vid", "flush(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flush() def flushInput(self): if", "break print(f\"dt: {int(dt * 1000.0)}ms - {recv}\") time.sleep(0.001) serialport.close() import threading info_list =", "self.read(1) if c: line += c if line[-lenterm:] == expected: break if size", "The class can also be initialized with 0 or None, in order to", "set up before. 
\"\"\" self.duration = duration self.target_time = self.TIME() + duration #", "has issues if the clock is adjusted while the # timeout is running.", "opened\") self.serial_port.flushOutput() def setDTR(self, state): if not self.is_open: raise Exception(\"serialport is not opened\")", "not opened\") self.serial_port.setDTR(state) def setRTS(self, state): if not self.is_open: raise Exception(\"serialport is not", "None: print(\"disconnected\") stop = True break print(f\"dt: {int(dt * 1000.0)}ms - {recv}\") time.sleep(0.001)", "else: ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser", "return self.serial_port.read(size) def read_until(self, expected=b\"\\x0A\", size=None): if not self.is_open: raise Exception(\"serialport is not", "not self.is_open: raise Exception(\"serialport is not opened\") if size == None and self.type", "enumerate(path_list): info_list.append(value) return info_list class ModiSerialPort(): SERIAL_MODE_COMPORT = 1 SERIAL_MODI_WINUSB = 2 def", "not None: self.open(self._port) def open(self, port): self._port = port if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb", "set accordingly. \"\"\" if hasattr(time, 'monotonic'): # Timeout implementation with time.monotonic(). 
This function", "= [port for port in stl.comports() if __is_modi_port(port)] for modi_port in modi_ports: info_list.append(modi_port.device)", "setRTS(self, state): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setRTS(state) def inWaiting(self):", "= duration self.target_time = self.TIME() + duration # main if __name__ == \"__main__\":", "class can also be initialized with 0 or None, in order to support", "stop while not stop: init = time.time() recv = serialport.read_until(b\"}\") dt = time.time()", "= timeout self._write_timeout = write_timeout self.serial_port = None self._is_open = False if self._port", "self.is_open: raise Exception(\"serialport is not opened\") if size == None and self.type ==", "if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setDTR(state) def setRTS(self, state): if", "def open(self, port): self._port = port if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort, list_modi_winusb_paths", "+ duration # main if __name__ == \"__main__\": stop = False def handle_received(serialport):", "and self.time_left() <= 0 def time_left(self): \"\"\"Return how many seconds are left until", "clock jumped, recalculate self.target_time = self.TIME() + self.duration return self.duration else: return max(0,", "= self.serial_port.inWaiting() return waiting @property def port(self): return self._port @port.setter def port(self, value):", "not opened\") if type(data) is str: data = data.encode(\"utf8\") self.serial_port.write(data) def read(self, size=1):", "or None, in order to support non-blocking and fully blocking I/O operations. 
The", "= serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser else: ser", "# main if __name__ == \"__main__\": stop = False def handle_received(serialport): global stop", "be initialized with 0 or None, in order to support non-blocking and fully", "line[-lenterm:] == expected: break if size is not None and len(line) >= size:", "\"\"\"Return how many seconds are left until the timeout expires\"\"\" if self.is_non_blocking: return", "return waiting @property def port(self): return self._port @port.setter def port(self, value): self._port =", "read_all(self): if not self.is_open: raise Exception(\"serialport is not opened\") return self.serial_port.read_all() def flush(self):", "and fully blocking I/O operations. The attributes is_non_blocking and is_infinite are set accordingly.", "if __name__ == \"__main__\": stop = False def handle_received(serialport): global stop while not", "with time.monotonic(). This function is only # supported by Python 3.3 and above.", "None, baudrate = 921600, timeout = 0.2, write_timeout = None): self.type = self.SERIAL_MODE_COMPORT", "write_timeout self.serial_port = None self._is_open = False if self._port is not None: self.open(self._port)", "else: return max(0, delta) def restart(self, duration): \"\"\"\\ Restart a timeout, only supported", "self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setRTS(state) def inWaiting(self): if not self.is_open: raise", "expired\"\"\" return self.target_time is not None and self.time_left() <= 0 def time_left(self): \"\"\"Return", "non-blocking and fully blocking I/O operations. 
The attributes is_non_blocking and is_infinite are set", "def dtr(self): if self.type == self.SERIAL_MODE_COMPORT: return self.serial_port.dtr else: return False class Timeout(object):", "TIME = time.time def __init__(self, duration): \"\"\"Initialize a timeout with given duration\"\"\" self.is_infinite", "= len(expected) line = bytearray() modi_timeout = self.Timeout(self._timeout) while True: c = self.read(1)", "a timeout with given duration\"\"\" self.is_infinite = (duration is None) self.is_non_blocking = (duration", "(duration is None) self.is_non_blocking = (duration == 0) self.duration = duration if duration", "return self.serial_port.read_all() def flush(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flush()", "None self._is_open = False if self._port is not None: self.open(self._port) def open(self, port):", "Exception(\"serialport is not opened\") lenterm = len(expected) line = bytearray() modi_timeout = self.Timeout(self._timeout)", "return None else: delta = self.target_time - self.TIME() if delta > self.duration: #", "Python 3.3 and above. 
It returns a time in seconds # (float) just", "timeout=self._timeout) self.serial_port = winusb else: ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout,", "if not self.is_open: raise Exception(\"serialport is not opened\") if type(data) is str: data", "restart(self, duration): \"\"\"\\ Restart a timeout, only supported if a timeout was already", "not None: self.target_time = self.TIME() + duration else: self.target_time = None def expired(self):", "print(f\"dt: {int(dt * 1000.0)}ms - {recv}\") time.sleep(0.001) serialport.close() import threading info_list = list_modi_serialports()", "import ModiWinUsbComPort, list_modi_winusb_paths if port in list_modi_winusb_paths(): self.type = self.SERIAL_MODI_WINUSB winusb = ModiWinUsbComPort(path", "= (duration == 0) self.duration = duration if duration is not None: self.target_time", "and is_infinite are set accordingly. \"\"\" if hasattr(time, 'monotonic'): # Timeout implementation with", "only supported if a timeout was already set up before. \"\"\" self.duration =", "if self._port is not None: self.open(self._port) def open(self, port): self._port = port if", "= True break print(f\"dt: {int(dt * 1000.0)}ms - {recv}\") time.sleep(0.001) serialport.close() import threading", "as stl def list_modi_serialports(): info_list = [] def __is_modi_port(port): return (port.vid == 0x2FDE", "def list_modi_serialports(): info_list = [] def __is_modi_port(port): return (port.vid == 0x2FDE and port.pid", "self.serial_port.inWaiting() return waiting @property def port(self): return self._port @port.setter def port(self, value): self._port", "self.is_non_blocking = (duration == 0) self.duration = duration if duration is not None:", "# timeout is running. TIME = time.time def __init__(self, duration): \"\"\"Initialize a timeout", "'monotonic'): # Timeout implementation with time.monotonic(). 
This function is only # supported by", "class ModiSerialPort(): SERIAL_MODE_COMPORT = 1 SERIAL_MODI_WINUSB = 2 def __init__(self, port = None,", "self.serial_port.flushInput() def flushOutput(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushOutput() def", "(port.vid == 0x2FDE and port.pid == 0x0003) modi_ports = [port for port in", "Exception(\"serialport is not opened\") self.serial_port.setRTS(state) def inWaiting(self): if not self.is_open: raise Exception(\"serialport is", "def baudrate(self): return self._baudrate @baudrate.setter def baudrate(self, value): self._baudrate = value self.serial_port.baudrate =", "all other cases. The class can also be initialized with 0 or None,", "if delta > self.duration: # clock jumped, recalculate self.target_time = self.TIME() + self.duration", "recalculate self.target_time = self.TIME() + self.duration return self.duration else: return max(0, delta) def", "duration is not None: self.target_time = self.TIME() + duration else: self.target_time = None", "and self.type == self.SERIAL_MODE_COMPORT: size = 1 return self.serial_port.read(size) def read_until(self, expected=b\"\\x0A\", size=None):", "def port(self): return self._port @port.setter def port(self, value): self._port = value self.serial_port.port =", "self.duration: # clock jumped, recalculate self.target_time = self.TIME() + self.duration return self.duration else:", "import threading info_list = list_modi_serialports() if not info_list: raise Exception(\"No MODI+ is connected\")", "else: break if modi_timeout.expired(): break return bytes(line) def read_all(self): if not self.is_open: raise", "serialport = ModiSerialPort(info_list[0]) threading.Thread(target=handle_received, daemon=True, args=(serialport, )).start() print(\"To exit the program, enter 'exit'.\")", "self.SERIAL_MODE_COMPORT self._port = port self._baudrate = baudrate self._timeout = timeout self._write_timeout = write_timeout", "= data.encode(\"utf8\") 
self.serial_port.write(data) def read(self, size=1): if not self.is_open: raise Exception(\"serialport is not", "return self._baudrate @baudrate.setter def baudrate(self, value): self._baudrate = value self.serial_port.baudrate = value @property", "= False if self._port is not None: self.open(self._port) def open(self, port): self._port =", "system clock # adjustments. TIME = time.monotonic else: # Timeout implementation with time.time().", "- {recv}\") time.sleep(0.001) serialport.close() import threading info_list = list_modi_serialports() if not info_list: raise", "timeout with given duration\"\"\" self.is_infinite = (duration is None) self.is_non_blocking = (duration ==", "if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setRTS(state) def inWaiting(self): if not", "def close(self): if self.is_open: self.serial_port.close() def write(self, data): if not self.is_open: raise Exception(\"serialport", "baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser else: ser = serial.Serial(port = self._port,", "time in seconds # (float) just as time.time(), but is not affected by", "self.serial_port.port = value @property def baudrate(self): return self._baudrate @baudrate.setter def baudrate(self, value): self._baudrate", "return 0 elif self.is_infinite: return None else: delta = self.target_time - self.TIME() if", "seconds are left until the timeout expires\"\"\" if self.is_non_blocking: return 0 elif self.is_infinite:", "ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser self.is_open", "port self._baudrate = baudrate self._timeout = timeout self._write_timeout = write_timeout self.serial_port = None", "port.pid == 0x0003) modi_ports = [port for port in stl.comports() if __is_modi_port(port)] for", "# Timeout implementation with time.time(). 
This is compatible with all # Python versions", "and above. It returns a time in seconds # (float) just as time.time(),", "= serialport.read_until(b\"}\") dt = time.time() - init if recv == None: print(\"disconnected\") stop", "bytes(line) def read_all(self): if not self.is_open: raise Exception(\"serialport is not opened\") return self.serial_port.read_all()", "in all other cases. The class can also be initialized with 0 or", "dtr(self): if self.type == self.SERIAL_MODE_COMPORT: return self.serial_port.dtr else: return False class Timeout(object): \"\"\"\\", "returns a time in seconds # (float) just as time.time(), but is not", "close(self): if self.is_open: self.serial_port.close() def write(self, data): if not self.is_open: raise Exception(\"serialport is", "self.TIME() + duration else: self.target_time = None def expired(self): \"\"\"Return a boolean, telling", "def __is_modi_port(port): return (port.vid == 0x2FDE and port.pid == 0x0003) modi_ports = [port", "= 2 def __init__(self, port = None, baudrate = 921600, timeout = 0.2,", "= value @property def dtr(self): if self.type == self.SERIAL_MODE_COMPORT: return self.serial_port.dtr else: return", "import list_modi_winusb_paths path_list = list_modi_winusb_paths() for index, value in enumerate(path_list): info_list.append(value) return info_list", "port(self): return self._port @port.setter def port(self, value): self._port = value self.serial_port.port = value", "Timeout implementation with time.monotonic(). 
This function is only # supported by Python 3.3", "= None def expired(self): \"\"\"Return a boolean, telling if the timeout has expired\"\"\"", "waiting @property def port(self): return self._port @port.setter def port(self, value): self._port = value", "def timeout(self): return self._timeout @timeout.setter def timeout(self, value): self._timeout = value self.serial_port.timeout =", "= self.TIME() + duration else: self.target_time = None def expired(self): \"\"\"Return a boolean,", "0 elif self.is_infinite: return None else: delta = self.target_time - self.TIME() if delta", "= baudrate self._timeout = timeout self._write_timeout = write_timeout self.serial_port = None self._is_open =", "2 def __init__(self, port = None, baudrate = 921600, timeout = 0.2, write_timeout", "read_until(self, expected=b\"\\x0A\", size=None): if not self.is_open: raise Exception(\"serialport is not opened\") lenterm =", "@write_timeout.setter def write_timeout(self, value): self._write_timeout = value self.serial_port.write_timeout = value @property def dtr(self):", "0x0003) modi_ports = [port for port in stl.comports() if __is_modi_port(port)] for modi_port in", "== None and self.type == self.SERIAL_MODE_COMPORT: size = 1 return self.serial_port.read(size) def read_until(self,", "Exception(\"serialport is not opened\") self.serial_port.flush() def flushInput(self): if not self.is_open: raise Exception(\"serialport is", "is not opened\") if type(data) is str: data = data.encode(\"utf8\") self.serial_port.write(data) def read(self,", "raise Exception(\"serialport is not opened\") self.serial_port.flush() def flushInput(self): if not self.is_open: raise Exception(\"serialport", "@port.setter def port(self, value): self._port = value self.serial_port.port = value @property def baudrate(self):", "def write(self, data): if not self.is_open: raise Exception(\"serialport is not opened\") if type(data)", "if c: line += c if line[-lenterm:] == expected: break if size is", "@property def 
port(self): return self._port @port.setter def port(self, value): self._port = value self.serial_port.port", "def port(self, value): self._port = value self.serial_port.port = value @property def baudrate(self): return", "self.serial_port.setRTS(state) def inWaiting(self): if not self.is_open: raise Exception(\"serialport is not opened\") waiting =", "running. TIME = time.time def __init__(self, duration): \"\"\"Initialize a timeout with given duration\"\"\"", "return False class Timeout(object): \"\"\"\\ Abstraction for timeout operations. Using time.monotonic() if available", "if not info_list: raise Exception(\"No MODI+ is connected\") serialport = ModiSerialPort(info_list[0]) threading.Thread(target=handle_received, daemon=True,", "It returns a time in seconds # (float) just as time.time(), but is", "expected=b\"\\x0A\", size=None): if not self.is_open: raise Exception(\"serialport is not opened\") lenterm = len(expected)", "self.target_time = None def expired(self): \"\"\"Return a boolean, telling if the timeout has", "sys import time import serial import serial.tools.list_ports as stl def list_modi_serialports(): info_list =", "sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths path_list = list_modi_winusb_paths() for index, value in enumerate(path_list):", "accordingly. \"\"\" if hasattr(time, 'monotonic'): # Timeout implementation with time.monotonic(). 
This function is", "enter 'exit'.\") while not stop: input_data = input() if input_data == \"exit\": stop", "= value self.serial_port.port = value @property def baudrate(self): return self._baudrate @baudrate.setter def baudrate(self,", "port = None, baudrate = 921600, timeout = 0.2, write_timeout = None): self.type", "list_modi_serialports() if not info_list: raise Exception(\"No MODI+ is connected\") serialport = ModiSerialPort(info_list[0]) threading.Thread(target=handle_received,", "timeout(self, value): self._timeout = value self.serial_port.timeout = value @property def write_timeout(self): return self._write_timeout", "self.duration = duration self.target_time = self.TIME() + duration # main if __name__ ==", "False class Timeout(object): \"\"\"\\ Abstraction for timeout operations. Using time.monotonic() if available or", "support non-blocking and fully blocking I/O operations. The attributes is_non_blocking and is_infinite are", "self._timeout = timeout self._write_timeout = write_timeout self.serial_port = None self._is_open = False if", "not None and self.time_left() <= 0 def time_left(self): \"\"\"Return how many seconds are", "= port if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort, list_modi_winusb_paths if port in list_modi_winusb_paths():", "not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flush() def flushInput(self): if not self.is_open:", "= ser else: ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port", "operations. Using time.monotonic() if available or time.time() in all other cases. The class", "blocking I/O operations. The attributes is_non_blocking and is_infinite are set accordingly. \"\"\" if", "as time.time(), but is not affected by system clock # adjustments. TIME =", "timeout was already set up before. 
\"\"\" self.duration = duration self.target_time = self.TIME()", "modi_timeout = self.Timeout(self._timeout) while True: c = self.read(1) if c: line += c", "jumped, recalculate self.target_time = self.TIME() + self.duration return self.duration else: return max(0, delta)", "= value self.serial_port.write_timeout = value @property def dtr(self): if self.type == self.SERIAL_MODE_COMPORT: return", "by Python 3.3 and above. It returns a time in seconds # (float)", "write_timeout(self, value): self._write_timeout = value self.serial_port.write_timeout = value @property def dtr(self): if self.type", "ser self.is_open = True def close(self): if self.is_open: self.serial_port.close() def write(self, data): if", "self.is_open = True def close(self): if self.is_open: self.serial_port.close() def write(self, data): if not", "value): self._baudrate = value self.serial_port.baudrate = value @property def timeout(self): return self._timeout @timeout.setter", "order to support non-blocking and fully blocking I/O operations. The attributes is_non_blocking and", "fully blocking I/O operations. The attributes is_non_blocking and is_infinite are set accordingly. \"\"\"", "self._port @port.setter def port(self, value): self._port = value self.serial_port.port = value @property def", "all # Python versions but has issues if the clock is adjusted while", "= ModiSerialPort(info_list[0]) threading.Thread(target=handle_received, daemon=True, args=(serialport, )).start() print(\"To exit the program, enter 'exit'.\") while", "is not opened\") self.serial_port.setRTS(state) def inWaiting(self): if not self.is_open: raise Exception(\"serialport is not", "or time.time() in all other cases. 
The class can also be initialized with", "for modi_port in modi_ports: info_list.append(modi_port.device) if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths path_list =", "stl def list_modi_serialports(): info_list = [] def __is_modi_port(port): return (port.vid == 0x2FDE and", "= None if self.type == self.SERIAL_MODE_COMPORT: waiting = self.serial_port.inWaiting() return waiting @property def", "if hasattr(time, 'monotonic'): # Timeout implementation with time.monotonic(). This function is only #", "not info_list: raise Exception(\"No MODI+ is connected\") serialport = ModiSerialPort(info_list[0]) threading.Thread(target=handle_received, daemon=True, args=(serialport,", "return self.duration else: return max(0, delta) def restart(self, duration): \"\"\"\\ Restart a timeout,", "lenterm = len(expected) line = bytearray() modi_timeout = self.Timeout(self._timeout) while True: c =", "self.type = self.SERIAL_MODE_COMPORT self._port = port self._baudrate = baudrate self._timeout = timeout self._write_timeout", "(float) just as time.time(), but is not affected by system clock # adjustments.", "if __is_modi_port(port)] for modi_port in modi_ports: info_list.append(modi_port.device) if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths", "is_non_blocking and is_infinite are set accordingly. 
\"\"\" if hasattr(time, 'monotonic'): # Timeout implementation", "port in list_modi_winusb_paths(): self.type = self.SERIAL_MODI_WINUSB winusb = ModiWinUsbComPort(path = self._port, baudrate=self._baudrate, timeout=self._timeout)", "has expired\"\"\" return self.target_time is not None and self.time_left() <= 0 def time_left(self):", "timeout(self): return self._timeout @timeout.setter def timeout(self, value): self._timeout = value self.serial_port.timeout = value", "if not self.is_open: raise Exception(\"serialport is not opened\") if size == None and", "given duration\"\"\" self.is_infinite = (duration is None) self.is_non_blocking = (duration == 0) self.duration", "__is_modi_port(port)] for modi_port in modi_ports: info_list.append(modi_port.device) if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths path_list", "self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushOutput() def setDTR(self, state): if not self.is_open:", "implementation with time.monotonic(). This function is only # supported by Python 3.3 and", "other cases. 
The class can also be initialized with 0 or None, in", "timeout expires\"\"\" if self.is_non_blocking: return 0 elif self.is_infinite: return None else: delta =", "= None self._is_open = False if self._port is not None: self.open(self._port) def open(self,", "None if self.type == self.SERIAL_MODE_COMPORT: waiting = self.serial_port.inWaiting() return waiting @property def port(self):", "self.serial_port.write_timeout = value @property def dtr(self): if self.type == self.SERIAL_MODE_COMPORT: return self.serial_port.dtr else:", "if duration is not None: self.target_time = self.TIME() + duration else: self.target_time =", "'exit'.\") while not stop: input_data = input() if input_data == \"exit\": stop =", "self.type == self.SERIAL_MODE_COMPORT: waiting = self.serial_port.inWaiting() return waiting @property def port(self): return self._port", "left until the timeout expires\"\"\" if self.is_non_blocking: return 0 elif self.is_infinite: return None", "self.duration return self.duration else: return max(0, delta) def restart(self, duration): \"\"\"\\ Restart a", "is connected\") serialport = ModiSerialPort(info_list[0]) threading.Thread(target=handle_received, daemon=True, args=(serialport, )).start() print(\"To exit the program,", "self.SERIAL_MODE_COMPORT: return self.serial_port.dtr else: return False class Timeout(object): \"\"\"\\ Abstraction for timeout operations.", "while not stop: input_data = input() if input_data == \"exit\": stop = True", "not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setDTR(state) def setRTS(self, state): if not", "is not opened\") self.serial_port.setDTR(state) def setRTS(self, state): if not self.is_open: raise Exception(\"serialport is", "= self.SERIAL_MODE_COMPORT self._port = port self._baudrate = baudrate self._timeout = timeout self._write_timeout =", "serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port 
= ser else: ser =", "Python versions but has issues if the clock is adjusted while the #", "None and self.type == self.SERIAL_MODE_COMPORT: size = 1 return self.serial_port.read(size) def read_until(self, expected=b\"\\x0A\",", "are left until the timeout expires\"\"\" if self.is_non_blocking: return 0 elif self.is_infinite: return", "= self._port, baudrate=self._baudrate, timeout=self._timeout) self.serial_port = winusb else: ser = serial.Serial(port = self._port,", "a timeout was already set up before. \"\"\" self.duration = duration self.target_time =", "delta > self.duration: # clock jumped, recalculate self.target_time = self.TIME() + self.duration return", "timeout, only supported if a timeout was already set up before. \"\"\" self.duration", "not opened\") return self.serial_port.read_all() def flush(self): if not self.is_open: raise Exception(\"serialport is not", "\"__main__\": stop = False def handle_received(serialport): global stop while not stop: init =", "write(self, data): if not self.is_open: raise Exception(\"serialport is not opened\") if type(data) is", "if the timeout has expired\"\"\" return self.target_time is not None and self.time_left() <=", "expires\"\"\" if self.is_non_blocking: return 0 elif self.is_infinite: return None else: delta = self.target_time", "is not opened\") self.serial_port.flushInput() def flushOutput(self): if not self.is_open: raise Exception(\"serialport is not", "self.serial_port.close() def write(self, data): if not self.is_open: raise Exception(\"serialport is not opened\") if", "is str: data = data.encode(\"utf8\") self.serial_port.write(data) def read(self, size=1): if not self.is_open: raise", "info_list: raise Exception(\"No MODI+ is connected\") serialport = ModiSerialPort(info_list[0]) threading.Thread(target=handle_received, daemon=True, args=(serialport, )).start()", "not opened\") lenterm = len(expected) line = bytearray() modi_timeout = self.Timeout(self._timeout) while True:", "self.serial_port = ser 
self.is_open = True def close(self): if self.is_open: self.serial_port.close() def write(self,", "data.encode(\"utf8\") self.serial_port.write(data) def read(self, size=1): if not self.is_open: raise Exception(\"serialport is not opened\")", "the timeout has expired\"\"\" return self.target_time is not None and self.time_left() <= 0", "above. It returns a time in seconds # (float) just as time.time(), but", "[port for port in stl.comports() if __is_modi_port(port)] for modi_port in modi_ports: info_list.append(modi_port.device) if", "modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort, list_modi_winusb_paths if port in list_modi_winusb_paths(): self.type = self.SERIAL_MODI_WINUSB winusb =", "Exception(\"No MODI+ is connected\") serialport = ModiSerialPort(info_list[0]) threading.Thread(target=handle_received, daemon=True, args=(serialport, )).start() print(\"To exit", "def __init__(self, duration): \"\"\"Initialize a timeout with given duration\"\"\" self.is_infinite = (duration is", "in modi_ports: info_list.append(modi_port.device) if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths path_list = list_modi_winusb_paths() for", "inWaiting(self): if not self.is_open: raise Exception(\"serialport is not opened\") waiting = None if", "a timeout, only supported if a timeout was already set up before. 
\"\"\"", "baudrate(self): return self._baudrate @baudrate.setter def baudrate(self, value): self._baudrate = value self.serial_port.baudrate = value", "init = time.time() recv = serialport.read_until(b\"}\") dt = time.time() - init if recv", "print(\"disconnected\") stop = True break print(f\"dt: {int(dt * 1000.0)}ms - {recv}\") time.sleep(0.001) serialport.close()", "raise Exception(\"No MODI+ is connected\") serialport = ModiSerialPort(info_list[0]) threading.Thread(target=handle_received, daemon=True, args=(serialport, )).start() print(\"To", "and port.pid == 0x0003) modi_ports = [port for port in stl.comports() if __is_modi_port(port)]", "time.time() in all other cases. The class can also be initialized with 0", "self.is_infinite = (duration is None) self.is_non_blocking = (duration == 0) self.duration = duration", "@property def write_timeout(self): return self._write_timeout @write_timeout.setter def write_timeout(self, value): self._write_timeout = value self.serial_port.write_timeout", "with given duration\"\"\" self.is_infinite = (duration is None) self.is_non_blocking = (duration == 0)", "1000.0)}ms - {recv}\") time.sleep(0.001) serialport.close() import threading info_list = list_modi_serialports() if not info_list:", "The attributes is_non_blocking and is_infinite are set accordingly. 
\"\"\" if hasattr(time, 'monotonic'): #", "is not None and self.time_left() <= 0 def time_left(self): \"\"\"Return how many seconds", "self.serial_port.timeout = value @property def write_timeout(self): return self._write_timeout @write_timeout.setter def write_timeout(self, value): self._write_timeout", "type(data) is str: data = data.encode(\"utf8\") self.serial_port.write(data) def read(self, size=1): if not self.is_open:", "for port in stl.comports() if __is_modi_port(port)] for modi_port in modi_ports: info_list.append(modi_port.device) if sys.platform.startswith(\"win\"):", "ModiWinUsbComPort(path = self._port, baudrate=self._baudrate, timeout=self._timeout) self.serial_port = winusb else: ser = serial.Serial(port =", "self.is_open: raise Exception(\"serialport is not opened\") if type(data) is str: data = data.encode(\"utf8\")", "raise Exception(\"serialport is not opened\") self.serial_port.setRTS(state) def inWaiting(self): if not self.is_open: raise Exception(\"serialport", "duration if duration is not None: self.target_time = self.TIME() + duration else: self.target_time", "winusb = ModiWinUsbComPort(path = self._port, baudrate=self._baudrate, timeout=self._timeout) self.serial_port = winusb else: ser =", "to support non-blocking and fully blocking I/O operations. The attributes is_non_blocking and is_infinite", "= ModiWinUsbComPort(path = self._port, baudrate=self._baudrate, timeout=self._timeout) self.serial_port = winusb else: ser = serial.Serial(port", "only # supported by Python 3.3 and above. 
It returns a time in", "raise Exception(\"serialport is not opened\") return self.serial_port.read_all() def flush(self): if not self.is_open: raise", "in enumerate(path_list): info_list.append(value) return info_list class ModiSerialPort(): SERIAL_MODE_COMPORT = 1 SERIAL_MODI_WINUSB = 2", "winusb else: ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port =", "self.TIME() if delta > self.duration: # clock jumped, recalculate self.target_time = self.TIME() +", "self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser self.is_open = True def close(self):", "self._baudrate = baudrate self._timeout = timeout self._write_timeout = write_timeout self.serial_port = None self._is_open", "= list_modi_serialports() if not info_list: raise Exception(\"No MODI+ is connected\") serialport = ModiSerialPort(info_list[0])", "self._port = port self._baudrate = baudrate self._timeout = timeout self._write_timeout = write_timeout self.serial_port", "expired(self): \"\"\"Return a boolean, telling if the timeout has expired\"\"\" return self.target_time is", "self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flush() def flushInput(self): if not self.is_open: raise", "self._baudrate = value self.serial_port.baudrate = value @property def timeout(self): return self._timeout @timeout.setter def", "None) self.is_non_blocking = (duration == 0) self.duration = duration if duration is not", "if type(data) is str: data = data.encode(\"utf8\") self.serial_port.write(data) def read(self, size=1): if not", "exit the program, enter 'exit'.\") while not stop: input_data = input() if input_data", "Using time.monotonic() if available or time.time() in all other cases. 
The class can", "list_modi_serialports(): info_list = [] def __is_modi_port(port): return (port.vid == 0x2FDE and port.pid ==", "self.SERIAL_MODE_COMPORT: waiting = self.serial_port.inWaiting() return waiting @property def port(self): return self._port @port.setter def", "modi_timeout.expired(): break return bytes(line) def read_all(self): if not self.is_open: raise Exception(\"serialport is not", "value self.serial_port.timeout = value @property def write_timeout(self): return self._write_timeout @write_timeout.setter def write_timeout(self, value):", "return self.serial_port.dtr else: return False class Timeout(object): \"\"\"\\ Abstraction for timeout operations. Using", "duration\"\"\" self.is_infinite = (duration is None) self.is_non_blocking = (duration == 0) self.duration =", "not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setRTS(state) def inWaiting(self): if not self.is_open:", "attributes is_non_blocking and is_infinite are set accordingly. \"\"\" if hasattr(time, 'monotonic'): # Timeout", "info_list = [] def __is_modi_port(port): return (port.vid == 0x2FDE and port.pid == 0x0003)", "value @property def timeout(self): return self._timeout @timeout.setter def timeout(self, value): self._timeout = value", "Exception(\"serialport is not opened\") if size == None and self.type == self.SERIAL_MODE_COMPORT: size", "3.3 and above. It returns a time in seconds # (float) just as", "supported if a timeout was already set up before. 
\"\"\" self.duration = duration", "value self.serial_port.baudrate = value @property def timeout(self): return self._timeout @timeout.setter def timeout(self, value):", "self.TIME() + self.duration return self.duration else: return max(0, delta) def restart(self, duration): \"\"\"\\", "time import serial import serial.tools.list_ports as stl def list_modi_serialports(): info_list = [] def", "== \"__main__\": stop = False def handle_received(serialport): global stop while not stop: init", "modi_ports: info_list.append(modi_port.device) if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths path_list = list_modi_winusb_paths() for index,", "None and len(line) >= size: break else: break if modi_timeout.expired(): break return bytes(line)", "timeout has expired\"\"\" return self.target_time is not None and self.time_left() <= 0 def", "timeout is running. TIME = time.time def __init__(self, duration): \"\"\"Initialize a timeout with", "self._timeout @timeout.setter def timeout(self, value): self._timeout = value self.serial_port.timeout = value @property def", "is not opened\") return self.serial_port.read_all() def flush(self): if not self.is_open: raise Exception(\"serialport is", "time.time() - init if recv == None: print(\"disconnected\") stop = True break print(f\"dt:", "def time_left(self): \"\"\"Return how many seconds are left until the timeout expires\"\"\" if", "not None and len(line) >= size: break else: break if modi_timeout.expired(): break return", "time_left(self): \"\"\"Return how many seconds are left until the timeout expires\"\"\" if self.is_non_blocking:", "opened\") self.serial_port.flush() def flushInput(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushInput()", "not opened\") self.serial_port.flushOutput() def setDTR(self, state): if not self.is_open: raise Exception(\"serialport is not", "= port self._baudrate = baudrate 
self._timeout = timeout self._write_timeout = write_timeout self.serial_port =", "self._timeout = value self.serial_port.timeout = value @property def write_timeout(self): return self._write_timeout @write_timeout.setter def", "not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushOutput() def setDTR(self, state): if not", "if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort, list_modi_winusb_paths if port in list_modi_winusb_paths(): self.type =", "- self.TIME() if delta > self.duration: # clock jumped, recalculate self.target_time = self.TIME()", "was already set up before. \"\"\" self.duration = duration self.target_time = self.TIME() +", "self.serial_port.flushOutput() def setDTR(self, state): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setDTR(state)", "time.monotonic else: # Timeout implementation with time.time(). This is compatible with all #", "self._write_timeout = value self.serial_port.write_timeout = value @property def dtr(self): if self.type == self.SERIAL_MODE_COMPORT:", "in order to support non-blocking and fully blocking I/O operations. 
The attributes is_non_blocking", "how many seconds are left until the timeout expires\"\"\" if self.is_non_blocking: return 0", "= time.time() - init if recv == None: print(\"disconnected\") stop = True break", "break if size is not None and len(line) >= size: break else: break", "SERIAL_MODE_COMPORT = 1 SERIAL_MODI_WINUSB = 2 def __init__(self, port = None, baudrate =", "main if __name__ == \"__main__\": stop = False def handle_received(serialport): global stop while", "ModiWinUsbComPort, list_modi_winusb_paths if port in list_modi_winusb_paths(): self.type = self.SERIAL_MODI_WINUSB winusb = ModiWinUsbComPort(path =", "0x2FDE and port.pid == 0x0003) modi_ports = [port for port in stl.comports() if", "with 0 or None, in order to support non-blocking and fully blocking I/O", "list_modi_winusb_paths() for index, value in enumerate(path_list): info_list.append(value) return info_list class ModiSerialPort(): SERIAL_MODE_COMPORT =", "self._port = value self.serial_port.port = value @property def baudrate(self): return self._baudrate @baudrate.setter def", "is not opened\") lenterm = len(expected) line = bytearray() modi_timeout = self.Timeout(self._timeout) while", "daemon=True, args=(serialport, )).start() print(\"To exit the program, enter 'exit'.\") while not stop: input_data", "self.serial_port.dtr else: return False class Timeout(object): \"\"\"\\ Abstraction for timeout operations. Using time.monotonic()", "self.type == self.SERIAL_MODE_COMPORT: return self.serial_port.dtr else: return False class Timeout(object): \"\"\"\\ Abstraction for", "time.time def __init__(self, duration): \"\"\"Initialize a timeout with given duration\"\"\" self.is_infinite = (duration", "if recv == None: print(\"disconnected\") stop = True break print(f\"dt: {int(dt * 1000.0)}ms", "class Timeout(object): \"\"\"\\ Abstraction for timeout operations. 
Using time.monotonic() if available or time.time()", "size=None): if not self.is_open: raise Exception(\"serialport is not opened\") lenterm = len(expected) line", "if not self.is_open: raise Exception(\"serialport is not opened\") waiting = None if self.type", "affected by system clock # adjustments. TIME = time.monotonic else: # Timeout implementation", "= self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser self.is_open = True def", "__is_modi_port(port): return (port.vid == 0x2FDE and port.pid == 0x0003) modi_ports = [port for", "opened\") waiting = None if self.type == self.SERIAL_MODE_COMPORT: waiting = self.serial_port.inWaiting() return waiting", "self._is_open = False if self._port is not None: self.open(self._port) def open(self, port): self._port", "up before. \"\"\" self.duration = duration self.target_time = self.TIME() + duration # main", "value self.serial_port.port = value @property def baudrate(self): return self._baudrate @baudrate.setter def baudrate(self, value):", "# Python versions but has issues if the clock is adjusted while the", "with time.time(). 
This is compatible with all # Python versions but has issues", "recv == None: print(\"disconnected\") stop = True break print(f\"dt: {int(dt * 1000.0)}ms -", "if self.is_open: self.serial_port.close() def write(self, data): if not self.is_open: raise Exception(\"serialport is not", "self.is_open: raise Exception(\"serialport is not opened\") return self.serial_port.read_all() def flush(self): if not self.is_open:", "open(self, port): self._port = port if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort, list_modi_winusb_paths if", "len(line) >= size: break else: break if modi_timeout.expired(): break return bytes(line) def read_all(self):", "self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushInput() def flushOutput(self): if not self.is_open: raise", "raise Exception(\"serialport is not opened\") self.serial_port.flushInput() def flushOutput(self): if not self.is_open: raise Exception(\"serialport", "= value @property def timeout(self): return self._timeout @timeout.setter def timeout(self, value): self._timeout =", "\"\"\"Return a boolean, telling if the timeout has expired\"\"\" return self.target_time is not", "recv = serialport.read_until(b\"}\") dt = time.time() - init if recv == None: print(\"disconnected\")", "for timeout operations. 
Using time.monotonic() if available or time.time() in all other cases.", "elif self.is_infinite: return None else: delta = self.target_time - self.TIME() if delta >", "stl.comports() if __is_modi_port(port)] for modi_port in modi_ports: info_list.append(modi_port.device) if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import", "def flushOutput(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushOutput() def setDTR(self,", "self.SERIAL_MODE_COMPORT: size = 1 return self.serial_port.read(size) def read_until(self, expected=b\"\\x0A\", size=None): if not self.is_open:", "None and self.time_left() <= 0 def time_left(self): \"\"\"Return how many seconds are left", "(duration == 0) self.duration = duration if duration is not None: self.target_time =", "delta = self.target_time - self.TIME() if delta > self.duration: # clock jumped, recalculate", "stop = False def handle_received(serialport): global stop while not stop: init = time.time()", "is not opened\") self.serial_port.flushOutput() def setDTR(self, state): if not self.is_open: raise Exception(\"serialport is", "serial import serial.tools.list_ports as stl def list_modi_serialports(): info_list = [] def __is_modi_port(port): return", "@property def timeout(self): return self._timeout @timeout.setter def timeout(self, value): self._timeout = value self.serial_port.timeout", "is compatible with all # Python versions but has issues if the clock", "available or time.time() in all other cases. 
The class can also be initialized", "duration): \"\"\"\\ Restart a timeout, only supported if a timeout was already set", "def flushInput(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushInput() def flushOutput(self):", "dt = time.time() - init if recv == None: print(\"disconnected\") stop = True", "program, enter 'exit'.\") while not stop: input_data = input() if input_data == \"exit\":", "port if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort, list_modi_winusb_paths if port in list_modi_winusb_paths(): self.type", "the clock is adjusted while the # timeout is running. TIME = time.time", "baudrate self._timeout = timeout self._write_timeout = write_timeout self.serial_port = None self._is_open = False", "value @property def write_timeout(self): return self._write_timeout @write_timeout.setter def write_timeout(self, value): self._write_timeout = value", "This function is only # supported by Python 3.3 and above. 
It returns", "raise Exception(\"serialport is not opened\") self.serial_port.flushOutput() def setDTR(self, state): if not self.is_open: raise", "= value self.serial_port.baudrate = value @property def timeout(self): return self._timeout @timeout.setter def timeout(self,", "opened\") self.serial_port.setRTS(state) def inWaiting(self): if not self.is_open: raise Exception(\"serialport is not opened\") waiting", "else: self.target_time = None def expired(self): \"\"\"Return a boolean, telling if the timeout", "port): self._port = port if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort, list_modi_winusb_paths if port", "flushOutput(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushOutput() def setDTR(self, state):", "versions but has issues if the clock is adjusted while the # timeout", "None: self.target_time = self.TIME() + duration else: self.target_time = None def expired(self): \"\"\"Return", "= write_timeout self.serial_port = None self._is_open = False if self._port is not None:", "= 0.2, write_timeout = None): self.type = self.SERIAL_MODE_COMPORT self._port = port self._baudrate =", "if line[-lenterm:] == expected: break if size is not None and len(line) >=", "from modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort, list_modi_winusb_paths if port in list_modi_winusb_paths(): self.type = self.SERIAL_MODI_WINUSB winusb", "not opened\") waiting = None if self.type == self.SERIAL_MODE_COMPORT: waiting = self.serial_port.inWaiting() return", "not self.is_open: raise Exception(\"serialport is not opened\") lenterm = len(expected) line = bytearray()", "None: self.open(self._port) def open(self, port): self._port = port if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import", "= None, baudrate = 921600, timeout = 0.2, write_timeout = None): self.type =", "data): if not self.is_open: 
raise Exception(\"serialport is not opened\") if type(data) is str:", "def flush(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flush() def flushInput(self):", "clock is adjusted while the # timeout is running. TIME = time.time def", "\"\"\"Initialize a timeout with given duration\"\"\" self.is_infinite = (duration is None) self.is_non_blocking =", "self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setDTR(state) def setRTS(self, state): if not self.is_open:", "seconds # (float) just as time.time(), but is not affected by system clock", "value in enumerate(path_list): info_list.append(value) return info_list class ModiSerialPort(): SERIAL_MODE_COMPORT = 1 SERIAL_MODI_WINUSB =", "not opened\") if size == None and self.type == self.SERIAL_MODE_COMPORT: size = 1", "return self.target_time is not None and self.time_left() <= 0 def time_left(self): \"\"\"Return how", "time.monotonic() if available or time.time() in all other cases. 
The class can also", "== None: print(\"disconnected\") stop = True break print(f\"dt: {int(dt * 1000.0)}ms - {recv}\")", "if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushInput() def flushOutput(self): if not", "self.serial_port.setDTR(state) def setRTS(self, state): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setRTS(state)", "= self.SERIAL_MODI_WINUSB winusb = ModiWinUsbComPort(path = self._port, baudrate=self._baudrate, timeout=self._timeout) self.serial_port = winusb else:", "threading.Thread(target=handle_received, daemon=True, args=(serialport, )).start() print(\"To exit the program, enter 'exit'.\") while not stop:", "str: data = data.encode(\"utf8\") self.serial_port.write(data) def read(self, size=1): if not self.is_open: raise Exception(\"serialport", "exclusive=True) self.serial_port = ser else: ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout,", "opened\") lenterm = len(expected) line = bytearray() modi_timeout = self.Timeout(self._timeout) while True: c", "not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushInput() def flushOutput(self): if not self.is_open:", "waiting = None if self.type == self.SERIAL_MODE_COMPORT: waiting = self.serial_port.inWaiting() return waiting @property", "None, in order to support non-blocking and fully blocking I/O operations. 
The attributes", "= None): self.type = self.SERIAL_MODE_COMPORT self._port = port self._baudrate = baudrate self._timeout =", "def baudrate(self, value): self._baudrate = value self.serial_port.baudrate = value @property def timeout(self): return", "many seconds are left until the timeout expires\"\"\" if self.is_non_blocking: return 0 elif", "SERIAL_MODI_WINUSB = 2 def __init__(self, port = None, baudrate = 921600, timeout =", "self.type == self.SERIAL_MODE_COMPORT: size = 1 return self.serial_port.read(size) def read_until(self, expected=b\"\\x0A\", size=None): if", "value @property def baudrate(self): return self._baudrate @baudrate.setter def baudrate(self, value): self._baudrate = value", "flushInput(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushInput() def flushOutput(self): if", "self.is_infinite: return None else: delta = self.target_time - self.TIME() if delta > self.duration:", "if port in list_modi_winusb_paths(): self.type = self.SERIAL_MODI_WINUSB winusb = ModiWinUsbComPort(path = self._port, baudrate=self._baudrate,", "port(self, value): self._port = value self.serial_port.port = value @property def baudrate(self): return self._baudrate", "= self.target_time - self.TIME() if delta > self.duration: # clock jumped, recalculate self.target_time", "def write_timeout(self): return self._write_timeout @write_timeout.setter def write_timeout(self, value): self._write_timeout = value self.serial_port.write_timeout =", "self.target_time - self.TIME() if delta > self.duration: # clock jumped, recalculate self.target_time =", "= time.time() recv = serialport.read_until(b\"}\") dt = time.time() - init if recv ==", "# adjustments. TIME = time.monotonic else: # Timeout implementation with time.time(). This is", "self.serial_port = winusb else: ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True)", "supported by Python 3.3 and above. 
It returns a time in seconds #", "is not opened\") if size == None and self.type == self.SERIAL_MODE_COMPORT: size =", "True def close(self): if self.is_open: self.serial_port.close() def write(self, data): if not self.is_open: raise", "opened\") return self.serial_port.read_all() def flush(self): if not self.is_open: raise Exception(\"serialport is not opened\")", "info_list.append(modi_port.device) if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import list_modi_winusb_paths path_list = list_modi_winusb_paths() for index, value", "duration else: self.target_time = None def expired(self): \"\"\"Return a boolean, telling if the", "value @property def dtr(self): if self.type == self.SERIAL_MODE_COMPORT: return self.serial_port.dtr else: return False", "self.serial_port.write(data) def read(self, size=1): if not self.is_open: raise Exception(\"serialport is not opened\") if", "self._write_timeout = write_timeout self.serial_port = None self._is_open = False if self._port is not", "def read_all(self): if not self.is_open: raise Exception(\"serialport is not opened\") return self.serial_port.read_all() def", ")).start() print(\"To exit the program, enter 'exit'.\") while not stop: input_data = input()", "in stl.comports() if __is_modi_port(port)] for modi_port in modi_ports: info_list.append(modi_port.device) if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb", "# (float) just as time.time(), but is not affected by system clock #", "value self.serial_port.write_timeout = value @property def dtr(self): if self.type == self.SERIAL_MODE_COMPORT: return self.serial_port.dtr", "self.type = self.SERIAL_MODI_WINUSB winusb = ModiWinUsbComPort(path = self._port, baudrate=self._baudrate, timeout=self._timeout) self.serial_port = winusb", "line += c if line[-lenterm:] == expected: break if size is not None", "= [] def __is_modi_port(port): return (port.vid == 0x2FDE and port.pid == 0x0003) 
modi_ports", "self.is_open: self.serial_port.close() def write(self, data): if not self.is_open: raise Exception(\"serialport is not opened\")", "is not None and len(line) >= size: break else: break if modi_timeout.expired(): break", "self.TIME() + duration # main if __name__ == \"__main__\": stop = False def", "False if self._port is not None: self.open(self._port) def open(self, port): self._port = port", "clock # adjustments. TIME = time.monotonic else: # Timeout implementation with time.time(). This", "def setRTS(self, state): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setRTS(state) def", "self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser else: ser = serial.Serial(port =", "adjustments. TIME = time.monotonic else: # Timeout implementation with time.time(). This is compatible", "self.serial_port.flush() def flushInput(self): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushInput() def", "\"\"\" if hasattr(time, 'monotonic'): # Timeout implementation with time.monotonic(). This function is only", "time.time(). This is compatible with all # Python versions but has issues if", "port in stl.comports() if __is_modi_port(port)] for modi_port in modi_ports: info_list.append(modi_port.device) if sys.platform.startswith(\"win\"): from", "__name__ == \"__main__\": stop = False def handle_received(serialport): global stop while not stop:", "not opened\") self.serial_port.setRTS(state) def inWaiting(self): if not self.is_open: raise Exception(\"serialport is not opened\")", "baudrate(self, value): self._baudrate = value self.serial_port.baudrate = value @property def timeout(self): return self._timeout", "just as time.time(), but is not affected by system clock # adjustments. 
TIME", "info_list = list_modi_serialports() if not info_list: raise Exception(\"No MODI+ is connected\") serialport =", "# clock jumped, recalculate self.target_time = self.TIME() + self.duration return self.duration else: return", "Exception(\"serialport is not opened\") return self.serial_port.read_all() def flush(self): if not self.is_open: raise Exception(\"serialport", "= 921600, timeout = 0.2, write_timeout = None): self.type = self.SERIAL_MODE_COMPORT self._port =", "operations. The attributes is_non_blocking and is_infinite are set accordingly. \"\"\" if hasattr(time, 'monotonic'):", "else: # Timeout implementation with time.time(). This is compatible with all # Python", "* 1000.0)}ms - {recv}\") time.sleep(0.001) serialport.close() import threading info_list = list_modi_serialports() if not", "self.open(self._port) def open(self, port): self._port = port if sys.platform.startswith(\"win\"): from modi2_firmware_updater.util.modi_winusb.modi_winusb import ModiWinUsbComPort,", "not stop: input_data = input() if input_data == \"exit\": stop = True break", "= self.read(1) if c: line += c if line[-lenterm:] == expected: break if", "__init__(self, port = None, baudrate = 921600, timeout = 0.2, write_timeout = None):", "1 SERIAL_MODI_WINUSB = 2 def __init__(self, port = None, baudrate = 921600, timeout", "adjusted while the # timeout is running. 
TIME = time.time def __init__(self, duration):", "None): self.type = self.SERIAL_MODE_COMPORT self._port = port self._baudrate = baudrate self._timeout = timeout", "serialport.read_until(b\"}\") dt = time.time() - init if recv == None: print(\"disconnected\") stop =", "write_timeout(self): return self._write_timeout @write_timeout.setter def write_timeout(self, value): self._write_timeout = value self.serial_port.write_timeout = value", "return bytes(line) def read_all(self): if not self.is_open: raise Exception(\"serialport is not opened\") return", "Exception(\"serialport is not opened\") self.serial_port.flushInput() def flushOutput(self): if not self.is_open: raise Exception(\"serialport is", "False def handle_received(serialport): global stop while not stop: init = time.time() recv =", "Restart a timeout, only supported if a timeout was already set up before.", "self.duration = duration if duration is not None: self.target_time = self.TIME() + duration", "self.serial_port = None self._is_open = False if self._port is not None: self.open(self._port) def", "if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flushOutput() def setDTR(self, state): if", "with all # Python versions but has issues if the clock is adjusted", "is not opened\") self.serial_port.flush() def flushInput(self): if not self.is_open: raise Exception(\"serialport is not", "state): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setRTS(state) def inWaiting(self): if", "raise Exception(\"serialport is not opened\") self.serial_port.setDTR(state) def setRTS(self, state): if not self.is_open: raise", "path_list = list_modi_winusb_paths() for index, value in enumerate(path_list): info_list.append(value) return info_list class ModiSerialPort():", "@property def dtr(self): if self.type == self.SERIAL_MODE_COMPORT: return self.serial_port.dtr else: return False class", "a boolean, telling if the timeout has expired\"\"\" return 
self.target_time is not None", "write_timeout = None): self.type = self.SERIAL_MODE_COMPORT self._port = port self._baudrate = baudrate self._timeout", "= self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser else: ser = serial.Serial(port", "== 0x0003) modi_ports = [port for port in stl.comports() if __is_modi_port(port)] for modi_port", "opened\") self.serial_port.setDTR(state) def setRTS(self, state): if not self.is_open: raise Exception(\"serialport is not opened\")", "if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.flush() def flushInput(self): if not", "timeout = 0.2, write_timeout = None): self.type = self.SERIAL_MODE_COMPORT self._port = port self._baudrate", "# Timeout implementation with time.monotonic(). This function is only # supported by Python", "not affected by system clock # adjustments. TIME = time.monotonic else: # Timeout", "c = self.read(1) if c: line += c if line[-lenterm:] == expected: break", "index, value in enumerate(path_list): info_list.append(value) return info_list class ModiSerialPort(): SERIAL_MODE_COMPORT = 1 SERIAL_MODI_WINUSB", "value): self._timeout = value self.serial_port.timeout = value @property def write_timeout(self): return self._write_timeout @write_timeout.setter", "max(0, delta) def restart(self, duration): \"\"\"\\ Restart a timeout, only supported if a", "self.serial_port = ser else: ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout, write_timeout=self._write_timeout, exclusive=True)", "= value self.serial_port.timeout = value @property def write_timeout(self): return self._write_timeout @write_timeout.setter def write_timeout(self,", "if not self.is_open: raise Exception(\"serialport is not opened\") lenterm = len(expected) line =", "return info_list class ModiSerialPort(): SERIAL_MODE_COMPORT = 1 SERIAL_MODI_WINUSB = 2 def __init__(self, port", 
"list_modi_winusb_paths(): self.type = self.SERIAL_MODI_WINUSB winusb = ModiWinUsbComPort(path = self._port, baudrate=self._baudrate, timeout=self._timeout) self.serial_port =", "state): if not self.is_open: raise Exception(\"serialport is not opened\") self.serial_port.setDTR(state) def setRTS(self, state):", "timeout self._write_timeout = write_timeout self.serial_port = None self._is_open = False if self._port is", "= (duration is None) self.is_non_blocking = (duration == 0) self.duration = duration if", "self.target_time = self.TIME() + duration # main if __name__ == \"__main__\": stop =", "stop = True break print(f\"dt: {int(dt * 1000.0)}ms - {recv}\") time.sleep(0.001) serialport.close() import", "also be initialized with 0 or None, in order to support non-blocking and", "print(\"To exit the program, enter 'exit'.\") while not stop: input_data = input() if", "but has issues if the clock is adjusted while the # timeout is", "1 return self.serial_port.read(size) def read_until(self, expected=b\"\\x0A\", size=None): if not self.is_open: raise Exception(\"serialport is", "= bytearray() modi_timeout = self.Timeout(self._timeout) while True: c = self.read(1) if c: line", "return self._write_timeout @write_timeout.setter def write_timeout(self, value): self._write_timeout = value self.serial_port.write_timeout = value @property", "if size == None and self.type == self.SERIAL_MODE_COMPORT: size = 1 return self.serial_port.read(size)", "in seconds # (float) just as time.time(), but is not affected by system", "not opened\") self.serial_port.flushInput() def flushOutput(self): if not self.is_open: raise Exception(\"serialport is not opened\")", "write_timeout=self._write_timeout, exclusive=True) self.serial_port = ser else: ser = serial.Serial(port = self._port, baudrate=self._baudrate, timeout=self._timeout,", "cases. The class can also be initialized with 0 or None, in order", "time.monotonic(). This function is only # supported by Python 3.3 and above. 
It", "self._write_timeout @write_timeout.setter def write_timeout(self, value): self._write_timeout = value self.serial_port.write_timeout = value @property def", "by system clock # adjustments. TIME = time.monotonic else: # Timeout implementation with", "None def expired(self): \"\"\"Return a boolean, telling if the timeout has expired\"\"\" return", "initialized with 0 or None, in order to support non-blocking and fully blocking", "TIME = time.monotonic else: # Timeout implementation with time.time(). This is compatible with", "before. \"\"\" self.duration = duration self.target_time = self.TIME() + duration # main if", "[] def __is_modi_port(port): return (port.vid == 0x2FDE and port.pid == 0x0003) modi_ports =", "= duration if duration is not None: self.target_time = self.TIME() + duration else:", "def restart(self, duration): \"\"\"\\ Restart a timeout, only supported if a timeout was" ]
[ "List of molar fractions \"\"\" calc_settings = self.plotting_preferences[\"Pressure density\"][\"Calc\"] tpv_settings = self.plotting_preferences[\"Pressure density\"][\"TPV\"]", "= plot_settings[\"Title\"] self.axes.plot(x, y, color=line_color, label=\"Phase envelope\") self.axes.scatter([crit_x], [crit_y], color=point_color, label=\"Critical point\") self.axes.set_title(title)", "/ v for v in V_ph_env], P_ph_env, label=\"Phase envelope\") self.axes.scatter([1 / V_c], [P_c],", "= calc_settings[\"Temperature\"] p_max = calc_settings[\"Maximum pressure\"] p_min = calc_settings[\"Minimum pressure\"] dz_max = calc_settings[\"Maximum", "Hides / shows isobar lines in the plot if a plot exists :param", "s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line)", "tp.enthalpy_tv(T_c, V_c, fractions) S_c = tp.entropy_tv(T_c, V_c, fractions) except Exception as e: msg", "= \"VLE\" for i in range(len(VLE)): self.axes.plot(VLE[i][:, 0], VLE[i][:, 1], linestyle=linestyles[0], color=colors[0], label=label)", "i == 0: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\", label=\"Isobar\") else: p_line, = self.axes.plot(s_vals,", "P, H, P_c, H_c, fractions): \"\"\" Return plot data for a PH phase", "Calculate critical T, V, P T_c, V_c, P_c = tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess, tol=crit_tol)", "plot_settings[\"Colors\"] linestyles = [\"-\", \"--\", \":\", \"-.\"] label = \"VLE\" for i in", "calc_settings[\"Maximum pressure\"] p_min = calc_settings[\"Minimum pressure\"] dz_max = calc_settings[\"Maximum dz\"] dlns_max = calc_settings[\"Maximum", "label=\"Isotherm\") else: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, h_vals =", "fractions: List of molar fractions :return: x: x values for plot, y: y", "maximum_pressure=P_max, minimum_temperature=T_min, 
maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color,", "color=colors[0], label=label) label = None label = \"LLVE\" for i in range(len(LLVE)): self.axes.plot(LLVE[i][:,", "values :param T_c: Critical temperature :param H_c: Critical enthalpy :param fractions: List of", "self.plotting_preferences[\"Global binary\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Global binary\"][\"Plotting\"] min_press = calc_settings[\"Minimum pressure\"] min_temp = calc_settings[\"Minimum", "the plot if a plot exists :param is_checked: Status of isenthalp button (bool)", "pxy plot :param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Binary pxy\"][\"Calc\"] plot_settings =", "p_vals, v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i ==", "plot_binary_pxy(self, tp): \"\"\" Plots a binary pxy plot :param tp: Thermopack instance \"\"\"", "self.isenthalps: if is_checked: for line in self.isenthalps: line.set_linestyle(\"solid\") else: for line in self.isenthalps:", "a phase envelope :param tp: Thermopack instance :param prim_vars: Primary variables for the", "variables try: T_c, V_c, P_c = tp.critical(n=fractions, temp=temp, v=v, tol=tol) H_c = tp.enthalpy_tv(T_c,", "S: Entropy values :param P_c: Critical pressure :param S_c: Critical entropy :param fractions:", "crit_x = S_c crit_y = P_c # isotherms, isenthalps temperatures = T_list enthalpies", "H_c crit_y = T_c # isobars, isentropes pressures = P_list entropies = S_list", "return x, y, crit_x, crit_y def plot_envelope_TH(self, tp, T, H, T_c, H_c, fractions):", "plot_settings[\"Title\"] self.axes.plot(x, y, color=line_color, label=\"Phase envelope\") self.axes.scatter([crit_x], [crit_y], color=point_color, label=\"Critical point\") self.axes.set_title(title) self.axes.grid(grid_on)", "label\"] grid_on = plot_settings[\"Grid on\"] if title == \"<NAME> and 
Scott type: \":", "plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] title = plot_settings[\"Title\"]", "Return plot data for a PS phase envelope :param tp: Thermopack instance :param", "the plot (e.g. PT, PH, ..) :param fractions: List of molar fractions for", "= tp.critical(n=fractions, temp=temp, v=v, tol=tol) H_c = tp.enthalpy_tv(T_c, V_c, fractions) S_c = tp.entropy_tv(T_c,", "\"--\", \":\", \"-.\"] label = \"VLE\" for i in range(len(VLE)): self.axes.plot(VLE[i][:, 0], VLE[i][:,", "prim_vars: Primary variables for the plot (e.g. PT, PH, ..) :param fractions: List", "pressures = P_list enthalpies = H_list self.isenthalps = [] self.isobars = [] for", "fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(h_vals,", "components self.isenthalps = None self.isentropes = None self.isotherms = None self.isobars = None", "temperature\"] p_max = tpv_settings[\"Maximum pressure\"] step_size = tpv_settings[\"Step size\"] # Calculate T, P,", ":return: x: x values for plot, y: y values for plot, crit_x: x", "density\"][\"Calc\"] tpv_settings = self.plotting_preferences[\"Pressure density\"][\"TPV\"] crit_settings = self.plotting_preferences[\"Pressure density\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Pressure density\"][\"Plotting\"]", "== 0: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color, label=\"Isobar\") else: p_line, = self.axes.plot(h_vals, t_vals,", "global isopleth_2_color global P_min global P_max global T_min global T_max global nmax isopleth_1_color", "{ 1: \"I\", 2: \"II\", 3: \"III\", 4: \"IV\", 5: \"V\" } title", "= self.plot_envelope_PS(tp, P, S, P_c, S_c, fractions) elif prim_vars == \"TH\": x, y,", "and self.isenthalps: if is_checked: for line in self.isenthalps: line.set_linestyle(\"solid\") else: for line in", "self.draw() else: return def toggle_isotherms(self, 
is_checked): \"\"\" Hides / shows isotherm lines in", "else: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i],", "label=\"Phase envelope\") self.axes.scatter([1 / V_c], [P_c], label=\"Critical point\") for i in range(len(P_lists)): self.axes.plot(rho_list,", "= tp.pressure_tv(temp=T, volume=V, n=fractions) P_list.append(P) P_lists.append(P_list) rho_list = 1 / V_list title =", "p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isotherms = None self.isobars = None return x, y, crit_x,", "Entropy values :param T_c: Critical temperature :param S_c: Critical entropy :param fractions: List", "from matplotlib.figure import Figure from gui.utils import MessageBox import numpy as np class", "T_list = calc_settings[\"Temperatures\"] V_start = V_c * calc_settings[\"Volume range start\"] V_end = V_c", "for line in self.isentropes: line.set_linestyle(\"None\") self.draw() else: return def toggle_isotherms(self, is_checked): \"\"\" Hides", "ylabel = plot_settings[\"y label\"] self.axes.plot([1 / v for v in V_ph_env], P_ph_env, label=\"Phase", "\"CRIT\" for i in range(len(CRIT)): self.axes.plot(CRIT[i][:, 0], CRIT[i][:, 1], linestyle=linestyles[2], color=colors[2], label=label) label", "color=line_color) if L2VE[0] is not None: self.axes.plot(L2VE[0], L2VE[2], color=line_color) self.axes.plot(L2VE[1], L2VE[2], color=line_color) grid_on", "P_c, S_c, fractions): \"\"\" Return plot data for a PS phase envelope :param", "np class MplCanvas(FigureCanvasQTAgg): \"\"\" A canvas for matplotlib plots. 
Contains all plot functionality", "plot data for a PH phase envelope :param tp: Thermopack instance :param P:", "= tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: h_line,", "isopleth_settings[\"Minimum temperature\"] T_max = isopleth_settings[\"Maximum temperature\"] nmax = isopleth_settings[\"N max\"] # Plot depending", "line_color = plot_settings[\"Colors\"][0] if LLE[0] is not None: self.axes.plot(LLE[0], LLE[2], color=line_color) self.axes.plot(LLE[1], LLE[2],", "P_c, H_c, fractions): \"\"\" Return plot data for a PH phase envelope :param", "plot, y: y values for plot, crit_x: x value for critical point, crit_y:", "Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True) x = H y = P crit_x", "\"\"\" Hides / shows isotherm lines in the plot if a plot exists", "= calc_settings[\"Maximum dz\"] dlns_max = calc_settings[\"Maximum dlns\"] LLE, L1VE, L2VE = tp.get_binary_pxy(temp=T, maximum_pressure=p_max,", "pressure\"] min_temp = calc_settings[\"Minimum temperature\"] azeotropes = calc_settings[\"Azeotropes\"] KSTYPE, VLE, LLVE, CRIT, AZ", "self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.legend(loc=\"best\") self.draw() def plot_global_binary(self, tp): \"\"\" Plots a binary", "plot :param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Binary pxy\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Binary", "on\"] if title == \"<NAME> and Scott type: \": title += ks_strings[KSTYPE] self.axes.set_title(title)", "def plot_global_binary(self, tp): \"\"\" Plots a binary pxy plot :param tp: Thermopack instance", "self.isenthalps = None self.isotherms = None return x, y, crit_x, crit_y def 
plot_envelope_TS(self,", "pressure\"] step_size = tpv_settings[\"Step size\"] # Calculate T, P, V T, P, V", "tp, fractions): \"\"\" Plots a pressure density plot :param tp: Thermopack instance :param", "in self.isotherms: line.set_linestyle(\"None\") self.draw() else: return def toggle_isobars(self, is_checked): \"\"\" Hides / shows", "phase envelope :param tp: Thermopack instance :param prim_vars: Primary variables for the plot", "h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color) self.isenthalps.append(h_line)", "LLVE[i][:, 1], linestyle=linestyles[1], color=colors[1], label=label) label = None label = \"CRIT\" for i", "= 1 / V_list title = plot_settings[\"Title\"] grid_on = plot_settings[\"Grid on\"] xlabel =", "entries in the legend legend = True if legend: if n_isopleths > 0:", "P, T_c, P_c, fractions): \"\"\" Return plot data for a PT phase envelope", "else: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\") self.isobars.append(p_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i],", "self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\", label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\") self.isenthalps.append(h_line) self.isentropes =", "P_c = tp.critical(n=fractions, temp=temp, v=v, tol=tol) H_c = tp.enthalpy_tv(T_c, V_c, fractions) S_c =", "v for v in V_ph_env], P_ph_env, label=\"Phase envelope\") self.axes.scatter([1 / V_c], [P_c], label=\"Critical", "plot data for a PT phase envelope :param tp: Thermopack instance :param T:", "P_c: Critical pressure :param H_c: Critical enthalpy :param fractions: List of molar fractions", "VLE, LLVE, CRIT, AZ = tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp, include_azeotropes=azeotropes) colors = plot_settings[\"Colors\"] linestyles =", "envelope\"][\"TPV\"] isopleth_settings = self.plotting_preferences[\"Phase 
envelope\"][\"Isopleths\"] critical_settings = self.plotting_preferences[\"Phase envelope\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Phase envelope\"][\"Plotting\"]", "= \"AZ\" for i in range(len(AZ)): self.axes.plot(AZ[i][:, 0], AZ[i][:, 1], linestyle=linestyles[3], color=colors[3], label=label)", "self.isentropes = [] for i in range(len(enthalpies)): t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i],", "grid_on = plot_settings[\"Grid on\"] if title == \"<NAME> and Scott type: \": title", "color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isobars = None return x, y, crit_x, crit_y", "= isopleth_settings[\"N max\"] # Plot depending on which primary variables are chosen if", "gui.utils import MessageBox import numpy as np class MplCanvas(FigureCanvasQTAgg): \"\"\" A canvas for", "= H_list self.isenthalps = [] self.isobars = [] for i in range(len(pressures)): t_vals,", "type: \": title += ks_strings[KSTYPE] self.axes.set_title(title) legend = self.axes.legend(loc=\"best\", numpoints=1) legend.get_frame().set_linewidth(0.0) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel)", "T, H, T_c, H_c, fractions): \"\"\" Return plot data for a PS phase", "S_c crit_y = P_c # isotherms, isenthalps temperatures = T_list enthalpies = H_list", "fractions): \"\"\" Return plot data for a PT phase envelope :param tp: Thermopack", "# Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True) x = S y = T", "= crit_settings[\"Temperature\"] crit_v_guess = crit_settings[\"Volume\"] crit_tol = crit_settings[\"Error tolerance\"] # Calculate critical T,", "plot_settings = self.plotting_preferences[\"Global binary\"][\"Plotting\"] min_press = calc_settings[\"Minimum pressure\"] min_temp = calc_settings[\"Minimum temperature\"] azeotropes", "line_color = 
plot_settings[\"Colors\"][0] point_color = plot_settings[\"Colors\"][1] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x", "\"VLE\" for i in range(len(VLE)): self.axes.plot(VLE[i][:, 0], VLE[i][:, 1], linestyle=linestyles[0], color=colors[0], label=label) label", "= self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\") self.isenthalps.append(h_line) self.isentropes = None self.isotherms = None return x,", "instance \"\"\" calc_settings = self.plotting_preferences[\"Global binary\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Global binary\"][\"Plotting\"] min_press = calc_settings[\"Minimum", "0: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color)", "tp: Thermopack instance :param P: Pressure values :param H: Enthalpy values :param P_c:", "value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True) x", "= self.axes.plot(h_vals, t_vals, color=isopleth_1_color) self.isobars.append(p_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min,", "crit_v_guess = crit_settings[\"Volume\"] crit_tol = crit_settings[\"Error tolerance\"] # Calculate critical T, V, P", "for a PS phase envelope :param tp: Thermopack instance :param P: Pressure values", "pressures = P_list entropies = S_list self.isobars = [] self.isentropes = [] for", "..) 
:param fractions: List of molar fractions for the components \"\"\" tpv_settings =", "if a plot exists :param is_checked: Status of isotherm button (bool) \"\"\" if", "== 0: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, t_vals,", "T y = P crit_x = T_c crit_y = P_c # Isenthalps, isentropes", "V, P T_c, V_c, P_c = tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess, tol=crit_tol) T_list = calc_settings[\"Temperatures\"]", "prim_vars == \"PS\": x, y, crit_x, crit_y = self.plot_envelope_PS(tp, P, S, P_c, S_c,", "= H_c crit_y = P_c # isotherms, isentropes temperatures = T_list entropies =", "h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min, maximum_temperature=T_max) if i == 0: p_line, = self.axes.plot(s_vals,", "binary\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Global binary\"][\"Plotting\"] min_press = calc_settings[\"Minimum pressure\"] min_temp = calc_settings[\"Minimum temperature\"]", "label=\"Isobar\") else: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\") self.isobars.append(p_line) t_vals, p_vals, v_vals, s_vals =", "= plot_settings[\"Colors\"][0] if LLE[0] is not None: self.axes.plot(LLE[0], LLE[2], color=line_color) self.axes.plot(LLE[1], LLE[2], color=line_color)", "isotherm button (bool) \"\"\" if not self.empty and self.isotherms: if is_checked: for line", "self.isenthalps = [] self.isobars = [] for i in range(len(pressures)): t_vals, v_vals, s_vals,", "tpv_settings = self.plotting_preferences[\"Phase envelope\"][\"TPV\"] isopleth_settings = self.plotting_preferences[\"Phase envelope\"][\"Isopleths\"] critical_settings = self.plotting_preferences[\"Phase envelope\"][\"Critical\"] plot_settings", "tolerance\"] # Calculate critical T, V, P T_c, V_c, P_c = tp.critical(n=fractions, temp=crit_t_guess,", "p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color) self.isobars.append(p_line) t_vals, p_vals, v_vals, h_vals = 
tp.get_isentrope(entropies[i], fractions,", "tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, =", "plot_envelope(self, tp, prim_vars, fractions): \"\"\" Plots a phase envelope :param tp: Thermopack instance", "= plot_settings[\"Colors\"] linestyles = [\"-\", \"--\", \":\", \"-.\"] label = \"VLE\" for i", "of isenthalp button (bool) \"\"\" if not self.empty and self.isenthalps: if is_checked: for", "all plot functionality for Plot Mode \"\"\" def __init__(self, components, plotting_preferences): self.fig =", "minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color, label=\"Isentrope\")", "3: \"III\", 4: \"IV\", 5: \"V\" } title = plot_settings[\"Title\"] xlabel = plot_settings[\"x", "= T_c # Isenthalps, isobars pressures = P_list enthalpies = H_list self.isenthalps =", "T_list enthalpies = H_list self.isotherms = [] self.isenthalps = [] for i in", "maximum_pressure=p_max, minimum_pressure=p_min, maximum_dz=dz_max, maximum_dlns=dlns_max) line_color = plot_settings[\"Colors\"][0] if LLE[0] is not None: self.axes.plot(LLE[0],", "value for critical point, crit_y: y value for critical point, \"\"\" # Display", "self.isotherms = [] self.isenthalps = [] for i in range(len(temperatures)): p_vals, v_vals, s_vals,", "pressure :param S_c: Critical entropy :param fractions: List of molar fractions :return: x:", "self.plot_envelope_TS(tp, T, S, T_c, S_c, fractions) else: return # Plotting line_color = plot_settings[\"Colors\"][0]", "T_c: Critical temperature :param S_c: Critical entropy :param fractions: List of molar fractions", "= None super(MplCanvas, self).__init__(figure=self.fig) self.plotting_preferences = plotting_preferences def toggle_isenthalps(self, is_checked): \"\"\" Hides /", "= self.plotting_preferences[\"Phase envelope\"][\"Plotting\"] p_initial = 
tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"] p_max =", "H, P_c, H_c, fractions): \"\"\" Return plot data for a PH phase envelope", "line.set_linestyle(\"solid\") else: for line in self.isotherms: line.set_linestyle(\"None\") self.draw() else: return def toggle_isobars(self, is_checked):", "= self.plotting_preferences[\"Global binary\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Global binary\"][\"Plotting\"] min_press = calc_settings[\"Minimum pressure\"] min_temp =", "label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isobars =", "plot_settings[\"Grid on\"] if title == \"<NAME> and Scott type: \": title += ks_strings[KSTYPE]", "crit_y = self.plot_envelope_TH(tp, T, H, T_c, H_c, fractions) elif prim_vars == \"TS\": x,", "p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\") self.isobars.append(p_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions,", "return x, y, crit_x, crit_y def plot_envelope_PS(self, tp, P, S, P_c, S_c, fractions):", "self.isotherms = None self.isobars = None super(MplCanvas, self).__init__(figure=self.fig) self.plotting_preferences = plotting_preferences def toggle_isenthalps(self,", "a pressure density plot :param tp: Thermopack instance :param fractions: List of molar", "S_c, fractions) elif prim_vars == \"TH\": x, y, crit_x, crit_y = self.plot_envelope_TH(tp, T,", "self.axes.plot(CRIT[i][:, 0], CRIT[i][:, 1], linestyle=linestyles[2], color=colors[2], label=label) label = None label = \"AZ\"", "minimum_temperature=200.0, maximum_temperature=500.0, nmax=100) if i == 0: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color, label=\"Isobar\")", "Return plot data for a PH phase envelope :param tp: Thermopack instance :param", "maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\", label=\"Isenthalp\") 
else:", "Plots a binary pxy plot :param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Binary", "calc_settings[\"Minimum pressure\"] dz_max = calc_settings[\"Maximum dz\"] dlns_max = calc_settings[\"Maximum dlns\"] LLE, L1VE, L2VE", "in range(len(T))]) global H_list global T_list global S_list global P_list n_isopleths = isopleth_settings[\"Number", "K\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.legend(loc=\"best\") self.draw() def plot_global_binary(self, tp): \"\"\" Plots a", "\"\"\" def __init__(self, components, plotting_preferences): self.fig = Figure(dpi=100) self.empty = True self.components =", "tpv_settings[\"Maximum pressure\"] step_size = tpv_settings[\"Step size\"] # Calculate T, P, V T, P,", "density plot :param tp: Thermopack instance :param fractions: List of molar fractions \"\"\"", "plot if a plot exists :param is_checked: Status of isobar button (bool) \"\"\"", "self.plotting_preferences[\"Phase envelope\"][\"Isopleths\"] critical_settings = self.plotting_preferences[\"Phase envelope\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Phase envelope\"][\"Plotting\"] p_initial = tpv_settings[\"Initial", "prim_vars == \"PT\": x, y, crit_x, crit_y = self.plot_envelope_PT(tp, T, P, T_c, P_c,", "= plot_settings[\"Grid on\"] if title == \"<NAME> and Scott type: \": title +=", "calc_settings = self.plotting_preferences[\"Binary pxy\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Binary pxy\"][\"Plotting\"] T = calc_settings[\"Temperature\"] p_max =", "crit_x, crit_y = self.plot_envelope_TS(tp, T, S, T_c, S_c, fractions) else: return # Plotting", "of isotherm button (bool) \"\"\" if not self.empty and self.isotherms: if is_checked: for", "None self.isotherms = None return x, y, crit_x, crit_y def plot_envelope_TS(self, tp, T,", "== 0: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, 
= self.axes.plot(t_vals, p_vals,", "pxy\"][\"Plotting\"] T = calc_settings[\"Temperature\"] p_max = calc_settings[\"Maximum pressure\"] p_min = calc_settings[\"Minimum pressure\"] dz_max", "S y = P crit_x = S_c crit_y = P_c # isotherms, isenthalps", "Status of isenthalp button (bool) \"\"\" if not self.empty and self.isenthalps: if is_checked:", "P_c, H_c, S_c = None, None, None, None, None # Set global variables,", "self.plotting_preferences[\"Binary pxy\"][\"Plotting\"] T = calc_settings[\"Temperature\"] p_max = calc_settings[\"Maximum pressure\"] p_min = calc_settings[\"Minimum pressure\"]", "fractions, minimum_temperature=T_min, maximum_temperature=T_max) if i == 0: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\", label=\"Isobar\")", "in range(len(VLE)): self.axes.plot(VLE[i][:, 0], VLE[i][:, 1], linestyle=linestyles[0], color=colors[0], label=label) label = None label", "self.plotting_preferences = plotting_preferences def toggle_isenthalps(self, is_checked): \"\"\" Hides / shows isenthalp lines in", "xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.plot([1 / v for v", "/ shows isotherm lines in the plot if a plot exists :param is_checked:", "self.axes.plot(L2VE[1], L2VE[2], color=line_color) grid_on = plot_settings[\"Grid on\"] title = plot_settings[\"Title\"] xlabel = plot_settings[\"x", "crit_settings = self.plotting_preferences[\"Pressure density\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Pressure density\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min", "S_c: Critical entropy :param fractions: List of molar fractions :return: x: x values", "np.linspace(np.min(T) * 0.60, np.max(T) * 1.40, n_isopleths) P_list = np.linspace(np.min(P) * 0.60, np.max(P)", "isenthalps temperatures = T_list enthalpies = H_list self.isotherms = [] self.isenthalps = []", "T in T_list: P_list = [] for V in V_list: P, = tp.pressure_tv(temp=T,", "Plot Mode \"\"\" def __init__(self, components, 
plotting_preferences): self.fig = Figure(dpi=100) self.empty = True", "self.axes.plot(h_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max,", "V T, P, V = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) H =", "P_c, fractions) elif prim_vars == \"PH\": x, y, crit_x, crit_y = self.plot_envelope_PH(tp, P,", "= None self.isobars = None return x, y, crit_x, crit_y def plot_envelope_PS(self, tp,", ":param fractions: List of molar fractions for the components \"\"\" tpv_settings = self.plotting_preferences[\"Phase", "fractions \"\"\" calc_settings = self.plotting_preferences[\"Pressure density\"][\"Calc\"] tpv_settings = self.plotting_preferences[\"Pressure density\"][\"TPV\"] crit_settings = self.plotting_preferences[\"Pressure", "in self.isotherms: line.set_linestyle(\"solid\") else: for line in self.isotherms: line.set_linestyle(\"None\") self.draw() else: return def", "crit_x, crit_y = self.plot_envelope_TH(tp, T, H, T_c, H_c, fractions) elif prim_vars == \"TS\":", "= plot_settings[\"y label\"] grid_on = plot_settings[\"Grid on\"] if title == \"<NAME> and Scott", "n_isopleths) S_list = np.linspace(np.min(S), np.max(S), n_isopleths) T_list = np.linspace(np.min(T) * 0.60, np.max(T) *", "molar fractions for the components \"\"\" tpv_settings = self.plotting_preferences[\"Phase envelope\"][\"TPV\"] isopleth_settings = self.plotting_preferences[\"Phase", "p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals,", "== 0: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, p_vals,", "linestyle=linestyles[1], color=colors[1], label=label) label = 
None label = \"CRIT\" for i in range(len(CRIT)):", "MessageBox import numpy as np class MplCanvas(FigureCanvasQTAgg): \"\"\" A canvas for matplotlib plots.", "0: handles, labels = self.axes.get_legend_handles_labels() self.axes.legend([handles[3], handles[2], handles[0], handles[1]], [labels[3], labels[2], labels[0], labels[1]],", "labels[0], labels[1]], loc=\"best\") else: self.axes.legend() self.draw() def plot_envelope_PT(self, tp, T, P, T_c, P_c,", "point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True) x = T y", "the plot if a plot exists :param is_checked: Status of isentrope button (bool)", "crit_settings[\"Temperature\"] crit_v_guess = crit_settings[\"Volume\"] crit_tol = crit_settings[\"Error tolerance\"] # Calculate critical T, V,", "if i == 0: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color, label=\"Isenthalp\") else: h_line, =", "self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.legend(loc=\"best\") self.draw() def plot_global_binary(self, tp): \"\"\" Plots a binary pxy plot", "in self.isobars: line.set_linestyle(\"solid\") else: for line in self.isobars: line.set_linestyle(\"None\") self.draw() else: return def", "for matplotlib plots. 
Contains all plot functionality for Plot Mode \"\"\" def __init__(self,", "in self.isentropes: line.set_linestyle(\"None\") self.draw() else: return def toggle_isotherms(self, is_checked): \"\"\" Hides / shows", "i in range(len(CRIT)): self.axes.plot(CRIT[i][:, 0], CRIT[i][:, 1], linestyle=linestyles[2], color=colors[2], label=label) label = None", "None super(MplCanvas, self).__init__(figure=self.fig) self.plotting_preferences = plotting_preferences def toggle_isenthalps(self, is_checked): \"\"\" Hides / shows", "i == 0: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals,", "else: return # Plotting line_color = plot_settings[\"Colors\"][0] point_color = plot_settings[\"Colors\"][1] grid_on = plot_settings[\"Grid", "/ V_list title = plot_settings[\"Title\"] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"]", "instance :param P: Pressure values :param H: Enthalpy values :param P_c: Critical pressure", "self.plot_envelope_TH(tp, T, H, T_c, H_c, fractions) elif prim_vars == \"TS\": x, y, crit_x,", "CRIT, AZ = tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp, include_azeotropes=azeotropes) colors = plot_settings[\"Colors\"] linestyles = [\"-\", \"--\",", "P_lists[i], label=str(T_list[i]) + \" K\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.legend(loc=\"best\") self.draw() def plot_global_binary(self,", "size\"] # Calculate T, P, V T_ph_env, P_ph_env, V_ph_env = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max,", "in range(len(P_lists)): self.axes.plot(rho_list, P_lists[i], label=str(T_list[i]) + \" K\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.legend(loc=\"best\")", "numpy as np class MplCanvas(FigureCanvasQTAgg): \"\"\" A canvas for matplotlib plots. 
Contains all", "shows isenthalp lines in the plot if a plot exists :param is_checked: Status", "= V_c * calc_settings[\"Volume range end\"] V_num_points = calc_settings[\"Num points\"] V_list = np.linspace(V_start,", "[\"-\", \"--\", \":\", \"-.\"] label = \"VLE\" for i in range(len(VLE)): self.axes.plot(VLE[i][:, 0],", "V_num_points) P_lists = [] for T in T_list: P_list = [] for V", "P: Pressure values :param H: Enthalpy values :param P_c: Critical pressure :param H_c:", "for i in range(len(P_lists)): self.axes.plot(rho_list, P_lists[i], label=str(T_list[i]) + \" K\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel)", "self.empty and self.isentropes: if is_checked: for line in self.isentropes: line.set_linestyle(\"solid\") else: for line", "envelope\") self.axes.scatter([crit_x], [crit_y], color=point_color, label=\"Critical point\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) # Sort entries", "volume=V, n=fractions) P_list.append(P) P_lists.append(P_list) rho_list = 1 / V_list title = plot_settings[\"Title\"] grid_on", "phase envelope :param tp: Thermopack instance :param T: Temperature values :param S: Entropy", "Critical temperature :param S_c: Critical entropy :param fractions: List of molar fractions :return:", "else: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\") self.isenthalps.append(h_line) self.isentropes = None self.isotherms = None", "i in range(len(enthalpies)): t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min,", "else: for line in self.isobars: line.set_linestyle(\"None\") self.draw() else: return def plot_envelope(self, tp, prim_vars,", "enthalpies = H_list self.isenthalps = [] self.isobars = [] for i in range(len(pressures)):", "\"\"\" if not self.empty and self.isentropes: if is_checked: for line in self.isentropes: 
line.set_linestyle(\"solid\")", "minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(t_vals, p_vals,", "line.set_linestyle(\"None\") self.draw() else: return def toggle_isotherms(self, is_checked): \"\"\" Hides / shows isotherm lines", "color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isotherms = None self.isobars", "lines in the plot if a plot exists :param is_checked: Status of isotherm", "fractions): \"\"\" Plots a pressure density plot :param tp: Thermopack instance :param fractions:", "V_c, fractions) except Exception as e: msg = MessageBox(\"Error\", str(e)) msg.exec_() T_c, V_c,", "plot_settings[\"y label\"] self.axes.plot([1 / v for v in V_ph_env], P_ph_env, label=\"Phase envelope\") self.axes.scatter([1", "if i == 0: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\", label=\"Isobar\") else: p_line, =", "P_c # isotherms, isentropes temperatures = T_list entropies = S_list self.isotherms = []", "T_c: Critical temperature :param H_c: Critical enthalpy :param fractions: List of molar fractions", "is_checked: for line in self.isobars: line.set_linestyle(\"solid\") else: for line in self.isobars: line.set_linestyle(\"None\") self.draw()", "= [] self.isentropes = [] for i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals", "S_list = np.linspace(np.min(S), np.max(S), n_isopleths) T_list = np.linspace(np.min(T) * 0.60, np.max(T) * 1.40,", "temperature\"] T_max = isopleth_settings[\"Maximum temperature\"] nmax = isopleth_settings[\"N max\"] # Plot depending on", "self.plotting_preferences[\"Phase envelope\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"] p_max = tpv_settings[\"Maximum", "= self.plotting_preferences[\"Global binary\"][\"Plotting\"] min_press = calc_settings[\"Minimum pressure\"] 
min_temp = calc_settings[\"Minimum temperature\"] azeotropes =", "H, T_c, H_c, fractions) elif prim_vars == \"TS\": x, y, crit_x, crit_y =", "= plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.grid(grid_on) self.axes.set_title(title) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.draw() def", "T_ph_env, P_ph_env, V_ph_env = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) crit_t_guess = crit_settings[\"Temperature\"]", "maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color,", "None self.isotherms = None self.isobars = None super(MplCanvas, self).__init__(figure=self.fig) self.plotting_preferences = plotting_preferences def", "self.isobars = None super(MplCanvas, self).__init__(figure=self.fig) self.plotting_preferences = plotting_preferences def toggle_isenthalps(self, is_checked): \"\"\" Hides", "\"-.\"] label = \"VLE\" for i in range(len(VLE)): self.axes.plot(VLE[i][:, 0], VLE[i][:, 1], linestyle=linestyles[0],", "p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color, label=\"Isobar\") else: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color) self.isobars.append(p_line)", "V_num_points = calc_settings[\"Num points\"] V_list = np.linspace(V_start, V_end, V_num_points) P_lists = [] for", "maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\",", "= None self.isobars = None return x, y, crit_x, crit_y def plot_envelope_TH(self, tp,", "\"\"\" Plots a phase envelope :param tp: Thermopack instance :param prim_vars: Primary variables", "self.parent().parent().parent().PT_S_btn.setChecked(True) x = T y = P crit_x = T_c crit_y = P_c", "max\"] # Plot depending on which primary variables are chosen if 
prim_vars ==", "crit_y = P_c # isotherms, isentropes temperatures = T_list entropies = S_list self.isotherms", "v=v, tol=tol) H_c = tp.enthalpy_tv(T_c, V_c, fractions) S_c = tp.entropy_tv(T_c, V_c, fractions) except", "List of molar fractions :return: x: x values for plot, y: y values", "(bool) \"\"\" if not self.empty and self.isentropes: if is_checked: for line in self.isentropes:", "maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else:", "for plot, crit_x: x value for critical point, crit_y: y value for critical", "minimum_temperature=t_min, step_size=step_size, calc_v=True) crit_t_guess = crit_settings[\"Temperature\"] crit_v_guess = crit_settings[\"Volume\"] crit_tol = crit_settings[\"Error tolerance\"]", "tp: Thermopack instance :param prim_vars: Primary variables for the plot (e.g. PT, PH,", "0: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\", label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\")", "color=line_color) grid_on = plot_settings[\"Grid on\"] title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel", "self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\", label=\"Isobar\") else: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\") self.isobars.append(p_line) t_vals, p_vals,", "= isopleth_settings[\"Maximum temperature\"] nmax = isopleth_settings[\"N max\"] # Plot depending on which primary", "crit_y def plot_envelope_PS(self, tp, P, S, P_c, S_c, fractions): \"\"\" Return plot data", "= T_c # isobars, isentropes pressures = P_list entropies = S_list self.isobars =", "range(len(AZ)): self.axes.plot(AZ[i][:, 0], AZ[i][:, 1], linestyle=linestyles[3], color=colors[3], label=label) label = None ks_strings =", "Status of isentrope button (bool) \"\"\" if not self.empty and self.isentropes: if is_checked:", "self.parent().parent().parent().TH_P_btn.setChecked(True) x = H y = T crit_x = H_c crit_y = T_c", 
":param T: Temperature values :param S: Entropy values :param T_c: Critical temperature :param", "None: self.axes.plot(L2VE[0], L2VE[2], color=line_color) self.axes.plot(L2VE[1], L2VE[2], color=line_color) grid_on = plot_settings[\"Grid on\"] title =", "min_temp = calc_settings[\"Minimum temperature\"] azeotropes = calc_settings[\"Azeotropes\"] KSTYPE, VLE, LLVE, CRIT, AZ =", "\"TH\": x, y, crit_x, crit_y = self.plot_envelope_TH(tp, T, H, T_c, H_c, fractions) elif", "fractions) elif prim_vars == \"PH\": x, y, crit_x, crit_y = self.plot_envelope_PH(tp, P, H,", "None return x, y, crit_x, crit_y def plot_envelope_PH(self, tp, P, H, P_c, H_c,", "L1VE[2], color=line_color) if L2VE[0] is not None: self.axes.plot(L2VE[0], L2VE[2], color=line_color) self.axes.plot(L2VE[1], L2VE[2], color=line_color)", "for line in self.isenthalps: line.set_linestyle(\"solid\") else: for line in self.isenthalps: line.set_linestyle(\"None\") self.draw() def", "self.axes.plot(L2VE[0], L2VE[2], color=line_color) self.axes.plot(L2VE[1], L2VE[2], color=line_color) grid_on = plot_settings[\"Grid on\"] title = plot_settings[\"Title\"]", "a plot exists :param is_checked: Status of isobar button (bool) \"\"\" if not", "self.isobars = None return x, y, crit_x, crit_y def plot_envelope_TH(self, tp, T, H,", "label=label) label = None label = \"AZ\" for i in range(len(AZ)): self.axes.plot(AZ[i][:, 0],", "= [\"-\", \"--\", \":\", \"-.\"] label = \"VLE\" for i in range(len(VLE)): self.axes.plot(VLE[i][:,", "xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.grid(grid_on) self.axes.set_title(title) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.draw()", "= self.axes.plot(t_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isotherms", "= tpv_settings[\"Step size\"] # Calculate T, P, V T_ph_env, P_ph_env, V_ph_env = 
tp.get_envelope_twophase(initial_pressure=p_initial,", "= plot_settings[\"Title\"] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y", "title += ks_strings[KSTYPE] self.axes.set_title(title) legend = self.axes.legend(loc=\"best\", numpoints=1) legend.get_frame().set_linewidth(0.0) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.grid(grid_on) self.draw()", "\"\"\" A canvas for matplotlib plots. Contains all plot functionality for Plot Mode", "None ks_strings = { 1: \"I\", 2: \"II\", 3: \"III\", 4: \"IV\", 5:", "S_c crit_y = T_c # Isenthalps, isobars pressures = P_list enthalpies = H_list", "= H_c crit_y = T_c # isobars, isentropes pressures = P_list entropies =", "i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=200.0, maximum_temperature=500.0, nmax=100)", "= self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\", label=\"Isobar\") else: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\") self.isobars.append(p_line) t_vals,", "H_c, fractions) elif prim_vars == \"TS\": x, y, crit_x, crit_y = self.plot_envelope_TS(tp, T,", "in the legend legend = True if legend: if n_isopleths > 0: handles,", "1], linestyle=linestyles[1], color=colors[1], label=label) label = None label = \"CRIT\" for i in", "global P_min global P_max global T_min global T_max global nmax isopleth_1_color = plot_settings[\"Colors\"][2]", "\"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True) x = S y =", "pxy plot :param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Global binary\"][\"Calc\"] plot_settings =", "if n_isopleths > 0: handles, labels = self.axes.get_legend_handles_labels() self.axes.legend([handles[3], handles[2], handles[0], handles[1]], [labels[3],", 
"return x, y, crit_x, crit_y def plot_envelope_PH(self, tp, P, H, P_c, H_c, fractions):", "tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess, tol=crit_tol) T_list = calc_settings[\"Temperatures\"] V_start = V_c * calc_settings[\"Volume range", "isopleth_2_color global P_min global P_max global T_min global T_max global nmax isopleth_1_color =", "h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=200.0, maximum_temperature=500.0, nmax=100) if i == 0: p_line, =", ":param is_checked: Status of isenthalp button (bool) \"\"\" if not self.empty and self.isenthalps:", "y, crit_x, crit_y def plot_envelope_TS(self, tp, T, S, T_c, S_c, fractions): \"\"\" Return", "= plot_settings[\"Colors\"][3] P_min = isopleth_settings[\"Minimum pressure\"] P_max = isopleth_settings[\"Maximum pressure\"] T_min = isopleth_settings[\"Minimum", "# isotherms, isenthalps temperatures = T_list enthalpies = H_list self.isotherms = [] self.isenthalps", "for V in V_list: P, = tp.pressure_tv(temp=T, volume=V, n=fractions) P_list.append(P) P_lists.append(P_list) rho_list =", "a binary pxy plot :param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Global binary\"][\"Calc\"]", "P_list = np.linspace(np.min(P) * 0.60, np.max(P) * 1.40, n_isopleths) temp = critical_settings[\"Temperature\"] v", "H, T_c, H_c, fractions): \"\"\" Return plot data for a PS phase envelope", "plot_settings = self.plotting_preferences[\"Binary pxy\"][\"Plotting\"] T = calc_settings[\"Temperature\"] p_max = calc_settings[\"Maximum pressure\"] p_min =", "tp: Thermopack instance :param T: Temperature values :param H: Enthalpy values :param T_c:", "return def toggle_isobars(self, is_checked): \"\"\" Hides / shows isobar lines in the plot", "V_c * calc_settings[\"Volume range start\"] V_end = V_c * calc_settings[\"Volume range end\"] V_num_points", "plot_settings[\"Colors\"][2] isopleth_2_color = plot_settings[\"Colors\"][3] P_min = isopleth_settings[\"Minimum pressure\"] P_max = 
isopleth_settings[\"Maximum pressure\"] T_min", "Primary variables for the plot (e.g. PT, PH, ..) :param fractions: List of", "in self.isentropes: line.set_linestyle(\"solid\") else: for line in self.isentropes: line.set_linestyle(\"None\") self.draw() else: return def", "global P_list n_isopleths = isopleth_settings[\"Number of isopleths\"] H_list = np.linspace(np.min(H), np.max(H), n_isopleths) S_list", "L1VE, L2VE = tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min, maximum_dz=dz_max, maximum_dlns=dlns_max) line_color = plot_settings[\"Colors\"][0] if LLE[0]", "critical T, V, P T_c, V_c, P_c = tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess, tol=crit_tol) T_list", "crit_x = H_c crit_y = P_c # isotherms, isentropes temperatures = T_list entropies", "p_max = tpv_settings[\"Maximum pressure\"] step_size = tpv_settings[\"Step size\"] # Calculate T, P, V", "S, T_c, S_c, fractions): \"\"\" Return plot data for a PS phase envelope", "crit_x, crit_y def plot_envelope_PH(self, tp, P, H, P_c, H_c, fractions): \"\"\" Return plot", "Enthalpy values :param P_c: Critical pressure :param H_c: Critical enthalpy :param fractions: List", "tpv_settings[\"Step size\"] # Calculate T, P, V T, P, V = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions,", "else: for line in self.isentropes: line.set_linestyle(\"None\") self.draw() else: return def toggle_isotherms(self, is_checked): \"\"\"", "minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(t_vals, p_vals,", "= np.linspace(V_start, V_end, V_num_points) P_lists = [] for T in T_list: P_list =", "VLE[i][:, 1], linestyle=linestyles[0], color=colors[0], label=label) label = None label = \"LLVE\" for i", "H = np.array([tp.enthalpy_tv(T[i], V[i], fractions) for i in range(len(T))]) S = np.array([tp.entropy_tv(T[i], V[i],", "== 0: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color, 
label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals,", "tp.critical(n=fractions, temp=temp, v=v, tol=tol) H_c = tp.enthalpy_tv(T_c, V_c, fractions) S_c = tp.entropy_tv(T_c, V_c,", "P_list entropies = S_list self.isobars = [] self.isentropes = [] for i in", "= MessageBox(\"Error\", str(e)) msg.exec_() T_c, V_c, P_c, H_c, S_c = None, None, None,", "label=\"Phase envelope\") self.axes.scatter([crit_x], [crit_y], color=point_color, label=\"Critical point\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) # Sort", "= S_c crit_y = P_c # isotherms, isenthalps temperatures = T_list enthalpies =", "fractions) elif prim_vars == \"TS\": x, y, crit_x, crit_y = self.plot_envelope_TS(tp, T, S,", "y values for plot, crit_x: x value for critical point, crit_y: y value", "label\"] title = plot_settings[\"Title\"] self.axes.plot(x, y, color=line_color, label=\"Phase envelope\") self.axes.scatter([crit_x], [crit_y], color=point_color, label=\"Critical", "pressure\"] T_min = isopleth_settings[\"Minimum temperature\"] T_max = isopleth_settings[\"Maximum temperature\"] nmax = isopleth_settings[\"N max\"]", "== 0: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(t_vals, p_vals,", "def plot_pressure_density(self, tp, fractions): \"\"\" Plots a pressure density plot :param tp: Thermopack", "isobars, isentropes pressures = P_list entropies = S_list self.isobars = [] self.isentropes =", "not self.empty and self.isobars: if is_checked: for line in self.isobars: line.set_linestyle(\"solid\") else: for", "elif prim_vars == \"PH\": x, y, crit_x, crit_y = self.plot_envelope_PH(tp, P, H, P_c,", "isopleths\"] H_list = np.linspace(np.min(H), np.max(H), n_isopleths) S_list = np.linspace(np.min(S), np.max(S), n_isopleths) T_list =", "is_checked): \"\"\" Hides / shows isobar lines in the plot if a plot", "of molar fractions \"\"\" calc_settings = 
self.plotting_preferences[\"Pressure density\"][\"Calc\"] tpv_settings = self.plotting_preferences[\"Pressure density\"][\"TPV\"] crit_settings", ":param tp: Thermopack instance :param fractions: List of molar fractions \"\"\" calc_settings =", "h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color) self.isenthalps.append(h_line) self.isentropes = None self.isobars = None return", "envelope\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"] p_max = tpv_settings[\"Maximum pressure\"]", "Contains all plot functionality for Plot Mode \"\"\" def __init__(self, components, plotting_preferences): self.fig", "self.isentropes.append(s_line) self.isotherms = None self.isobars = None return x, y, crit_x, crit_y def", "p_min = calc_settings[\"Minimum pressure\"] dz_max = calc_settings[\"Maximum dz\"] dlns_max = calc_settings[\"Maximum dlns\"] LLE,", "self.axes.grid(grid_on) self.axes.set_title(title) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.draw() def plot_pressure_density(self, tp, fractions): \"\"\" Plots a pressure", ":param tp: Thermopack instance :param P: Pressure values :param H: Enthalpy values :param", "matplotlib plots. 
Contains all plot functionality for Plot Mode \"\"\" def __init__(self, components,", "for a PS phase envelope :param tp: Thermopack instance :param T: Temperature values", "crit_settings[\"Volume\"] crit_tol = crit_settings[\"Error tolerance\"] # Calculate critical T, V, P T_c, V_c,", "np.max(H), n_isopleths) S_list = np.linspace(np.min(S), np.max(S), n_isopleths) T_list = np.linspace(np.min(T) * 0.60, np.max(T)", ":param tp: Thermopack instance :param T: Temperature values :param P: Pressure values :param", "elif prim_vars == \"PS\": x, y, crit_x, crit_y = self.plot_envelope_PS(tp, P, S, P_c,", "tp: Thermopack instance :param T: Temperature values :param S: Entropy values :param T_c:", "self.axes.legend() self.draw() def plot_envelope_PT(self, tp, T, P, T_c, P_c, fractions): \"\"\" Return plot", "P_c = tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess, tol=crit_tol) T_list = calc_settings[\"Temperatures\"] V_start = V_c *", "= self.plotting_preferences[\"Phase envelope\"][\"Isopleths\"] critical_settings = self.plotting_preferences[\"Phase envelope\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Phase envelope\"][\"Plotting\"] p_initial =", "crit_y def plot_envelope_PH(self, tp, P, H, P_c, H_c, fractions): \"\"\" Return plot data", "maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) crit_t_guess = crit_settings[\"Temperature\"] crit_v_guess = crit_settings[\"Volume\"] crit_tol = crit_settings[\"Error", "minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color, label=\"Isenthalp\")", "self.isenthalps = [] for i in range(len(temperatures)): p_vals, v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i],", "LLVE, CRIT, AZ = tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp, include_azeotropes=azeotropes) colors = plot_settings[\"Colors\"] linestyles = [\"-\",", "line.set_linestyle(\"None\") 
self.draw() else: return def toggle_isobars(self, is_checked): \"\"\" Hides / shows isobar lines", "self.axes.plot(AZ[i][:, 0], AZ[i][:, 1], linestyle=linestyles[3], color=colors[3], label=label) label = None ks_strings = {", "self.axes.plot(s_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max,", "= self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\", label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\") self.isenthalps.append(h_line) self.isentropes", "dlns_max = calc_settings[\"Maximum dlns\"] LLE, L1VE, L2VE = tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min, maximum_dz=dz_max, maximum_dlns=dlns_max)", "= None return x, y, crit_x, crit_y def plot_envelope_TH(self, tp, T, H, T_c,", "self.axes.plot(LLE[0], LLE[2], color=line_color) self.axes.plot(LLE[1], LLE[2], color=line_color) if L1VE[0] is not None: self.axes.plot(L1VE[0], L1VE[2],", "self.isentropes = [] for i in range(len(temperatures)): p_vals, v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i],", "= plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] title =", "None # Set global variables, so that they are accessible in all phase", "isotherms, isenthalps temperatures = T_list enthalpies = H_list self.isotherms = [] self.isenthalps =", "s_vals, h_vals = tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i == 0: t_line,", "plot exists :param is_checked: Status of isenthalp button (bool) \"\"\" if not self.empty", "P_c, H_c, fractions) elif prim_vars == \"PS\": x, y, crit_x, crit_y = self.plot_envelope_PS(tp,", "global T_max global nmax isopleth_1_color = plot_settings[\"Colors\"][2] isopleth_2_color = plot_settings[\"Colors\"][3] P_min = isopleth_settings[\"Minimum", "y, color=line_color, label=\"Phase 
envelope\") self.axes.scatter([crit_x], [crit_y], color=point_color, label=\"Critical point\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel)", "is_checked: for line in self.isentropes: line.set_linestyle(\"solid\") else: for line in self.isentropes: line.set_linestyle(\"None\") self.draw()", "s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isotherms = None self.isobars = None return", "H_list self.isenthalps = [] self.isobars = [] for i in range(len(pressures)): t_vals, v_vals,", "= calc_settings[\"Maximum dlns\"] LLE, L1VE, L2VE = tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min, maximum_dz=dz_max, maximum_dlns=dlns_max) line_color", "H_list global T_list global S_list global P_list n_isopleths = isopleth_settings[\"Number of isopleths\"] H_list", "label\"] ylabel = plot_settings[\"y label\"] title = plot_settings[\"Title\"] self.axes.plot(x, y, color=line_color, label=\"Phase envelope\")", "in T_list: P_list = [] for V in V_list: P, = tp.pressure_tv(temp=T, volume=V,", "v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i ==", "L2VE[2], color=line_color) self.axes.plot(L2VE[1], L2VE[2], color=line_color) grid_on = plot_settings[\"Grid on\"] title = plot_settings[\"Title\"] xlabel", "t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions,", "0: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color)", "None label = \"AZ\" for i in range(len(AZ)): self.axes.plot(AZ[i][:, 0], AZ[i][:, 1], linestyle=linestyles[3],", "t_vals, color=isopleth_1_color, label=\"Isobar\") else: p_line, = 
self.axes.plot(h_vals, t_vals, color=isopleth_1_color) self.isobars.append(p_line) t_vals, p_vals, v_vals,", "handles, labels = self.axes.get_legend_handles_labels() self.axes.legend([handles[3], handles[2], handles[0], handles[1]], [labels[3], labels[2], labels[0], labels[1]], loc=\"best\")", "molar fractions :return: x: x values for plot, y: y values for plot,", "line in self.isentropes: line.set_linestyle(\"None\") self.draw() else: return def toggle_isotherms(self, is_checked): \"\"\" Hides /", "values :param T_c: Critical temperature :param P_c: Critical pressure :param fractions: List of", "self.axes.scatter([crit_x], [crit_y], color=point_color, label=\"Critical point\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) # Sort entries in", "self.plotting_preferences[\"Pressure density\"][\"TPV\"] crit_settings = self.plotting_preferences[\"Pressure density\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Pressure density\"][\"Plotting\"] p_initial = tpv_settings[\"Initial", "correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True) x = H y = T crit_x =", "label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color) self.isenthalps.append(h_line) self.isentropes = None self.isobars =", "t_min = tpv_settings[\"Minimum temperature\"] p_max = tpv_settings[\"Maximum pressure\"] step_size = tpv_settings[\"Step size\"] #", "h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\") self.isenthalps.append(h_line) self.isentropes = None self.isotherms = None return", "* 0.60, np.max(T) * 1.40, n_isopleths) P_list = np.linspace(np.min(P) * 0.60, np.max(P) *", "\"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) self.parent().parent().parent().PS_T_btn.setChecked(True) 
self.parent().parent().parent().PS_H_btn.setChecked(True) x = S y =", "functions global isopleth_1_color global isopleth_2_color global P_min global P_max global T_min global T_max", "a PT phase envelope :param tp: Thermopack instance :param T: Temperature values :param", "Plot depending on which primary variables are chosen if prim_vars == \"PT\": x,", "line in self.isotherms: line.set_linestyle(\"solid\") else: for line in self.isotherms: line.set_linestyle(\"None\") self.draw() else: return", "p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min,", "color=\"#ffd2d2\") self.isobars.append(p_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max,", "of molar fractions :return: x: x values for plot, y: y values for", "isobars pressures = P_list enthalpies = H_list self.isenthalps = [] self.isobars = []", "label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\") self.isenthalps.append(h_line) self.isentropes = None self.isotherms =", "point\") for i in range(len(P_lists)): self.axes.plot(rho_list, P_lists[i], label=str(T_list[i]) + \" K\") self.axes.set_title(title) self.axes.grid(grid_on)", "data for a PS phase envelope :param tp: Thermopack instance :param T: Temperature", "= plot_settings[\"y label\"] title = plot_settings[\"Title\"] self.axes.plot(x, y, color=line_color, label=\"Phase envelope\") self.axes.scatter([crit_x], [crit_y],", "variables for the plot (e.g. PT, PH, ..) 
:param fractions: List of molar", "line.set_linestyle(\"None\") self.draw() def toggle_isentropes(self, is_checked): \"\"\" Hides / shows isentrope lines in the", "self.plotting_preferences[\"Pressure density\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Pressure density\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum", "self.axes.plot(L1VE[1], L1VE[2], color=line_color) if L2VE[0] is not None: self.axes.plot(L2VE[0], L2VE[2], color=line_color) self.axes.plot(L2VE[1], L2VE[2],", "P_c, fractions): \"\"\" Return plot data for a PT phase envelope :param tp:", "H_list entropies = S_list self.isenthalps = [] self.isentropes = [] for i in", "shows isobar lines in the plot if a plot exists :param is_checked: Status", "label=\"Critical point\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) # Sort entries in the legend legend", "linestyle=linestyles[2], color=colors[2], label=label) label = None label = \"AZ\" for i in range(len(AZ)):", "e: msg = MessageBox(\"Error\", str(e)) msg.exec_() T_c, V_c, P_c, H_c, S_c = None,", "density\"][\"TPV\"] crit_settings = self.plotting_preferences[\"Pressure density\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Pressure density\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"]", "0: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color, label=\"Isobar\") else: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color)", "lines in the plot if a plot exists :param is_checked: Status of isenthalp", "of isobar button (bool) \"\"\" if not self.empty and self.isobars: if is_checked: for", "else: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i],", "== 0: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\", label=\"Isobar\") else: p_line, = self.axes.plot(s_vals, 
t_vals,", "color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isotherms = None return x, y, crit_x, crit_y", "= calc_settings[\"Minimum pressure\"] min_temp = calc_settings[\"Minimum temperature\"] azeotropes = calc_settings[\"Azeotropes\"] KSTYPE, VLE, LLVE,", "Isenthalps, isobars pressures = P_list enthalpies = H_list self.isenthalps = [] self.isobars =", "\"\"\" calc_settings = self.plotting_preferences[\"Binary pxy\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Binary pxy\"][\"Plotting\"] T = calc_settings[\"Temperature\"] p_max", "A canvas for matplotlib plots. Contains all plot functionality for Plot Mode \"\"\"", "T_c crit_y = P_c # Isenthalps, isentropes enthalpies = H_list entropies = S_list", "tp, T, S, T_c, S_c, fractions): \"\"\" Return plot data for a PS", "fractions): \"\"\" Plots a phase envelope :param tp: Thermopack instance :param prim_vars: Primary", "y = P crit_x = S_c crit_y = P_c # isotherms, isenthalps temperatures", "color=line_color) if L1VE[0] is not None: self.axes.plot(L1VE[0], L1VE[2], color=line_color) self.axes.plot(L1VE[1], L1VE[2], color=line_color) if", "legend legend = True if legend: if n_isopleths > 0: handles, labels =", "global isopleth_1_color global isopleth_2_color global P_min global P_max global T_min global T_max global", "def toggle_isenthalps(self, is_checked): \"\"\" Hides / shows isenthalp lines in the plot if", "dz_max = calc_settings[\"Maximum dz\"] dlns_max = calc_settings[\"Maximum dlns\"] LLE, L1VE, L2VE = tp.get_binary_pxy(temp=T,", "H_c crit_y = P_c # isotherms, isentropes temperatures = T_list entropies = S_list", "i == 0: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(h_vals,", "prim_vars, fractions): \"\"\" Plots a phase envelope :param tp: Thermopack instance :param prim_vars:", "self.axes.plot(h_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(h_vals, 
p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals,", "self.isotherms: if is_checked: for line in self.isotherms: line.set_linestyle(\"solid\") else: for line in self.isotherms:", "str(e)) msg.exec_() T_c, V_c, P_c, H_c, S_c = None, None, None, None, None", "None: self.axes.plot(L1VE[0], L1VE[2], color=line_color) self.axes.plot(L1VE[1], L1VE[2], color=line_color) if L2VE[0] is not None: self.axes.plot(L2VE[0],", "H_c, fractions) elif prim_vars == \"PS\": x, y, crit_x, crit_y = self.plot_envelope_PS(tp, P,", "Critical enthalpy :param fractions: List of molar fractions :return: x: x values for", "fractions: List of molar fractions for the components \"\"\" tpv_settings = self.plotting_preferences[\"Phase envelope\"][\"TPV\"]", "critical_settings[\"Error tolerance\"] # Calculate critical variables try: T_c, V_c, P_c = tp.critical(n=fractions, temp=temp,", "binary\"][\"Plotting\"] min_press = calc_settings[\"Minimum pressure\"] min_temp = calc_settings[\"Minimum temperature\"] azeotropes = calc_settings[\"Azeotropes\"] KSTYPE,", "crit_y = self.plot_envelope_PT(tp, T, P, T_c, P_c, fractions) elif prim_vars == \"PH\": x,", "plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.grid(grid_on) self.axes.set_title(title) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel)", "temperature :param S_c: Critical entropy :param fractions: List of molar fractions :return: x:", "plot data for a PS phase envelope :param tp: Thermopack instance :param T:", "in the plot if a plot exists :param is_checked: Status of isentrope button", "= plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] title = plot_settings[\"Title\"] self.axes.plot(x, y, color=line_color,", "= np.array([tp.entropy_tv(T[i], V[i], fractions) for i in range(len(T))]) global H_list global T_list global", "# Calculate critical variables try: T_c, V_c, P_c = tp.critical(n=fractions, temp=temp, v=v, tol=tol)", 
"self.axes.plot(h_vals, t_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps =", "buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True) x = S y = T crit_x = S_c", "from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg from matplotlib.figure import Figure from gui.utils import MessageBox import", "maximum_pressure=P_max, nmax=nmax) if i == 0: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else:", "= S_list self.isotherms = [] self.isentropes = [] for i in range(len(temperatures)): p_vals,", "tp, T, H, T_c, H_c, fractions): \"\"\" Return plot data for a PS", "= tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) crit_t_guess = crit_settings[\"Temperature\"] crit_v_guess = crit_settings[\"Volume\"]", "\":\", \"-.\"] label = \"VLE\" for i in range(len(VLE)): self.axes.plot(VLE[i][:, 0], VLE[i][:, 1],", "def toggle_isobars(self, is_checked): \"\"\" Hides / shows isobar lines in the plot if", "s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isotherms = None return", "shows isentrope lines in the plot if a plot exists :param is_checked: Status", "label=\"Critical point\") for i in range(len(P_lists)): self.axes.plot(rho_list, P_lists[i], label=str(T_list[i]) + \" K\") self.axes.set_title(title)", "\"\"\" Hides / shows isobar lines in the plot if a plot exists", "instance :param T: Temperature values :param H: Enthalpy values :param T_c: Critical temperature", "maximum_dz=dz_max, maximum_dlns=dlns_max) line_color = plot_settings[\"Colors\"][0] if LLE[0] is not None: self.axes.plot(LLE[0], LLE[2], 
color=line_color)", "self.axes.plot(t_vals, p_vals, color=isopleth_1_color) self.isenthalps.append(h_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max,", "i in range(len(AZ)): self.axes.plot(AZ[i][:, 0], AZ[i][:, 1], linestyle=linestyles[3], color=colors[3], label=label) label = None", "minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color, label=\"Isenthalp\")", "i == 0: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color, label=\"Isobar\") else: p_line, = self.axes.plot(h_vals,", "label\"] ylabel = plot_settings[\"y label\"] grid_on = plot_settings[\"Grid on\"] if title == \"<NAME>", "= P crit_x = H_c crit_y = P_c # isotherms, isentropes temperatures =", "Figure from gui.utils import MessageBox import numpy as np class MplCanvas(FigureCanvasQTAgg): \"\"\" A", "color=colors[3], label=label) label = None ks_strings = { 1: \"I\", 2: \"II\", 3:", "= self.plotting_preferences[\"Phase envelope\"][\"TPV\"] isopleth_settings = self.plotting_preferences[\"Phase envelope\"][\"Isopleths\"] critical_settings = self.plotting_preferences[\"Phase envelope\"][\"Critical\"] plot_settings =", "not self.empty and self.isenthalps: if is_checked: for line in self.isenthalps: line.set_linestyle(\"solid\") else: for", "plot_settings[\"y label\"] self.axes.grid(grid_on) self.axes.set_title(title) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.draw() def plot_pressure_density(self, tp, fractions): \"\"\" Plots", "= calc_settings[\"Num points\"] V_list = np.linspace(V_start, V_end, V_num_points) P_lists = [] for T", "self.isentropes.append(s_line) self.isenthalps = None self.isobars = None return x, y, crit_x, crit_y def", "minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\", label=\"Isenthalp\")", "T_min = 
isopleth_settings[\"Minimum temperature\"] T_max = isopleth_settings[\"Maximum temperature\"] nmax = isopleth_settings[\"N max\"] #", "binary pxy plot :param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Binary pxy\"][\"Calc\"] plot_settings", "= [] self.isenthalps = [] for i in range(len(temperatures)): p_vals, v_vals, s_vals, h_vals", "dz\"] dlns_max = calc_settings[\"Maximum dlns\"] LLE, L1VE, L2VE = tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min, maximum_dz=dz_max,", "for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True) x =", "if L1VE[0] is not None: self.axes.plot(L1VE[0], L1VE[2], color=line_color) self.axes.plot(L1VE[1], L1VE[2], color=line_color) if L2VE[0]", "= self.axes.get_legend_handles_labels() self.axes.legend([handles[3], handles[2], handles[0], handles[1]], [labels[3], labels[2], labels[0], labels[1]], loc=\"best\") else: self.axes.legend()", "T_c # Isenthalps, isobars pressures = P_list enthalpies = H_list self.isenthalps = []", "fractions :return: x: x values for plot, y: y values for plot, crit_x:", "range start\"] V_end = V_c * calc_settings[\"Volume range end\"] V_num_points = calc_settings[\"Num points\"]", "if title == \"<NAME> and Scott type: \": title += ks_strings[KSTYPE] self.axes.set_title(title) legend", "t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if", "button (bool) \"\"\" if not self.empty and self.isenthalps: if is_checked: for line in", "for i in range(len(temperatures)): p_vals, v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max,", "== \"TS\": x, y, crit_x, crit_y = 
self.plot_envelope_TS(tp, T, S, T_c, S_c, fractions)", "exists :param is_checked: Status of isentrope button (bool) \"\"\" if not self.empty and", "S_list global P_list n_isopleths = isopleth_settings[\"Number of isopleths\"] H_list = np.linspace(np.min(H), np.max(H), n_isopleths)", "== \"PT\": x, y, crit_x, crit_y = self.plot_envelope_PT(tp, T, P, T_c, P_c, fractions)", "self.parent().parent().parent().PS_T_btn.setChecked(True) self.parent().parent().parent().PS_H_btn.setChecked(True) x = S y = P crit_x = S_c crit_y =", "1.40, n_isopleths) temp = critical_settings[\"Temperature\"] v = critical_settings[\"Volume\"] tol = critical_settings[\"Error tolerance\"] #", "minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i == 0: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\")", "title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] grid_on =", "critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) self.parent().parent().parent().PS_T_btn.setChecked(True) self.parent().parent().parent().PS_H_btn.setChecked(True) x = S", "for i in range(len(LLVE)): self.axes.plot(LLVE[i][:, 0], LLVE[i][:, 1], linestyle=linestyles[1], color=colors[1], label=label) label =", "p_max = calc_settings[\"Maximum pressure\"] p_min = calc_settings[\"Minimum pressure\"] dz_max = calc_settings[\"Maximum dz\"] dlns_max", "i in range(len(P_lists)): self.axes.plot(rho_list, P_lists[i], label=str(T_list[i]) + \" K\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel)", "plot exists :param is_checked: Status of isentrope button (bool) \"\"\" if not self.empty", "self.plotting_preferences[\"Binary pxy\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Binary pxy\"][\"Plotting\"] T = calc_settings[\"Temperature\"] p_max = calc_settings[\"Maximum pressure\"]", "FigureCanvasQTAgg from 
matplotlib.figure import Figure from gui.utils import MessageBox import numpy as np", "envelope\") self.axes.scatter([1 / V_c], [P_c], label=\"Critical point\") for i in range(len(P_lists)): self.axes.plot(rho_list, P_lists[i],", ":param S: Entropy values :param T_c: Critical temperature :param S_c: Critical entropy :param", "in self.isenthalps: line.set_linestyle(\"None\") self.draw() def toggle_isentropes(self, is_checked): \"\"\" Hides / shows isentrope lines", "critical_settings = self.plotting_preferences[\"Phase envelope\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Phase envelope\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min", "critical point, crit_y: y value for critical point, \"\"\" # Display correct buttons", "isenthalp button (bool) \"\"\" if not self.empty and self.isenthalps: if is_checked: for line", "= crit_settings[\"Volume\"] crit_tol = crit_settings[\"Error tolerance\"] # Calculate critical T, V, P T_c,", "if legend: if n_isopleths > 0: handles, labels = self.axes.get_legend_handles_labels() self.axes.legend([handles[3], handles[2], handles[0],", "s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=200.0, maximum_temperature=500.0, nmax=100) if i == 0: p_line,", "# Calculate T, P, V T, P, V = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min,", "values :param P_c: Critical pressure :param H_c: Critical enthalpy :param fractions: List of", "H y = T crit_x = H_c crit_y = T_c # isobars, isentropes", "minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i == 0: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\")", "handles[0], handles[1]], [labels[3], labels[2], labels[0], labels[1]], loc=\"best\") else: self.axes.legend() self.draw() def plot_envelope_PT(self, tp,", "Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) 
self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True) x = S y = T crit_x", "isopleth_settings = self.plotting_preferences[\"Phase envelope\"][\"Isopleths\"] critical_settings = self.plotting_preferences[\"Phase envelope\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Phase envelope\"][\"Plotting\"] p_initial", "size\"] # Calculate T, P, V T, P, V = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max,", "calc_settings[\"Volume range start\"] V_end = V_c * calc_settings[\"Volume range end\"] V_num_points = calc_settings[\"Num", "n_isopleths) T_list = np.linspace(np.min(T) * 0.60, np.max(T) * 1.40, n_isopleths) P_list = np.linspace(np.min(P)", "0: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\", label=\"Isobar\") else: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\")", "in V_list: P, = tp.pressure_tv(temp=T, volume=V, n=fractions) P_list.append(P) P_lists.append(P_list) rho_list = 1 /", "= isopleth_settings[\"Number of isopleths\"] H_list = np.linspace(np.min(H), np.max(H), n_isopleths) S_list = np.linspace(np.min(S), np.max(S),", "= [] for i in range(len(enthalpies)): t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions,", "for i in range(len(CRIT)): self.axes.plot(CRIT[i][:, 0], CRIT[i][:, 1], linestyle=linestyles[2], color=colors[2], label=label) label =", "V_list title = plot_settings[\"Title\"] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel", "calc_v=True) crit_t_guess = crit_settings[\"Temperature\"] crit_v_guess = crit_settings[\"Volume\"] crit_tol = crit_settings[\"Error tolerance\"] # Calculate", "x, y, crit_x, crit_y = self.plot_envelope_PS(tp, P, S, P_c, S_c, fractions) elif prim_vars", "\"II\", 3: \"III\", 4: \"IV\", 5: \"V\" } title = plot_settings[\"Title\"] xlabel =", "if a plot exists :param is_checked: Status of isenthalp button (bool) \"\"\" if", "tp: Thermopack 
instance \"\"\" calc_settings = self.plotting_preferences[\"Global binary\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Global binary\"][\"Plotting\"] min_press", "a binary pxy plot :param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Binary pxy\"][\"Calc\"]", "is not None: self.axes.plot(L2VE[0], L2VE[2], color=line_color) self.axes.plot(L2VE[1], L2VE[2], color=line_color) grid_on = plot_settings[\"Grid on\"]", "self.axes.legend(loc=\"best\") self.draw() def plot_global_binary(self, tp): \"\"\" Plots a binary pxy plot :param tp:", "n=fractions) P_list.append(P) P_lists.append(P_list) rho_list = 1 / V_list title = plot_settings[\"Title\"] grid_on =", "step_size = tpv_settings[\"Step size\"] # Calculate T, P, V T, P, V =", "T = calc_settings[\"Temperature\"] p_max = calc_settings[\"Maximum pressure\"] p_min = calc_settings[\"Minimum pressure\"] dz_max =", "point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True) x = H y", "else: return def plot_envelope(self, tp, prim_vars, fractions): \"\"\" Plots a phase envelope :param", "for line in self.isobars: line.set_linestyle(\"None\") self.draw() else: return def plot_envelope(self, tp, prim_vars, fractions):", "and self.isotherms: if is_checked: for line in self.isotherms: line.set_linestyle(\"solid\") else: for line in", "canvas for matplotlib plots. 
Contains all plot functionality for Plot Mode \"\"\" def", ":param P: Pressure values :param S: Entropy values :param P_c: Critical pressure :param", "# Plotting line_color = plot_settings[\"Colors\"][0] point_color = plot_settings[\"Colors\"][1] grid_on = plot_settings[\"Grid on\"] xlabel", "= plot_settings[\"Colors\"][2] isopleth_2_color = plot_settings[\"Colors\"][3] P_min = isopleth_settings[\"Minimum pressure\"] P_max = isopleth_settings[\"Maximum pressure\"]", "plot :param tp: Thermopack instance :param fractions: List of molar fractions \"\"\" calc_settings", "isotherm lines in the plot if a plot exists :param is_checked: Status of", "color=isopleth_1_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color) self.isenthalps.append(h_line) t_vals, p_vals, v_vals, h_vals", "self.isotherms: line.set_linestyle(\"None\") self.draw() else: return def toggle_isobars(self, is_checked): \"\"\" Hides / shows isobar", "envelope\"][\"Isopleths\"] critical_settings = self.plotting_preferences[\"Phase envelope\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Phase envelope\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"]", "T crit_x = S_c crit_y = T_c # Isenthalps, isobars pressures = P_list", "\"\"\" tpv_settings = self.plotting_preferences[\"Phase envelope\"][\"TPV\"] isopleth_settings = self.plotting_preferences[\"Phase envelope\"][\"Isopleths\"] critical_settings = self.plotting_preferences[\"Phase envelope\"][\"Critical\"]", "color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isobars", "label = None label = \"CRIT\" for i in range(len(CRIT)): self.axes.plot(CRIT[i][:, 0], CRIT[i][:,", "def plot_envelope_PS(self, tp, P, S, P_c, S_c, fractions): \"\"\" Return plot data for", "self.draw() def toggle_isentropes(self, is_checked): \"\"\" Hides / shows isentrope lines in the plot", 
"n_isopleths) P_list = np.linspace(np.min(P) * 0.60, np.max(P) * 1.40, n_isopleths) temp = critical_settings[\"Temperature\"]", "data for a PS phase envelope :param tp: Thermopack instance :param P: Pressure", "self.axes.plot(LLE[1], LLE[2], color=line_color) if L1VE[0] is not None: self.axes.plot(L1VE[0], L1VE[2], color=line_color) self.axes.plot(L1VE[1], L1VE[2],", "instance :param T: Temperature values :param P: Pressure values :param T_c: Critical temperature", "is_checked): \"\"\" Hides / shows isentrope lines in the plot if a plot", "import numpy as np class MplCanvas(FigureCanvasQTAgg): \"\"\" A canvas for matplotlib plots. Contains", "lines in the plot if a plot exists :param is_checked: Status of isentrope", "Enthalpy values :param T_c: Critical temperature :param H_c: Critical enthalpy :param fractions: List", "\"\"\" Return plot data for a PH phase envelope :param tp: Thermopack instance", "nmax isopleth_1_color = plot_settings[\"Colors\"][2] isopleth_2_color = plot_settings[\"Colors\"][3] P_min = isopleth_settings[\"Minimum pressure\"] P_max =", "def toggle_isentropes(self, is_checked): \"\"\" Hides / shows isentrope lines in the plot if", "global variables, so that they are accessible in all phase envelope plot functions", "= True self.components = components self.isenthalps = None self.isentropes = None self.isotherms =", "y = T crit_x = S_c crit_y = T_c # Isenthalps, isobars pressures", "= np.array([tp.enthalpy_tv(T[i], V[i], fractions) for i in range(len(T))]) S = np.array([tp.entropy_tv(T[i], V[i], fractions)", "(bool) \"\"\" if not self.empty and self.isotherms: if is_checked: for line in self.isotherms:", "= self.axes.plot(s_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals,", "if not self.empty and self.isenthalps: if is_checked: for line in self.isenthalps: line.set_linestyle(\"solid\") else:", "label = \"AZ\" for i in 
range(len(AZ)): self.axes.plot(AZ[i][:, 0], AZ[i][:, 1], linestyle=linestyles[3], color=colors[3],", "envelope\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Phase envelope\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"]", "PS phase envelope :param tp: Thermopack instance :param P: Pressure values :param S:", "for a PT phase envelope :param tp: Thermopack instance :param T: Temperature values", "def plot_binary_pxy(self, tp): \"\"\" Plots a binary pxy plot :param tp: Thermopack instance", "V_c, P_c, H_c, S_c = None, None, None, None, None # Set global", "line in self.isobars: line.set_linestyle(\"None\") self.draw() else: return def plot_envelope(self, tp, prim_vars, fractions): \"\"\"", "y, crit_x, crit_y = self.plot_envelope_TH(tp, T, H, T_c, H_c, fractions) elif prim_vars ==", "Isenthalps, isentropes enthalpies = H_list entropies = S_list self.isenthalps = [] self.isentropes =", "1.40, n_isopleths) P_list = np.linspace(np.min(P) * 0.60, np.max(P) * 1.40, n_isopleths) temp =", "for the components \"\"\" tpv_settings = self.plotting_preferences[\"Phase envelope\"][\"TPV\"] isopleth_settings = self.plotting_preferences[\"Phase envelope\"][\"Isopleths\"] critical_settings", "None, None, None, None, None # Set global variables, so that they are", "= calc_settings[\"Maximum pressure\"] p_min = calc_settings[\"Minimum pressure\"] dz_max = calc_settings[\"Maximum dz\"] dlns_max =", "temperature :param P_c: Critical pressure :param fractions: List of molar fractions :return: x:", "= H y = P crit_x = H_c crit_y = P_c # isotherms,", "x = H y = T crit_x = H_c crit_y = T_c #", "crit_x, crit_y def plot_envelope_TH(self, tp, T, H, T_c, H_c, fractions): \"\"\" Return plot", "self.isobars = [] self.isentropes = [] for i in range(len(pressures)): t_vals, v_vals, s_vals,", "T_c, V_c, P_c = tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess, tol=crit_tol) T_list = calc_settings[\"Temperatures\"] 
V_start =", "label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isotherms =", "class MplCanvas(FigureCanvasQTAgg): \"\"\" A canvas for matplotlib plots. Contains all plot functionality for", "rho_list = 1 / V_list title = plot_settings[\"Title\"] grid_on = plot_settings[\"Grid on\"] xlabel", "0], AZ[i][:, 1], linestyle=linestyles[3], color=colors[3], label=label) label = None ks_strings = { 1:", "= T y = P crit_x = T_c crit_y = P_c # Isenthalps,", "H_list = np.linspace(np.min(H), np.max(H), n_isopleths) S_list = np.linspace(np.min(S), np.max(S), n_isopleths) T_list = np.linspace(np.min(T)", "Calculate critical variables try: T_c, V_c, P_c = tp.critical(n=fractions, temp=temp, v=v, tol=tol) H_c", "LLE[2], color=line_color) if L1VE[0] is not None: self.axes.plot(L1VE[0], L1VE[2], color=line_color) self.axes.plot(L1VE[1], L1VE[2], color=line_color)", "S, P_c, S_c, fractions) elif prim_vars == \"TH\": x, y, crit_x, crit_y =", "self.isentropes = None self.isotherms = None return x, y, crit_x, crit_y def plot_binary_pxy(self,", "t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=200.0, maximum_temperature=500.0, nmax=100) if i ==", "label\"] ylabel = plot_settings[\"y label\"] self.axes.plot([1 / v for v in V_ph_env], P_ph_env,", "plot (e.g. PT, PH, ..) 
:param fractions: List of molar fractions for the", "label=\"Isobar\") else: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color) self.isobars.append(p_line) t_vals, p_vals, v_vals, h_vals =", "buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) self.parent().parent().parent().PS_T_btn.setChecked(True) self.parent().parent().parent().PS_H_btn.setChecked(True) x = S y = P crit_x = S_c", "np.max(T) * 1.40, n_isopleths) P_list = np.linspace(np.min(P) * 0.60, np.max(P) * 1.40, n_isopleths)", "None return x, y, crit_x, crit_y def plot_binary_pxy(self, tp): \"\"\" Plots a binary", "= None label = \"AZ\" for i in range(len(AZ)): self.axes.plot(AZ[i][:, 0], AZ[i][:, 1],", "plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] grid_on = plot_settings[\"Grid on\"] if title ==", "label = \"CRIT\" for i in range(len(CRIT)): self.axes.plot(CRIT[i][:, 0], CRIT[i][:, 1], linestyle=linestyles[2], color=colors[2],", "T_c, H_c, fractions) elif prim_vars == \"TS\": x, y, crit_x, crit_y = self.plot_envelope_TS(tp,", "color=\"#ffd2d2\", label=\"Isobar\") else: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\") self.isobars.append(p_line) t_vals, p_vals, v_vals, s_vals", "crit_x = S_c crit_y = T_c # Isenthalps, isobars pressures = P_list enthalpies", "is_checked: Status of isobar button (bool) \"\"\" if not self.empty and self.isobars: if", "in range(len(T))]) S = np.array([tp.entropy_tv(T[i], V[i], fractions) for i in range(len(T))]) global H_list", "self.axes.legend([handles[3], handles[2], handles[0], handles[1]], [labels[3], labels[2], labels[0], labels[1]], loc=\"best\") else: self.axes.legend() self.draw() def", "legend = True if legend: if n_isopleths > 0: handles, labels = self.axes.get_legend_handles_labels()", "\"PS\": x, y, crit_x, crit_y = self.plot_envelope_PS(tp, P, S, P_c, S_c, fractions) elif", "color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], 
fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max,", "a plot exists :param is_checked: Status of isenthalp button (bool) \"\"\" if not", "enthalpy :param fractions: List of molar fractions :return: x: x values for plot,", "\"\"\" Hides / shows isenthalp lines in the plot if a plot exists", "== 0: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\", label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals,", "= self.plotting_preferences[\"Binary pxy\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Binary pxy\"][\"Plotting\"] T = calc_settings[\"Temperature\"] p_max = calc_settings[\"Maximum", "== \"TH\": x, y, crit_x, crit_y = self.plot_envelope_TH(tp, T, H, T_c, H_c, fractions)", ":param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Global binary\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Global binary\"][\"Plotting\"]", "plot if a plot exists :param is_checked: Status of isenthalp button (bool) \"\"\"", "np.linspace(np.min(H), np.max(H), n_isopleths) S_list = np.linspace(np.min(S), np.max(S), n_isopleths) T_list = np.linspace(np.min(T) * 0.60,", "x, y, crit_x, crit_y def plot_envelope_TH(self, tp, T, H, T_c, H_c, fractions): \"\"\"", "V_ph_env], P_ph_env, label=\"Phase envelope\") self.axes.scatter([1 / V_c], [P_c], label=\"Critical point\") for i in", "= H y = T crit_x = H_c crit_y = T_c # isobars,", "of molar fractions for the components \"\"\" tpv_settings = self.plotting_preferences[\"Phase envelope\"][\"TPV\"] isopleth_settings =", "S, T_c, S_c, fractions) else: return # Plotting line_color = plot_settings[\"Colors\"][0] point_color =", ":param prim_vars: Primary variables for the plot (e.g. PT, PH, ..) 
:param fractions:", "pxy\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Binary pxy\"][\"Plotting\"] T = calc_settings[\"Temperature\"] p_max = calc_settings[\"Maximum pressure\"] p_min", "p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isotherms = None", "point, crit_y: y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0)", "point, crit_y: y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1)", "= self.axes.plot(h_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min,", "critical_settings[\"Temperature\"] v = critical_settings[\"Volume\"] tol = critical_settings[\"Error tolerance\"] # Calculate critical variables try:", "start\"] V_end = V_c * calc_settings[\"Volume range end\"] V_num_points = calc_settings[\"Num points\"] V_list", "is_checked: for line in self.isotherms: line.set_linestyle(\"solid\") else: for line in self.isotherms: line.set_linestyle(\"None\") self.draw()", "np.array([tp.enthalpy_tv(T[i], V[i], fractions) for i in range(len(T))]) S = np.array([tp.entropy_tv(T[i], V[i], fractions) for", "S: Entropy values :param T_c: Critical temperature :param S_c: Critical entropy :param fractions:", "shows isotherm lines in the plot if a plot exists :param is_checked: Status", "None return x, y, crit_x, crit_y def plot_envelope_PS(self, tp, P, S, P_c, S_c,", "y, crit_x, crit_y = self.plot_envelope_TS(tp, T, S, T_c, S_c, fractions) else: return #", "self.isotherms = None return x, y, crit_x, crit_y def plot_binary_pxy(self, tp): \"\"\" Plots", "nmax=100) if i == 0: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color, label=\"Isobar\") else: p_line,", "[] for i in 
range(len(enthalpies)): t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min,", "binary pxy plot :param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Global binary\"][\"Calc\"] plot_settings", "T crit_x = H_c crit_y = T_c # isobars, isentropes pressures = P_list", ":param P_c: Critical pressure :param fractions: List of molar fractions :return: x: x", "crit_y = T_c # Isenthalps, isobars pressures = P_list enthalpies = H_list self.isenthalps", "\"IV\", 5: \"V\" } title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel =", "plot_settings[\"Colors\"][0] point_color = plot_settings[\"Colors\"][1] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel", "[] self.isentropes = [] for i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals =", "i in range(len(LLVE)): self.axes.plot(LLVE[i][:, 0], LLVE[i][:, 1], linestyle=linestyles[1], color=colors[1], label=label) label = None", "Thermopack instance :param fractions: List of molar fractions \"\"\" calc_settings = self.plotting_preferences[\"Pressure density\"][\"Calc\"]", "= np.linspace(np.min(P) * 0.60, np.max(P) * 1.40, n_isopleths) temp = critical_settings[\"Temperature\"] v =", "Plots a binary pxy plot :param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Global", "plot_settings[\"Colors\"][1] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"]", "crit_y def plot_envelope_TH(self, tp, T, H, T_c, H_c, fractions): \"\"\" Return plot data", "critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True) x = T", "range end\"] V_num_points = calc_settings[\"Num points\"] V_list = np.linspace(V_start, V_end, V_num_points) P_lists =", 
"toggle_isotherms(self, is_checked): \"\"\" Hides / shows isotherm lines in the plot if a", "[P_c], label=\"Critical point\") for i in range(len(P_lists)): self.axes.plot(rho_list, P_lists[i], label=str(T_list[i]) + \" K\")", "= plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] grid_on = plot_settings[\"Grid", "title == \"<NAME> and Scott type: \": title += ks_strings[KSTYPE] self.axes.set_title(title) legend =", "t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions,", "KSTYPE, VLE, LLVE, CRIT, AZ = tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp, include_azeotropes=azeotropes) colors = plot_settings[\"Colors\"] linestyles", "color=isopleth_2_color) self.isenthalps.append(h_line) self.isentropes = None self.isobars = None return x, y, crit_x, crit_y", "tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"] p_max = tpv_settings[\"Maximum pressure\"] step_size = tpv_settings[\"Step", "Thermopack instance :param P: Pressure values :param H: Enthalpy values :param P_c: Critical", "phase envelope :param tp: Thermopack instance :param T: Temperature values :param P: Pressure", "Set global variables, so that they are accessible in all phase envelope plot", "def __init__(self, components, plotting_preferences): self.fig = Figure(dpi=100) self.empty = True self.components = components", "crit_y: y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True)", "color=isopleth_1_color) self.isobars.append(p_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max,", 
"self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True) x = H y = P crit_x = H_c crit_y", "5: \"V\" } title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y", "self.axes.plot(LLVE[i][:, 0], LLVE[i][:, 1], linestyle=linestyles[1], color=colors[1], label=label) label = None label = \"CRIT\"", "crit_x, crit_y = self.plot_envelope_PH(tp, P, H, P_c, H_c, fractions) elif prim_vars == \"PS\":", "point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) self.parent().parent().parent().PS_T_btn.setChecked(True) self.parent().parent().parent().PS_H_btn.setChecked(True) x = S y", "isotherms, isentropes temperatures = T_list entropies = S_list self.isotherms = [] self.isentropes =", "= None label = \"CRIT\" for i in range(len(CRIT)): self.axes.plot(CRIT[i][:, 0], CRIT[i][:, 1],", "pressure\"] p_min = calc_settings[\"Minimum pressure\"] dz_max = calc_settings[\"Maximum dz\"] dlns_max = calc_settings[\"Maximum dlns\"]", "PT, PH, ..) 
:param fractions: List of molar fractions for the components \"\"\"", ":param T: Temperature values :param H: Enthalpy values :param T_c: Critical temperature :param", "maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color, label=\"Isenthalp\") else:", "1], linestyle=linestyles[0], color=colors[0], label=label) label = None label = \"LLVE\" for i in", "range(len(VLE)): self.axes.plot(VLE[i][:, 0], VLE[i][:, 1], linestyle=linestyles[0], color=colors[0], label=label) label = None label =", "T_min global T_max global nmax isopleth_1_color = plot_settings[\"Colors\"][2] isopleth_2_color = plot_settings[\"Colors\"][3] P_min =", "self.fig = Figure(dpi=100) self.empty = True self.components = components self.isenthalps = None self.isentropes", "is_checked: Status of isotherm button (bool) \"\"\" if not self.empty and self.isotherms: if", "a PH phase envelope :param tp: Thermopack instance :param P: Pressure values :param", "self.isentropes = None self.isobars = None return x, y, crit_x, crit_y def plot_envelope_TH(self,", "critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True) x = H", "if i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\", label=\"Isenthalp\") else: h_line, =", "toggle_isenthalps(self, is_checked): \"\"\" Hides / shows isenthalp lines in the plot if a", "None self.isentropes = None self.isotherms = None self.isobars = None super(MplCanvas, self).__init__(figure=self.fig) self.plotting_preferences", "if i == 0: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, =", "of isopleths\"] H_list = np.linspace(np.min(H), np.max(H), n_isopleths) S_list = np.linspace(np.min(S), np.max(S), n_isopleths) T_list", "\"PH\": x, y, crit_x, crit_y = self.plot_envelope_PH(tp, 
P, H, P_c, H_c, fractions) elif", "ylabel = plot_settings[\"y label\"] grid_on = plot_settings[\"Grid on\"] if title == \"<NAME> and", "crit_x = T_c crit_y = P_c # Isenthalps, isentropes enthalpies = H_list entropies", "s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0:", "for plot, y: y values for plot, crit_x: x value for critical point,", "= self.plotting_preferences[\"Phase envelope\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Phase envelope\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min =", "of isentrope button (bool) \"\"\" if not self.empty and self.isentropes: if is_checked: for", "color=line_color, label=\"Phase envelope\") self.axes.scatter([crit_x], [crit_y], color=point_color, label=\"Critical point\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) #", "t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min, maximum_temperature=T_max) if i == 0:", "instance :param fractions: List of molar fractions \"\"\" calc_settings = self.plotting_preferences[\"Pressure density\"][\"Calc\"] tpv_settings", "lines in the plot if a plot exists :param is_checked: Status of isobar", "if not self.empty and self.isentropes: if is_checked: for line in self.isentropes: line.set_linestyle(\"solid\") else:", ":param H_c: Critical enthalpy :param fractions: List of molar fractions :return: x: x", "fractions) elif prim_vars == \"PS\": x, y, crit_x, crit_y = self.plot_envelope_PS(tp, P, S,", "= True if legend: if n_isopleths > 0: handles, labels = self.axes.get_legend_handles_labels() self.axes.legend([handles[3],", "p_vals, color=\"#d5d3ff\") self.isenthalps.append(h_line) self.isentropes = None self.isotherms = None return x, y, crit_x,", "density\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] 
t_min = tpv_settings[\"Minimum temperature\"] p_max = tpv_settings[\"Maximum pressure\"]", "self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) # Sort entries in the legend legend = True if", "label\"] self.axes.plot([1 / v for v in V_ph_env], P_ph_env, label=\"Phase envelope\") self.axes.scatter([1 /", "= calc_settings[\"Minimum pressure\"] dz_max = calc_settings[\"Maximum dz\"] dlns_max = calc_settings[\"Maximum dlns\"] LLE, L1VE,", "1], linestyle=linestyles[2], color=colors[2], label=label) label = None label = \"AZ\" for i in", "# Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) self.parent().parent().parent().PS_T_btn.setChecked(True) self.parent().parent().parent().PS_H_btn.setChecked(True) x = S y = P", "n_isopleths > 0: handles, labels = self.axes.get_legend_handles_labels() self.axes.legend([handles[3], handles[2], handles[0], handles[1]], [labels[3], labels[2],", "line in self.isenthalps: line.set_linestyle(\"None\") self.draw() def toggle_isentropes(self, is_checked): \"\"\" Hides / shows isentrope", "value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True) x", "def plot_envelope_TS(self, tp, T, S, T_c, S_c, fractions): \"\"\" Return plot data for", "calc_settings[\"Azeotropes\"] KSTYPE, VLE, LLVE, CRIT, AZ = tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp, include_azeotropes=azeotropes) colors = plot_settings[\"Colors\"]", "calc_v=True) H = np.array([tp.enthalpy_tv(T[i], V[i], fractions) for i in range(len(T))]) S = np.array([tp.entropy_tv(T[i],", "isentrope button (bool) \"\"\" if not self.empty and self.isentropes: if is_checked: for line", "the plot if a plot exists :param is_checked: Status of isobar button (bool)", "T_list global S_list global P_list 
n_isopleths = isopleth_settings[\"Number of isopleths\"] H_list = np.linspace(np.min(H),", "buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True) x = H y = P crit_x = H_c", "tpv_settings = self.plotting_preferences[\"Pressure density\"][\"TPV\"] crit_settings = self.plotting_preferences[\"Pressure density\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Pressure density\"][\"Plotting\"] p_initial", "tpv_settings[\"Minimum temperature\"] p_max = tpv_settings[\"Maximum pressure\"] step_size = tpv_settings[\"Step size\"] # Calculate T,", "Hides / shows isentrope lines in the plot if a plot exists :param", "envelope :param tp: Thermopack instance :param T: Temperature values :param P: Pressure values", "# isobars, isentropes pressures = P_list entropies = S_list self.isobars = [] self.isentropes", "P, T_c, P_c, fractions) elif prim_vars == \"PH\": x, y, crit_x, crit_y =", "= self.axes.plot(h_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isobars = None return x,", "= crit_settings[\"Error tolerance\"] # Calculate critical T, V, P T_c, V_c, P_c =", "plotting_preferences): self.fig = Figure(dpi=100) self.empty = True self.components = components self.isenthalps = None", "the components \"\"\" tpv_settings = self.plotting_preferences[\"Phase envelope\"][\"TPV\"] isopleth_settings = self.plotting_preferences[\"Phase envelope\"][\"Isopleths\"] critical_settings =", "np.max(P) * 1.40, n_isopleths) temp = critical_settings[\"Temperature\"] v = critical_settings[\"Volume\"] tol = critical_settings[\"Error", "self.parent().parent().parent().PH_S_btn.setChecked(True) x = H y = P crit_x = H_c crit_y = P_c", "plot, crit_x: x value for critical point, crit_y: y value for critical point,", "matplotlib.figure import Figure from gui.utils import MessageBox import numpy as np class 
MplCanvas(FigureCanvasQTAgg):", "exists :param is_checked: Status of isenthalp button (bool) \"\"\" if not self.empty and", "P, V T, P, V = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) H", "[] for i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min,", "= H_list self.isotherms = [] self.isenthalps = [] for i in range(len(temperatures)): p_vals,", "is not None: self.axes.plot(L1VE[0], L1VE[2], color=line_color) self.axes.plot(L1VE[1], L1VE[2], color=line_color) if L2VE[0] is not", "plot :param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Global binary\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Global", "tp.entropy_tv(T_c, V_c, fractions) except Exception as e: msg = MessageBox(\"Error\", str(e)) msg.exec_() T_c,", "0], LLVE[i][:, 1], linestyle=linestyles[1], color=colors[1], label=label) label = None label = \"CRIT\" for", "nmax=nmax) if i == 0: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line,", "PH, ..) 
:param fractions: List of molar fractions for the components \"\"\" tpv_settings", "[] self.isenthalps = [] for i in range(len(temperatures)): p_vals, v_vals, s_vals, h_vals =", "maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color,", "in range(len(enthalpies)): t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max,", "crit_x, crit_y = self.plot_envelope_PS(tp, P, S, P_c, S_c, fractions) elif prim_vars == \"TH\":", "= calc_settings[\"Temperatures\"] V_start = V_c * calc_settings[\"Volume range start\"] V_end = V_c *", "AZ = tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp, include_azeotropes=azeotropes) colors = plot_settings[\"Colors\"] linestyles = [\"-\", \"--\", \":\",", "T: Temperature values :param P: Pressure values :param T_c: Critical temperature :param P_c:", "range(len(CRIT)): self.axes.plot(CRIT[i][:, 0], CRIT[i][:, 1], linestyle=linestyles[2], color=colors[2], label=label) label = None label =", "self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\") self.isobars.append(p_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max,", "from gui.utils import MessageBox import numpy as np class MplCanvas(FigureCanvasQTAgg): \"\"\" A canvas", "P crit_x = T_c crit_y = P_c # Isenthalps, isentropes enthalpies = H_list", "/ V_c], [P_c], label=\"Critical point\") for i in range(len(P_lists)): self.axes.plot(rho_list, P_lists[i], label=str(T_list[i]) +", "P, = tp.pressure_tv(temp=T, volume=V, n=fractions) P_list.append(P) P_lists.append(P_list) rho_list = 1 / V_list title", "x: x values for plot, y: y values for plot, crit_x: x value", "Scott type: \": title += ks_strings[KSTYPE] self.axes.set_title(title) legend = 
self.axes.legend(loc=\"best\", numpoints=1) legend.get_frame().set_linewidth(0.0) self.axes.set_xlabel(xlabel)", "T_c, P_c, fractions): \"\"\" Return plot data for a PT phase envelope :param", "grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] title", "crit_x: x value for critical point, crit_y: y value for critical point, \"\"\"", "chosen if prim_vars == \"PT\": x, y, crit_x, crit_y = self.plot_envelope_PT(tp, T, P,", "p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\", label=\"Isobar\") else: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\") self.isobars.append(p_line)", "crit_y def plot_binary_pxy(self, tp): \"\"\" Plots a binary pxy plot :param tp: Thermopack", "t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line)", "p_vals, color=isopleth_2_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color) self.isenthalps.append(h_line) self.isentropes = None", "tp, P, H, P_c, H_c, fractions): \"\"\" Return plot data for a PH", "nmax=nmax) if i == 0: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line,", "self.isenthalps: line.set_linestyle(\"solid\") else: for line in self.isenthalps: line.set_linestyle(\"None\") self.draw() def toggle_isentropes(self, is_checked): \"\"\"", "T_c, H_c, fractions): \"\"\" Return plot data for a PS phase envelope :param", "0: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color)", "else: self.axes.legend() self.draw() def plot_envelope_PT(self, tp, T, P, T_c, P_c, fractions): \"\"\" Return", "T, S, T_c, S_c, fractions): \"\"\" Return plot data for a PS phase", "Thermopack instance :param T: Temperature values :param S: Entropy values :param T_c: 
Critical", "that they are accessible in all phase envelope plot functions global isopleth_1_color global", "\"AZ\" for i in range(len(AZ)): self.axes.plot(AZ[i][:, 0], AZ[i][:, 1], linestyle=linestyles[3], color=colors[3], label=label) label", "for i in range(len(VLE)): self.axes.plot(VLE[i][:, 0], VLE[i][:, 1], linestyle=linestyles[0], color=colors[0], label=label) label =", "None self.isobars = None return x, y, crit_x, crit_y def plot_envelope_PS(self, tp, P,", "correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True) x = H y = P crit_x =", "Critical pressure :param fractions: List of molar fractions :return: x: x values for", "np.array([tp.entropy_tv(T[i], V[i], fractions) for i in range(len(T))]) global H_list global T_list global S_list", "P crit_x = S_c crit_y = P_c # isotherms, isenthalps temperatures = T_list", "self.isobars: line.set_linestyle(\"solid\") else: for line in self.isobars: line.set_linestyle(\"None\") self.draw() else: return def plot_envelope(self,", "as e: msg = MessageBox(\"Error\", str(e)) msg.exec_() T_c, V_c, P_c, H_c, S_c =", "def plot_envelope_PH(self, tp, P, H, P_c, H_c, fractions): \"\"\" Return plot data for", "i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals,", "if i == 0: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, =", "color=\"#d5d3ff\", label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\") self.isenthalps.append(h_line) self.isentropes = None self.isotherms", "temperature :param H_c: Critical enthalpy :param fractions: List of molar fractions :return: x:", "Critical temperature :param H_c: Critical enthalpy :param fractions: List of molar fractions :return:", "crit_y: y value for critical point, \"\"\" # Display 
correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) self.parent().parent().parent().TS_P_btn.setChecked(True)", "== \"PS\": x, y, crit_x, crit_y = self.plot_envelope_PS(tp, P, S, P_c, S_c, fractions)", "Temperature values :param H: Enthalpy values :param T_c: Critical temperature :param H_c: Critical", "= tp.get_isobar(pressures[i], fractions, minimum_temperature=200.0, maximum_temperature=500.0, nmax=100) if i == 0: p_line, = self.axes.plot(h_vals,", "prim_vars == \"PH\": x, y, crit_x, crit_y = self.plot_envelope_PH(tp, P, H, P_c, H_c,", "self.axes.plot(t_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isotherms =", "minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(h_vals, t_vals,", "point\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) # Sort entries in the legend legend =", "T: Temperature values :param H: Enthalpy values :param T_c: Critical temperature :param H_c:", "y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) self.parent().parent().parent().PS_T_btn.setChecked(True) self.parent().parent().parent().PS_H_btn.setChecked(True)", "self.isobars: line.set_linestyle(\"None\") self.draw() else: return def plot_envelope(self, tp, prim_vars, fractions): \"\"\" Plots a", "P, V = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) H = np.array([tp.enthalpy_tv(T[i], V[i],", "else: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color) self.isobars.append(p_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i],", "handles[2], handles[0], handles[1]], 
[labels[3], labels[2], labels[0], labels[1]], loc=\"best\") else: self.axes.legend() self.draw() def plot_envelope_PT(self,", "t_vals, color=\"#ffd2d2\", label=\"Isobar\") else: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\") self.isobars.append(p_line) t_vals, p_vals, v_vals,", "linestyle=linestyles[0], color=colors[0], label=label) label = None label = \"LLVE\" for i in range(len(LLVE)):", "V_c, P_c = tp.critical(n=fractions, temp=temp, v=v, tol=tol) H_c = tp.enthalpy_tv(T_c, V_c, fractions) S_c", "point_color = plot_settings[\"Colors\"][1] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel =", "= S_list self.isenthalps = [] self.isentropes = [] for i in range(len(enthalpies)): t_vals,", "plot data for a PS phase envelope :param tp: Thermopack instance :param P:", "Thermopack instance :param P: Pressure values :param S: Entropy values :param P_c: Critical", "self.axes.plot(L1VE[0], L1VE[2], color=line_color) self.axes.plot(L1VE[1], L1VE[2], color=line_color) if L2VE[0] is not None: self.axes.plot(L2VE[0], L2VE[2],", "not None: self.axes.plot(LLE[0], LLE[2], color=line_color) self.axes.plot(LLE[1], LLE[2], color=line_color) if L1VE[0] is not None:", "self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True) x = T y = P crit_x = T_c crit_y", "Pressure values :param S: Entropy values :param P_c: Critical pressure :param S_c: Critical", "T_list: P_list = [] for V in V_list: P, = tp.pressure_tv(temp=T, volume=V, n=fractions)", "v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i ==", "toggle_isentropes(self, is_checked): \"\"\" Hides / shows isentrope lines in the plot if a", "T_max global nmax isopleth_1_color = plot_settings[\"Colors\"][2] isopleth_2_color = 
plot_settings[\"Colors\"][3] P_min = isopleth_settings[\"Minimum pressure\"]", "plot_global_binary(self, tp): \"\"\" Plots a binary pxy plot :param tp: Thermopack instance \"\"\"", "nmax = isopleth_settings[\"N max\"] # Plot depending on which primary variables are chosen", "h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color) self.isenthalps.append(h_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions,", "1 / V_list title = plot_settings[\"Title\"] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x", ":param S_c: Critical entropy :param fractions: List of molar fractions :return: x: x", "label = None label = \"LLVE\" for i in range(len(LLVE)): self.axes.plot(LLVE[i][:, 0], LLVE[i][:,", "crit_y def plot_envelope_TS(self, tp, T, S, T_c, S_c, fractions): \"\"\" Return plot data", "np.linspace(np.min(P) * 0.60, np.max(P) * 1.40, n_isopleths) temp = critical_settings[\"Temperature\"] v = critical_settings[\"Volume\"]", "tp, P, S, P_c, S_c, fractions): \"\"\" Return plot data for a PS", "matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg from matplotlib.figure import Figure from gui.utils import MessageBox import numpy", "# Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True) x = H y = P", "nmax=nmax) if i == 0: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color, label=\"Isenthalp\") else: h_line,", "critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True) x = S", "isopleth_settings[\"Number of isopleths\"] H_list = np.linspace(np.min(H), np.max(H), n_isopleths) S_list = np.linspace(np.min(S), np.max(S), n_isopleths)", "the legend legend = True if legend: if 
n_isopleths > 0: handles, labels", "legend: if n_isopleths > 0: handles, labels = self.axes.get_legend_handles_labels() self.axes.legend([handles[3], handles[2], handles[0], handles[1]],", "= self.plot_envelope_PH(tp, P, H, P_c, H_c, fractions) elif prim_vars == \"PS\": x, y,", "P_c, S_c, fractions) elif prim_vars == \"TH\": x, y, crit_x, crit_y = self.plot_envelope_TH(tp,", "colors = plot_settings[\"Colors\"] linestyles = [\"-\", \"--\", \":\", \"-.\"] label = \"VLE\" for", "dlns\"] LLE, L1VE, L2VE = tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min, maximum_dz=dz_max, maximum_dlns=dlns_max) line_color = plot_settings[\"Colors\"][0]", "self.isenthalps = [] self.isentropes = [] for i in range(len(enthalpies)): t_vals, p_vals, v_vals,", "elif prim_vars == \"TS\": x, y, crit_x, crit_y = self.plot_envelope_TS(tp, T, S, T_c,", "= self.axes.plot(h_vals, t_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isotherms = None return x,", "None self.isobars = None return x, y, crit_x, crit_y def plot_envelope_PH(self, tp, P,", "else: return def toggle_isotherms(self, is_checked): \"\"\" Hides / shows isotherm lines in the", "plot if a plot exists :param is_checked: Status of isotherm button (bool) \"\"\"", "x = T y = P crit_x = T_c crit_y = P_c #", "isopleth_1_color global isopleth_2_color global P_min global P_max global T_min global T_max global nmax", "P_list.append(P) P_lists.append(P_list) rho_list = 1 / V_list title = plot_settings[\"Title\"] grid_on = plot_settings[\"Grid", "= np.linspace(np.min(H), np.max(H), n_isopleths) S_list = np.linspace(np.min(S), np.max(S), n_isopleths) T_list = np.linspace(np.min(T) *", "button (bool) \"\"\" if not self.empty and self.isobars: if is_checked: for line in", "on\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] title = plot_settings[\"Title\"] self.axes.plot(x,", "self.isobars = [] for i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals 
= tp.get_isobar(pressures[i],", "self.axes.get_legend_handles_labels() self.axes.legend([handles[3], handles[2], handles[0], handles[1]], [labels[3], labels[2], labels[0], labels[1]], loc=\"best\") else: self.axes.legend() self.draw()", "critical variables try: T_c, V_c, P_c = tp.critical(n=fractions, temp=temp, v=v, tol=tol) H_c =", "tp.get_isobar(pressures[i], fractions, minimum_temperature=200.0, maximum_temperature=500.0, nmax=100) if i == 0: p_line, = self.axes.plot(h_vals, t_vals,", "V_list = np.linspace(V_start, V_end, V_num_points) P_lists = [] for T in T_list: P_list", ":param P: Pressure values :param H: Enthalpy values :param P_c: Critical pressure :param", "h_vals = tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i == 0: t_line, =", "color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max,", "= self.axes.plot(h_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps", "V_start = V_c * calc_settings[\"Volume range start\"] V_end = V_c * calc_settings[\"Volume range", "in the plot if a plot exists :param is_checked: Status of isenthalp button", "are accessible in all phase envelope plot functions global isopleth_1_color global isopleth_2_color global", "a PS phase envelope :param tp: Thermopack instance :param T: Temperature values :param", "Critical entropy :param fractions: List of molar fractions :return: x: x values for", "in range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=200.0, maximum_temperature=500.0, nmax=100) if", "L1VE[2], color=line_color) self.axes.plot(L1VE[1], L1VE[2], color=line_color) if L2VE[0] is not None: 
self.axes.plot(L2VE[0], L2VE[2], color=line_color)", "line in self.isotherms: line.set_linestyle(\"None\") self.draw() else: return def toggle_isobars(self, is_checked): \"\"\" Hides /", "a PS phase envelope :param tp: Thermopack instance :param P: Pressure values :param", "for the plot (e.g. PT, PH, ..) :param fractions: List of molar fractions", "= tpv_settings[\"Maximum pressure\"] step_size = tpv_settings[\"Step size\"] # Calculate T, P, V T,", "0.60, np.max(T) * 1.40, n_isopleths) P_list = np.linspace(np.min(P) * 0.60, np.max(P) * 1.40,", "AZ[i][:, 1], linestyle=linestyles[3], color=colors[3], label=label) label = None ks_strings = { 1: \"I\",", ":param H: Enthalpy values :param T_c: Critical temperature :param H_c: Critical enthalpy :param", "Thermopack instance :param T: Temperature values :param P: Pressure values :param T_c: Critical", "title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.grid(grid_on) self.axes.set_title(title)", "# Plot depending on which primary variables are chosen if prim_vars == \"PT\":", "self.axes.plot(s_vals, p_vals, color=isopleth_2_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color) self.isenthalps.append(h_line) self.isentropes =", "maximum_temperature=500.0, nmax=100) if i == 0: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color, label=\"Isobar\") else:", "Critical temperature :param P_c: Critical pressure :param fractions: List of molar fractions :return:", "plot exists :param is_checked: Status of isobar button (bool) \"\"\" if not self.empty", "\"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True) x = H y =", "crit_x, crit_y def plot_envelope_PS(self, tp, P, S, P_c, S_c, fractions): \"\"\" Return plot", "# Isenthalps, isobars pressures = 
P_list enthalpies = H_list self.isenthalps = [] self.isobars", "i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min, maximum_temperature=T_max) if", "min_press = calc_settings[\"Minimum pressure\"] min_temp = calc_settings[\"Minimum temperature\"] azeotropes = calc_settings[\"Azeotropes\"] KSTYPE, VLE,", "Pressure values :param T_c: Critical temperature :param P_c: Critical pressure :param fractions: List", "P T_c, V_c, P_c = tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess, tol=crit_tol) T_list = calc_settings[\"Temperatures\"] V_start", "in self.isobars: line.set_linestyle(\"None\") self.draw() else: return def plot_envelope(self, tp, prim_vars, fractions): \"\"\" Plots", "for i in range(len(enthalpies)): t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max,", "None: self.axes.plot(LLE[0], LLE[2], color=line_color) self.axes.plot(LLE[1], LLE[2], color=line_color) if L1VE[0] is not None: self.axes.plot(L1VE[0],", "V_c * calc_settings[\"Volume range end\"] V_num_points = calc_settings[\"Num points\"] V_list = np.linspace(V_start, V_end,", "t_vals, color=isopleth_1_color) self.isobars.append(p_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min,", "P_list = [] for V in V_list: P, = tp.pressure_tv(temp=T, volume=V, n=fractions) P_list.append(P)", "= plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.plot([1 / v for v in", "self.draw() def plot_envelope_PT(self, tp, T, P, T_c, P_c, fractions): \"\"\" Return plot data", "phase envelope :param tp: Thermopack instance :param P: Pressure values :param H: Enthalpy", "functionality for Plot Mode \"\"\" def __init__(self, components, plotting_preferences): self.fig = Figure(dpi=100) self.empty", "v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], 
fractions, minimum_temperature=200.0, maximum_temperature=500.0, nmax=100) if i == 0:", "is_checked): \"\"\" Hides / shows isotherm lines in the plot if a plot", "= { 1: \"I\", 2: \"II\", 3: \"III\", 4: \"IV\", 5: \"V\" }", "tol=crit_tol) T_list = calc_settings[\"Temperatures\"] V_start = V_c * calc_settings[\"Volume range start\"] V_end =", "/ shows isenthalp lines in the plot if a plot exists :param is_checked:", "plot_settings[\"y label\"] grid_on = plot_settings[\"Grid on\"] if title == \"<NAME> and Scott type:", "plot exists :param is_checked: Status of isotherm button (bool) \"\"\" if not self.empty", "= None ks_strings = { 1: \"I\", 2: \"II\", 3: \"III\", 4: \"IV\",", "crit_y: y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) self.parent().parent().parent().PS_T_btn.setChecked(True)", "# Isenthalps, isentropes enthalpies = H_list entropies = S_list self.isenthalps = [] self.isentropes", "\"\"\" Hides / shows isentrope lines in the plot if a plot exists", "isopleth_settings[\"Maximum temperature\"] nmax = isopleth_settings[\"N max\"] # Plot depending on which primary variables", "y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True)", "label=label) label = None ks_strings = { 1: \"I\", 2: \"II\", 3: \"III\",", "values :param S: Entropy values :param P_c: Critical pressure :param S_c: Critical entropy", "linestyles = [\"-\", \"--\", \":\", \"-.\"] label = \"VLE\" for i in range(len(VLE)):", "\"\"\" Return plot data for a PS phase envelope :param tp: Thermopack instance", "V in V_list: P, = tp.pressure_tv(temp=T, volume=V, n=fractions) P_list.append(P) P_lists.append(P_list) rho_list = 1", "[] self.isentropes = [] for i in range(len(temperatures)): p_vals, v_vals, s_vals, h_vals =", ":param 
T: Temperature values :param P: Pressure values :param T_c: Critical temperature :param", "tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min, maximum_dz=dz_max, maximum_dlns=dlns_max) line_color = plot_settings[\"Colors\"][0] if LLE[0] is not None:", "= tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) H = np.array([tp.enthalpy_tv(T[i], V[i], fractions) for", "Temperature values :param S: Entropy values :param T_c: Critical temperature :param S_c: Critical", "Hides / shows isenthalp lines in the plot if a plot exists :param", "+ \" K\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.legend(loc=\"best\") self.draw() def plot_global_binary(self, tp): \"\"\"", "crit_y: y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True)", "= self.plotting_preferences[\"Pressure density\"][\"Calc\"] tpv_settings = self.plotting_preferences[\"Pressure density\"][\"TPV\"] crit_settings = self.plotting_preferences[\"Pressure density\"][\"Critical\"] plot_settings =", "T, P, V T_ph_env, P_ph_env, V_ph_env = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True)", "tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Binary pxy\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Binary pxy\"][\"Plotting\"] T", "calc_settings[\"Num points\"] V_list = np.linspace(V_start, V_end, V_num_points) P_lists = [] for T in", "pressure density plot :param tp: Thermopack instance :param fractions: List of molar fractions", "self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True) x = T y = P crit_x 
= T_c crit_y =", "h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color) self.isenthalps.append(h_line)", "label=label) label = None label = \"CRIT\" for i in range(len(CRIT)): self.axes.plot(CRIT[i][:, 0],", "T, P, T_c, P_c, fractions) elif prim_vars == \"PH\": x, y, crit_x, crit_y", "self.axes.plot([1 / v for v in V_ph_env], P_ph_env, label=\"Phase envelope\") self.axes.scatter([1 / V_c],", "temperatures = T_list enthalpies = H_list self.isotherms = [] self.isenthalps = [] for", "for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True) x =", "plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] title = plot_settings[\"Title\"] self.axes.plot(x, y, color=line_color, label=\"Phase", "self.plotting_preferences[\"Pressure density\"][\"Calc\"] tpv_settings = self.plotting_preferences[\"Pressure density\"][\"TPV\"] crit_settings = self.plotting_preferences[\"Pressure density\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Pressure", "self.axes.scatter([1 / V_c], [P_c], label=\"Critical point\") for i in range(len(P_lists)): self.axes.plot(rho_list, P_lists[i], label=str(T_list[i])", "= tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min, maximum_temperature=T_max) if i == 0: p_line, = self.axes.plot(s_vals, t_vals,", "= P_list entropies = S_list self.isobars = [] self.isentropes = [] for i", "self.axes.plot(h_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isobars = None return x, y,", "T, P, T_c, P_c, fractions): \"\"\" Return plot data for a PT phase", "fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i == 0: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color,", 
"minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(s_vals, p_vals,", "return # Plotting line_color = plot_settings[\"Colors\"][0] point_color = plot_settings[\"Colors\"][1] grid_on = plot_settings[\"Grid on\"]", "= tpv_settings[\"Step size\"] # Calculate T, P, V T, P, V = tp.get_envelope_twophase(initial_pressure=p_initial,", "temp=temp, v=v, tol=tol) H_c = tp.enthalpy_tv(T_c, V_c, fractions) S_c = tp.entropy_tv(T_c, V_c, fractions)", "not None: self.axes.plot(L2VE[0], L2VE[2], color=line_color) self.axes.plot(L2VE[1], L2VE[2], color=line_color) grid_on = plot_settings[\"Grid on\"] title", "= tpv_settings[\"Minimum temperature\"] p_max = tpv_settings[\"Maximum pressure\"] step_size = tpv_settings[\"Step size\"] # Calculate", "maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color,", "self.empty and self.isobars: if is_checked: for line in self.isobars: line.set_linestyle(\"solid\") else: for line", "calc_settings = self.plotting_preferences[\"Global binary\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Global binary\"][\"Plotting\"] min_press = calc_settings[\"Minimum pressure\"] min_temp", ":param P: Pressure values :param T_c: Critical temperature :param P_c: Critical pressure :param", "Sort entries in the legend legend = True if legend: if n_isopleths >", "if is_checked: for line in self.isobars: line.set_linestyle(\"solid\") else: for line in self.isobars: line.set_linestyle(\"None\")", "P_min = isopleth_settings[\"Minimum pressure\"] P_max = isopleth_settings[\"Maximum pressure\"] T_min = isopleth_settings[\"Minimum temperature\"] T_max", "P_min global P_max global T_min global T_max global nmax isopleth_1_color = plot_settings[\"Colors\"][2] isopleth_2_color", "if is_checked: for line in self.isotherms: line.set_linestyle(\"solid\") else: for 
line in self.isotherms: line.set_linestyle(\"None\")", "x, y, crit_x, crit_y = self.plot_envelope_PT(tp, T, P, T_c, P_c, fractions) elif prim_vars", "range(len(T))]) global H_list global T_list global S_list global P_list n_isopleths = isopleth_settings[\"Number of", "plotting_preferences def toggle_isenthalps(self, is_checked): \"\"\" Hides / shows isenthalp lines in the plot", "as np class MplCanvas(FigureCanvasQTAgg): \"\"\" A canvas for matplotlib plots. Contains all plot", "y, crit_x, crit_y def plot_binary_pxy(self, tp): \"\"\" Plots a binary pxy plot :param", "self.axes.plot(t_vals, p_vals, color=isopleth_1_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color) self.isenthalps.append(h_line) t_vals, p_vals,", "x = S y = T crit_x = S_c crit_y = T_c #", "calc_settings[\"Temperatures\"] V_start = V_c * calc_settings[\"Volume range start\"] V_end = V_c * calc_settings[\"Volume", "points\"] V_list = np.linspace(V_start, V_end, V_num_points) P_lists = [] for T in T_list:", "pressure\"] t_min = tpv_settings[\"Minimum temperature\"] p_max = tpv_settings[\"Maximum pressure\"] step_size = tpv_settings[\"Step size\"]", "fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(t_vals,", "= isopleth_settings[\"Minimum pressure\"] P_max = isopleth_settings[\"Maximum pressure\"] T_min = isopleth_settings[\"Minimum temperature\"] T_max =", "T, P, V = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) H = np.array([tp.enthalpy_tv(T[i],", "variables are chosen if prim_vars == \"PT\": x, y, crit_x, crit_y = self.plot_envelope_PT(tp,", "p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals,", "fractions) else: 
return # Plotting line_color = plot_settings[\"Colors\"][0] point_color = plot_settings[\"Colors\"][1] grid_on =", "T, P, V T, P, V = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True)", "P_list n_isopleths = isopleth_settings[\"Number of isopleths\"] H_list = np.linspace(np.min(H), np.max(H), n_isopleths) S_list =", "self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\") self.isenthalps.append(h_line) self.isentropes = None self.isotherms = None return x, y,", "xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] grid_on = plot_settings[\"Grid on\"] if", "= P_c # Isenthalps, isentropes enthalpies = H_list entropies = S_list self.isenthalps =", "p_initial = tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"] p_max = tpv_settings[\"Maximum pressure\"] step_size", "\"\"\" Plots a pressure density plot :param tp: Thermopack instance :param fractions: List", "self.isentropes: line.set_linestyle(\"solid\") else: for line in self.isentropes: line.set_linestyle(\"None\") self.draw() else: return def toggle_isotherms(self,", "= T_list enthalpies = H_list self.isotherms = [] self.isenthalps = [] for i", "maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color, label=\"Isentrope\") else:", "/ shows isentrope lines in the plot if a plot exists :param is_checked:", "= calc_settings[\"Azeotropes\"] KSTYPE, VLE, LLVE, CRIT, AZ = tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp, include_azeotropes=azeotropes) colors =", "None, None, None, None # Set global variables, so that they are accessible", "= None label = \"LLVE\" for i in range(len(LLVE)): self.axes.plot(LLVE[i][:, 0], LLVE[i][:, 1],", "temperatures = T_list entropies = S_list self.isotherms = [] self.isentropes = [] for", "None return x, y, crit_x, crit_y def plot_envelope_TH(self, tp, T, H, 
T_c, H_c,", "self.isenthalps: line.set_linestyle(\"None\") self.draw() def toggle_isentropes(self, is_checked): \"\"\" Hides / shows isentrope lines in", "step_size = tpv_settings[\"Step size\"] # Calculate T, P, V T_ph_env, P_ph_env, V_ph_env =", "self.isentropes: if is_checked: for line in self.isentropes: line.set_linestyle(\"solid\") else: for line in self.isentropes:", "self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True) x = H y = P crit_x = H_c crit_y =", "self.isobars = None return x, y, crit_x, crit_y def plot_envelope_PH(self, tp, P, H,", "line.set_linestyle(\"solid\") else: for line in self.isentropes: line.set_linestyle(\"None\") self.draw() else: return def toggle_isotherms(self, is_checked):", "p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isobars = None return x, y, crit_x,", "self.axes.set_ylabel(ylabel) self.axes.legend(loc=\"best\") self.draw() def plot_global_binary(self, tp): \"\"\" Plots a binary pxy plot :param", ":param S: Entropy values :param P_c: Critical pressure :param S_c: Critical entropy :param", "P_lists = [] for T in T_list: P_list = [] for V in", "correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) self.parent().parent().parent().PS_T_btn.setChecked(True) self.parent().parent().parent().PS_H_btn.setChecked(True) x = S y = P crit_x =", "== 0: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(h_vals, p_vals,", "= self.axes.plot(s_vals, p_vals, color=isopleth_2_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color) self.isenthalps.append(h_line) self.isentropes", "pressure :param H_c: Critical enthalpy :param fractions: List of molar fractions :return: x:", "crit_x, crit_y = self.plot_envelope_PT(tp, T, P, T_c, P_c, fractions) elif prim_vars == \"PH\":", "[] for i in 
range(len(temperatures)): p_vals, v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min,", "y, crit_x, crit_y = self.plot_envelope_PH(tp, P, H, P_c, H_c, fractions) elif prim_vars ==", "self.isentropes: line.set_linestyle(\"None\") self.draw() else: return def toggle_isotherms(self, is_checked): \"\"\" Hides / shows isotherm", "self.parent().parent().parent().PS_H_btn.setChecked(True) x = S y = P crit_x = S_c crit_y = P_c", "y, crit_x, crit_y = self.plot_envelope_PT(tp, T, P, T_c, P_c, fractions) elif prim_vars ==", "\"LLVE\" for i in range(len(LLVE)): self.axes.plot(LLVE[i][:, 0], LLVE[i][:, 1], linestyle=linestyles[1], color=colors[1], label=label) label", "y: y values for plot, crit_x: x value for critical point, crit_y: y", "s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isobars = None return", "values :param P: Pressure values :param T_c: Critical temperature :param P_c: Critical pressure", "plot_pressure_density(self, tp, fractions): \"\"\" Plots a pressure density plot :param tp: Thermopack instance", "\"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True) x = H y =", "plot_settings[\"Colors\"][3] P_min = isopleth_settings[\"Minimum pressure\"] P_max = isopleth_settings[\"Maximum pressure\"] T_min = isopleth_settings[\"Minimum temperature\"]", "\"I\", 2: \"II\", 3: \"III\", 4: \"IV\", 5: \"V\" } title = plot_settings[\"Title\"]", "tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i == 0: t_line, = self.axes.plot(s_vals, p_vals,", "label = \"VLE\" for i in range(len(VLE)): self.axes.plot(VLE[i][:, 0], VLE[i][:, 1], linestyle=linestyles[0], color=colors[0],", "y, crit_x, crit_y def plot_envelope_TH(self, tp, T, H, T_c, H_c, 
fractions): \"\"\" Return", "self.axes.plot(h_vals, t_vals, color=isopleth_1_color) self.isobars.append(p_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max,", "minimum_temperature=min_temp, include_azeotropes=azeotropes) colors = plot_settings[\"Colors\"] linestyles = [\"-\", \"--\", \":\", \"-.\"] label =", "isobar button (bool) \"\"\" if not self.empty and self.isobars: if is_checked: for line", "a plot exists :param is_checked: Status of isentrope button (bool) \"\"\" if not", "isopleth_settings[\"Minimum pressure\"] P_max = isopleth_settings[\"Maximum pressure\"] T_min = isopleth_settings[\"Minimum temperature\"] T_max = isopleth_settings[\"Maximum", "= critical_settings[\"Temperature\"] v = critical_settings[\"Volume\"] tol = critical_settings[\"Error tolerance\"] # Calculate critical variables", "nmax=nmax) if i == 0: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line,", "L2VE[0] is not None: self.axes.plot(L2VE[0], L2VE[2], color=line_color) self.axes.plot(L2VE[1], L2VE[2], color=line_color) grid_on = plot_settings[\"Grid", "range(len(enthalpies)): t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax)", "v in V_ph_env], P_ph_env, label=\"Phase envelope\") self.axes.scatter([1 / V_c], [P_c], label=\"Critical point\") for", "S_list self.isobars = [] self.isentropes = [] for i in range(len(pressures)): t_vals, v_vals,", "entropies = S_list self.isenthalps = [] self.isentropes = [] for i in range(len(enthalpies)):", "= plot_settings[\"y label\"] self.axes.grid(grid_on) self.axes.set_title(title) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.draw() def plot_pressure_density(self, tp, fractions): \"\"\"", "= self.axes.plot(h_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: 
t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals,", "Hides / shows isotherm lines in the plot if a plot exists :param", "= self.plotting_preferences[\"Binary pxy\"][\"Plotting\"] T = calc_settings[\"Temperature\"] p_max = calc_settings[\"Maximum pressure\"] p_min = calc_settings[\"Minimum", "L2VE = tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min, maximum_dz=dz_max, maximum_dlns=dlns_max) line_color = plot_settings[\"Colors\"][0] if LLE[0] is", "self.axes.set_ylabel(ylabel) self.draw() def plot_pressure_density(self, tp, fractions): \"\"\" Plots a pressure density plot :param", "plot_envelope_TH(self, tp, T, H, T_c, H_c, fractions): \"\"\" Return plot data for a", "S_c = tp.entropy_tv(T_c, V_c, fractions) except Exception as e: msg = MessageBox(\"Error\", str(e))", "plot_envelope_PS(self, tp, P, S, P_c, S_c, fractions): \"\"\" Return plot data for a", "maximum_temperature=T_max) if i == 0: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\", label=\"Isobar\") else: p_line,", "isentropes temperatures = T_list entropies = S_list self.isotherms = [] self.isentropes = []", "= S_c crit_y = T_c # Isenthalps, isobars pressures = P_list enthalpies =", "point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True) x = S y", "fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(s_vals,", "= plotting_preferences def toggle_isenthalps(self, is_checked): \"\"\" Hides / shows isenthalp lines in the", "on\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.plot([1 / v for", "they are accessible in all phase envelope plot functions global isopleth_1_color global isopleth_2_color", "if a plot exists 
:param is_checked: Status of isobar button (bool) \"\"\" if", "line in self.isenthalps: line.set_linestyle(\"solid\") else: for line in self.isenthalps: line.set_linestyle(\"None\") self.draw() def toggle_isentropes(self,", "self.plot_envelope_PH(tp, P, H, P_c, H_c, fractions) elif prim_vars == \"PS\": x, y, crit_x,", "* 1.40, n_isopleths) P_list = np.linspace(np.min(P) * 0.60, np.max(P) * 1.40, n_isopleths) temp", "calc_settings[\"Minimum temperature\"] azeotropes = calc_settings[\"Azeotropes\"] KSTYPE, VLE, LLVE, CRIT, AZ = tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp,", "\"\"\" calc_settings = self.plotting_preferences[\"Global binary\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Global binary\"][\"Plotting\"] min_press = calc_settings[\"Minimum pressure\"]", "which primary variables are chosen if prim_vars == \"PT\": x, y, crit_x, crit_y", "else: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isotherms = None", "s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line)", "tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp, include_azeotropes=azeotropes) colors = plot_settings[\"Colors\"] linestyles = [\"-\", \"--\", \":\", \"-.\"] label", "= np.linspace(np.min(S), np.max(S), n_isopleths) T_list = np.linspace(np.min(T) * 0.60, np.max(T) * 1.40, n_isopleths)", "in range(len(temperatures)): p_vals, v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if", "y, crit_x, crit_y = self.plot_envelope_PS(tp, P, S, P_c, S_c, fractions) elif prim_vars ==", "4: \"IV\", 5: \"V\" } title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel", "= S y = P crit_x = S_c crit_y = P_c # isotherms,", "0.60, np.max(P) * 
1.40, n_isopleths) temp = critical_settings[\"Temperature\"] v = critical_settings[\"Volume\"] tol =", "values for plot, crit_x: x value for critical point, crit_y: y value for", "p_vals, color=isopleth_1_color) self.isenthalps.append(h_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min,", "label=str(T_list[i]) + \" K\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.legend(loc=\"best\") self.draw() def plot_global_binary(self, tp):", "for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) self.parent().parent().parent().PS_T_btn.setChecked(True) self.parent().parent().parent().PS_H_btn.setChecked(True) x =", "__init__(self, components, plotting_preferences): self.fig = Figure(dpi=100) self.empty = True self.components = components self.isenthalps", "minimum_temperature=T_min, maximum_temperature=T_max) if i == 0: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\", label=\"Isobar\") else:", "label\"] ylabel = plot_settings[\"y label\"] self.axes.grid(grid_on) self.axes.set_title(title) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.draw() def plot_pressure_density(self, tp,", "color=colors[1], label=label) label = None label = \"CRIT\" for i in range(len(CRIT)): self.axes.plot(CRIT[i][:,", "P_c: Critical pressure :param S_c: Critical entropy :param fractions: List of molar fractions", "color=isopleth_1_color) self.isenthalps.append(h_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max,", "self.isobars.append(p_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, 
maximum_temperature=T_max, nmax=nmax)", "self.axes.plot(h_vals, t_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isotherms = None return x, y,", "T_c, V_c, P_c, H_c, S_c = None, None, None, None, None # Set", "global P_max global T_min global T_max global nmax isopleth_1_color = plot_settings[\"Colors\"][2] isopleth_2_color =", "tp, T, P, T_c, P_c, fractions): \"\"\" Return plot data for a PT", "on\"] title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.grid(grid_on)", "self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True) x = S y = T crit_x = S_c crit_y", "p_vals, color=\"#d5d3ff\", label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\") self.isenthalps.append(h_line) self.isentropes = None", "self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True) x = H y = T crit_x = H_c crit_y", "data for a PH phase envelope :param tp: Thermopack instance :param P: Pressure", "import FigureCanvasQTAgg from matplotlib.figure import Figure from gui.utils import MessageBox import numpy as", "if not self.empty and self.isotherms: if is_checked: for line in self.isotherms: line.set_linestyle(\"solid\") else:", "Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True) x = T y = P crit_x", "# Set global variables, so that they are accessible in all phase envelope", "crit_tol = crit_settings[\"Error tolerance\"] # Calculate critical T, V, P T_c, V_c, P_c", ":param H: Enthalpy values :param P_c: Critical pressure :param H_c: Critical enthalpy 
:param", "x, y, crit_x, crit_y def plot_envelope_PS(self, tp, P, S, P_c, S_c, fractions): \"\"\"", "= tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"] p_max = tpv_settings[\"Maximum pressure\"] step_size =", "nmax=nmax) if i == 0: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line,", "None label = \"LLVE\" for i in range(len(LLVE)): self.axes.plot(LLVE[i][:, 0], LLVE[i][:, 1], linestyle=linestyles[1],", "plots. Contains all plot functionality for Plot Mode \"\"\" def __init__(self, components, plotting_preferences):", "self.isobars: if is_checked: for line in self.isobars: line.set_linestyle(\"solid\") else: for line in self.isobars:", "self.isotherms = None return x, y, crit_x, crit_y def plot_envelope_TS(self, tp, T, S,", "return x, y, crit_x, crit_y def plot_binary_pxy(self, tp): \"\"\" Plots a binary pxy", "= T_c crit_y = P_c # Isenthalps, isentropes enthalpies = H_list entropies =", "P_c # Isenthalps, isentropes enthalpies = H_list entropies = S_list self.isenthalps = []", "handles[1]], [labels[3], labels[2], labels[0], labels[1]], loc=\"best\") else: self.axes.legend() self.draw() def plot_envelope_PT(self, tp, T,", "else: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color) self.isenthalps.append(h_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i],", "H: Enthalpy values :param P_c: Critical pressure :param H_c: Critical enthalpy :param fractions:", "label=\"Isentrope\") else: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isotherms = None self.isobars =", "for v in V_ph_env], P_ph_env, label=\"Phase envelope\") self.axes.scatter([1 / V_c], [P_c], label=\"Critical point\")", "if not self.empty and self.isobars: if is_checked: for line in self.isobars: line.set_linestyle(\"solid\") else:", "calc_settings[\"Minimum pressure\"] min_temp = calc_settings[\"Minimum temperature\"] azeotropes = 
calc_settings[\"Azeotropes\"] KSTYPE, VLE, LLVE, CRIT,", "line.set_linestyle(\"solid\") else: for line in self.isobars: line.set_linestyle(\"None\") self.draw() else: return def plot_envelope(self, tp,", "envelope :param tp: Thermopack instance :param P: Pressure values :param S: Entropy values", "\"\"\" Plots a binary pxy plot :param tp: Thermopack instance \"\"\" calc_settings =", "self.plotting_preferences[\"Phase envelope\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Phase envelope\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum", "color=line_color) self.axes.plot(LLE[1], LLE[2], color=line_color) if L1VE[0] is not None: self.axes.plot(L1VE[0], L1VE[2], color=line_color) self.axes.plot(L1VE[1],", "None, None # Set global variables, so that they are accessible in all", "in self.isenthalps: line.set_linestyle(\"solid\") else: for line in self.isenthalps: line.set_linestyle(\"None\") self.draw() def toggle_isentropes(self, is_checked):", "Return plot data for a PT phase envelope :param tp: Thermopack instance :param", "plot_settings[\"Grid on\"] title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"]", "maximum_dlns=dlns_max) line_color = plot_settings[\"Colors\"][0] if LLE[0] is not None: self.axes.plot(LLE[0], LLE[2], color=line_color) self.axes.plot(LLE[1],", "= calc_settings[\"Minimum temperature\"] azeotropes = calc_settings[\"Azeotropes\"] KSTYPE, VLE, LLVE, CRIT, AZ = tp.global_binary_plot(minimum_pressure=min_press,", "plot_settings = self.plotting_preferences[\"Phase envelope\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"] p_max", "in the plot if a plot exists :param is_checked: Status of isobar button", "crit_y = self.plot_envelope_PH(tp, P, H, P_c, H_c, fractions) elif prim_vars == \"PS\": x,", "Status of isobar button (bool) \"\"\" if not self.empty and self.isobars: if 
is_checked:", "z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) H = np.array([tp.enthalpy_tv(T[i], V[i], fractions) for i in", "pressure :param fractions: List of molar fractions :return: x: x values for plot,", "PS phase envelope :param tp: Thermopack instance :param T: Temperature values :param H:", "S_c, fractions) else: return # Plotting line_color = plot_settings[\"Colors\"][0] point_color = plot_settings[\"Colors\"][1] grid_on", "elif prim_vars == \"TH\": x, y, crit_x, crit_y = self.plot_envelope_TH(tp, T, H, T_c,", "Temperature values :param P: Pressure values :param T_c: Critical temperature :param P_c: Critical", "label=\"Isotherm\") else: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, s_vals =", "self.empty and self.isenthalps: if is_checked: for line in self.isenthalps: line.set_linestyle(\"solid\") else: for line", "line.set_linestyle(\"None\") self.draw() else: return def plot_envelope(self, tp, prim_vars, fractions): \"\"\" Plots a phase", "range(len(LLVE)): self.axes.plot(LLVE[i][:, 0], LLVE[i][:, 1], linestyle=linestyles[1], color=colors[1], label=label) label = None label =", "point, crit_y: y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4)", "i in range(len(VLE)): self.axes.plot(VLE[i][:, 0], VLE[i][:, 1], linestyle=linestyles[0], color=colors[0], label=label) label = None", "plot_settings[\"Title\"] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"]", "self.axes.plot(s_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals,", "nmax=nmax) if i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\", label=\"Isenthalp\") else: h_line,", "None self.isobars = None 
super(MplCanvas, self).__init__(figure=self.fig) self.plotting_preferences = plotting_preferences def toggle_isenthalps(self, is_checked): \"\"\"", "minimum_temperature=t_min, step_size=step_size, calc_v=True) H = np.array([tp.enthalpy_tv(T[i], V[i], fractions) for i in range(len(T))]) S", "= [] for i in range(len(temperatures)): p_vals, v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i], fractions,", "LLE, L1VE, L2VE = tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min, maximum_dz=dz_max, maximum_dlns=dlns_max) line_color = plot_settings[\"Colors\"][0] if", "self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) self.parent().parent().parent().PS_T_btn.setChecked(True) self.parent().parent().parent().PS_H_btn.setChecked(True) x = S y = P crit_x = S_c crit_y", "for line in self.isenthalps: line.set_linestyle(\"None\") self.draw() def toggle_isentropes(self, is_checked): \"\"\" Hides / shows", "} title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] grid_on", "the plot if a plot exists :param is_checked: Status of isotherm button (bool)", ":param fractions: List of molar fractions \"\"\" calc_settings = self.plotting_preferences[\"Pressure density\"][\"Calc\"] tpv_settings =", "= self.plotting_preferences[\"Pressure density\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Pressure density\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min =", "= T crit_x = S_c crit_y = T_c # Isenthalps, isobars pressures =", "instance \"\"\" calc_settings = self.plotting_preferences[\"Binary pxy\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Binary pxy\"][\"Plotting\"] T = calc_settings[\"Temperature\"]", "critical_settings[\"Volume\"] tol = critical_settings[\"Error tolerance\"] # Calculate critical variables try: T_c, V_c, P_c", "0: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = 
self.axes.plot(h_vals, p_vals, color=isopleth_1_color)", "linestyle=linestyles[3], color=colors[3], label=label) label = None ks_strings = { 1: \"I\", 2: \"II\",", "\"\"\" if not self.empty and self.isobars: if is_checked: for line in self.isobars: line.set_linestyle(\"solid\")", "enthalpies = H_list entropies = S_list self.isenthalps = [] self.isentropes = [] for", "try: T_c, V_c, P_c = tp.critical(n=fractions, temp=temp, v=v, tol=tol) H_c = tp.enthalpy_tv(T_c, V_c,", ":param T_c: Critical temperature :param P_c: Critical pressure :param fractions: List of molar", "P_c: Critical pressure :param fractions: List of molar fractions :return: x: x values", "calc_settings[\"Maximum dlns\"] LLE, L1VE, L2VE = tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min, maximum_dz=dz_max, maximum_dlns=dlns_max) line_color =", "self.isotherms.append(t_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax)", "ks_strings = { 1: \"I\", 2: \"II\", 3: \"III\", 4: \"IV\", 5: \"V\"", "for line in self.isotherms: line.set_linestyle(\"None\") self.draw() else: return def toggle_isobars(self, is_checked): \"\"\" Hides", "exists :param is_checked: Status of isobar button (bool) \"\"\" if not self.empty and", "in V_ph_env], P_ph_env, label=\"Phase envelope\") self.axes.scatter([1 / V_c], [P_c], label=\"Critical point\") for i", "List of molar fractions for the components \"\"\" tpv_settings = self.plotting_preferences[\"Phase envelope\"][\"TPV\"] isopleth_settings", "t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if", "global H_list global T_list global S_list global P_list n_isopleths = isopleth_settings[\"Number of isopleths\"]", "in range(len(LLVE)): self.axes.plot(LLVE[i][:, 0], LLVE[i][:, 1], 
linestyle=linestyles[1], color=colors[1], label=label) label = None label", "self.isenthalps.append(h_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax)", "for line in self.isobars: line.set_linestyle(\"solid\") else: for line in self.isobars: line.set_linestyle(\"None\") self.draw() else:", "primary variables are chosen if prim_vars == \"PT\": x, y, crit_x, crit_y =", "True if legend: if n_isopleths > 0: handles, labels = self.axes.get_legend_handles_labels() self.axes.legend([handles[3], handles[2],", "0: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color)", "self.isenthalps = None self.isentropes = None self.isotherms = None self.isobars = None super(MplCanvas,", "V_c, P_c = tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess, tol=crit_tol) T_list = calc_settings[\"Temperatures\"] V_start = V_c", "tol=tol) H_c = tp.enthalpy_tv(T_c, V_c, fractions) S_c = tp.entropy_tv(T_c, V_c, fractions) except Exception", "if i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color, label=\"Isenthalp\") else: h_line, =", "self).__init__(figure=self.fig) self.plotting_preferences = plotting_preferences def toggle_isenthalps(self, is_checked): \"\"\" Hides / shows isenthalp lines", "self.isentropes = [] for i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i],", "variables, so that they are accessible in all phase envelope plot functions global", "values for plot, y: y values for plot, crit_x: x value for critical", "T: Temperature values :param S: Entropy values :param T_c: Critical temperature :param S_c:", "(e.g. PT, PH, ..) 
:param fractions: List of molar fractions for the components", "for T in T_list: P_list = [] for V in V_list: P, =", "line in self.isobars: line.set_linestyle(\"solid\") else: for line in self.isobars: line.set_linestyle(\"None\") self.draw() else: return", "fractions) for i in range(len(T))]) global H_list global T_list global S_list global P_list", "color=isopleth_2_color) self.isentropes.append(s_line) self.isotherms = None self.isobars = None return x, y, crit_x, crit_y", "i == 0: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(s_vals,", "h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\", label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\") self.isenthalps.append(h_line)", "crit_settings[\"Error tolerance\"] # Calculate critical T, V, P T_c, V_c, P_c = tp.critical(n=fractions,", ":param P_c: Critical pressure :param S_c: Critical entropy :param fractions: List of molar", "in all phase envelope plot functions global isopleth_1_color global isopleth_2_color global P_min global", "= np.linspace(np.min(T) * 0.60, np.max(T) * 1.40, n_isopleths) P_list = np.linspace(np.min(P) * 0.60,", "T_c, S_c, fractions): \"\"\" Return plot data for a PS phase envelope :param", "tp: Thermopack instance :param P: Pressure values :param S: Entropy values :param P_c:", "# Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True) x = H y = T", "else: for line in self.isotherms: line.set_linestyle(\"None\") self.draw() else: return def toggle_isobars(self, is_checked): \"\"\"", "range(len(P_lists)): self.axes.plot(rho_list, P_lists[i], label=str(T_list[i]) + \" K\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.legend(loc=\"best\") self.draw()", "= P_c # 
isotherms, isentropes temperatures = T_list entropies = S_list self.isotherms =", "calc_settings[\"Temperature\"] p_max = calc_settings[\"Maximum pressure\"] p_min = calc_settings[\"Minimum pressure\"] dz_max = calc_settings[\"Maximum dz\"]", "= tp.enthalpy_tv(T_c, V_c, fractions) S_c = tp.entropy_tv(T_c, V_c, fractions) except Exception as e:", "= plot_settings[\"y label\"] self.axes.plot([1 / v for v in V_ph_env], P_ph_env, label=\"Phase envelope\")", "# isotherms, isentropes temperatures = T_list entropies = S_list self.isotherms = [] self.isentropes", "and Scott type: \": title += ks_strings[KSTYPE] self.axes.set_title(title) legend = self.axes.legend(loc=\"best\", numpoints=1) legend.get_frame().set_linewidth(0.0)", "return def toggle_isotherms(self, is_checked): \"\"\" Hides / shows isotherm lines in the plot", "self.parent().parent().parent().TS_H_btn.setChecked(True) x = S y = T crit_x = S_c crit_y = T_c", "T_c, P_c, fractions) elif prim_vars == \"PH\": x, y, crit_x, crit_y = self.plot_envelope_PH(tp,", "= None return x, y, crit_x, crit_y def plot_envelope_PH(self, tp, P, H, P_c,", "P crit_x = H_c crit_y = P_c # isotherms, isentropes temperatures = T_list", "= critical_settings[\"Volume\"] tol = critical_settings[\"Error tolerance\"] # Calculate critical variables try: T_c, V_c,", "= \"CRIT\" for i in range(len(CRIT)): self.axes.plot(CRIT[i][:, 0], CRIT[i][:, 1], linestyle=linestyles[2], color=colors[2], label=label)", "color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isotherms", "[crit_y], color=point_color, label=\"Critical point\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) # Sort entries in the", "T, H, T_c, H_c, fractions) elif prim_vars == \"TS\": x, y, crit_x, crit_y", "color=line_color) self.axes.plot(L2VE[1], L2VE[2], color=line_color) grid_on = 
plot_settings[\"Grid on\"] title = plot_settings[\"Title\"] xlabel =", "crit_y = P_c # Isenthalps, isentropes enthalpies = H_list entropies = S_list self.isenthalps", "= tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess, tol=crit_tol) T_list = calc_settings[\"Temperatures\"] V_start = V_c * calc_settings[\"Volume", "values :param T_c: Critical temperature :param S_c: Critical entropy :param fractions: List of", ":param tp: Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Binary pxy\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Binary pxy\"][\"Plotting\"]", ":param tp: Thermopack instance :param prim_vars: Primary variables for the plot (e.g. PT,", "def toggle_isotherms(self, is_checked): \"\"\" Hides / shows isotherm lines in the plot if", "= self.axes.plot(t_vals, p_vals, color=isopleth_1_color) self.isenthalps.append(h_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min,", "p_vals, color=isopleth_2_color) self.isenthalps.append(h_line) self.isentropes = None self.isobars = None return x, y, crit_x,", "minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(h_vals, p_vals,", "self.plot_envelope_PS(tp, P, S, P_c, S_c, fractions) elif prim_vars == \"TH\": x, y, crit_x,", "0: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color)", "self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True) x = H y = T crit_x = H_c crit_y =", "values :param P_c: Critical pressure :param S_c: Critical entropy :param fractions: List of", "else: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color) self.isenthalps.append(h_line) self.isentropes = None self.isobars = None", "(bool) \"\"\" if not self.empty and self.isobars: if 
is_checked: for line in self.isobars:", "and self.isobars: if is_checked: for line in self.isobars: line.set_linestyle(\"solid\") else: for line in", "V[i], fractions) for i in range(len(T))]) global H_list global T_list global S_list global", "S_c = None, None, None, None, None # Set global variables, so that", "else: for line in self.isenthalps: line.set_linestyle(\"None\") self.draw() def toggle_isentropes(self, is_checked): \"\"\" Hides /", "n_isopleths = isopleth_settings[\"Number of isopleths\"] H_list = np.linspace(np.min(H), np.max(H), n_isopleths) S_list = np.linspace(np.min(S),", "MplCanvas(FigureCanvasQTAgg): \"\"\" A canvas for matplotlib plots. Contains all plot functionality for Plot", "P_lists.append(P_list) rho_list = 1 / V_list title = plot_settings[\"Title\"] grid_on = plot_settings[\"Grid on\"]", "P: Pressure values :param T_c: Critical temperature :param P_c: Critical pressure :param fractions:", "if i == 0: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color, label=\"Isobar\") else: p_line, =", "= \"LLVE\" for i in range(len(LLVE)): self.axes.plot(LLVE[i][:, 0], LLVE[i][:, 1], linestyle=linestyles[1], color=colors[1], label=label)", "accessible in all phase envelope plot functions global isopleth_1_color global isopleth_2_color global P_min", "envelope :param tp: Thermopack instance :param T: Temperature values :param S: Entropy values", "p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i", "L1VE[0] is not None: self.axes.plot(L1VE[0], L1VE[2], color=line_color) self.axes.plot(L1VE[1], L1VE[2], color=line_color) if L2VE[0] is", "P, S, P_c, S_c, fractions): \"\"\" Return plot data for a PS phase", "crit_y = P_c # isotherms, isenthalps temperatures = T_list enthalpies = H_list self.isotherms", "= [] for i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions,", 
"self.empty = True self.components = components self.isenthalps = None self.isentropes = None self.isotherms", "is not None: self.axes.plot(LLE[0], LLE[2], color=line_color) self.axes.plot(LLE[1], LLE[2], color=line_color) if L1VE[0] is not", "plot_settings[\"Colors\"][0] if LLE[0] is not None: self.axes.plot(LLE[0], LLE[2], color=line_color) self.axes.plot(LLE[1], LLE[2], color=line_color) if", "S y = T crit_x = S_c crit_y = T_c # Isenthalps, isobars", "is_checked): \"\"\" Hides / shows isenthalp lines in the plot if a plot", "x = S y = P crit_x = S_c crit_y = P_c #", "Mode \"\"\" def __init__(self, components, plotting_preferences): self.fig = Figure(dpi=100) self.empty = True self.components", "0], CRIT[i][:, 1], linestyle=linestyles[2], color=colors[2], label=label) label = None label = \"AZ\" for", "entropies = S_list self.isobars = [] self.isentropes = [] for i in range(len(pressures)):", "fractions) elif prim_vars == \"TH\": x, y, crit_x, crit_y = self.plot_envelope_TH(tp, T, H,", "T_c: Critical temperature :param P_c: Critical pressure :param fractions: List of molar fractions", "P: Pressure values :param S: Entropy values :param P_c: Critical pressure :param S_c:", "= S y = T crit_x = S_c crit_y = T_c # Isenthalps,", "LLE[0] is not None: self.axes.plot(LLE[0], LLE[2], color=line_color) self.axes.plot(LLE[1], LLE[2], color=line_color) if L1VE[0] is", "label=label) label = None label = \"LLVE\" for i in range(len(LLVE)): self.axes.plot(LLVE[i][:, 0],", "all phase envelope plot functions global isopleth_1_color global isopleth_2_color global P_min global P_max", "if i == 0: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, =", "[] self.isentropes = [] for i in range(len(enthalpies)): t_vals, p_vals, v_vals, s_vals =", "P_ph_env, label=\"Phase envelope\") self.axes.scatter([1 / V_c], [P_c], label=\"Critical point\") for i in range(len(P_lists)):", "i in range(len(T))]) global H_list global T_list global S_list 
global P_list n_isopleths =", "= self.axes.plot(s_vals, p_vals, color=isopleth_2_color) self.isenthalps.append(h_line) self.isentropes = None self.isobars = None return x,", "P_list enthalpies = H_list self.isenthalps = [] self.isobars = [] for i in", "\"<NAME> and Scott type: \": title += ks_strings[KSTYPE] self.axes.set_title(title) legend = self.axes.legend(loc=\"best\", numpoints=1)", "critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True) x = H", "pressure\"] step_size = tpv_settings[\"Step size\"] # Calculate T, P, V T_ph_env, P_ph_env, V_ph_env", "x, y, crit_x, crit_y = self.plot_envelope_TS(tp, T, S, T_c, S_c, fractions) else: return", "i == 0: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(t_vals,", "fractions) for i in range(len(T))]) S = np.array([tp.entropy_tv(T[i], V[i], fractions) for i in", "global T_list global S_list global P_list n_isopleths = isopleth_settings[\"Number of isopleths\"] H_list =", "global nmax isopleth_1_color = plot_settings[\"Colors\"][2] isopleth_2_color = plot_settings[\"Colors\"][3] P_min = isopleth_settings[\"Minimum pressure\"] P_max", "P_max global T_min global T_max global nmax isopleth_1_color = plot_settings[\"Colors\"][2] isopleth_2_color = plot_settings[\"Colors\"][3]", "self.isotherms.append(t_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax)", "labels = self.axes.get_legend_handles_labels() self.axes.legend([handles[3], handles[2], handles[0], handles[1]], [labels[3], labels[2], labels[0], labels[1]], loc=\"best\") else:", "label = None label = \"AZ\" for i in range(len(AZ)): self.axes.plot(AZ[i][:, 0], AZ[i][:,", "crit_y = 
self.plot_envelope_TS(tp, T, S, T_c, S_c, fractions) else: return # Plotting line_color", "is_checked: Status of isenthalp button (bool) \"\"\" if not self.empty and self.isenthalps: if", "= tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i == 0: t_line, = self.axes.plot(h_vals,", "step_size=step_size, calc_v=True) H = np.array([tp.enthalpy_tv(T[i], V[i], fractions) for i in range(len(T))]) S =", "= self.plot_envelope_PT(tp, T, P, T_c, P_c, fractions) elif prim_vars == \"PH\": x, y,", "label\"] self.axes.grid(grid_on) self.axes.set_title(title) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.draw() def plot_pressure_density(self, tp, fractions): \"\"\" Plots a", "= [] self.isentropes = [] for i in range(len(enthalpies)): t_vals, p_vals, v_vals, s_vals", "maximum_pressure=P_max, nmax=nmax) if i == 0: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else:", "S = np.array([tp.entropy_tv(T[i], V[i], fractions) for i in range(len(T))]) global H_list global T_list", "V = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) H = np.array([tp.enthalpy_tv(T[i], V[i], fractions)", "np.linspace(V_start, V_end, V_num_points) P_lists = [] for T in T_list: P_list = []", "xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] title = plot_settings[\"Title\"] self.axes.plot(x, y,", "x values for plot, y: y values for plot, crit_x: x value for", "for a PH phase envelope :param tp: Thermopack instance :param P: Pressure values", "[] self.isobars = [] for i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals =", "isentrope lines in the plot if a plot exists :param is_checked: Status of", ":param T_c: Critical temperature :param S_c: Critical entropy :param fractions: List of molar", "\"\"\" # Display correct buttons 
self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True) x = T y =", "self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) # Sort entries in the legend legend = True if legend:", "= plot_settings[\"Colors\"][0] point_color = plot_settings[\"Colors\"][1] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"]", "0], VLE[i][:, 1], linestyle=linestyles[0], color=colors[0], label=label) label = None label = \"LLVE\" for", "button (bool) \"\"\" if not self.empty and self.isotherms: if is_checked: for line in", "for i in range(len(AZ)): self.axes.plot(AZ[i][:, 0], AZ[i][:, 1], linestyle=linestyles[3], color=colors[3], label=label) label =", "T_c, V_c, P_c = tp.critical(n=fractions, temp=temp, v=v, tol=tol) H_c = tp.enthalpy_tv(T_c, V_c, fractions)", "= None self.isotherms = None return x, y, crit_x, crit_y def plot_envelope_TS(self, tp,", "= P crit_x = T_c crit_y = P_c # Isenthalps, isentropes enthalpies =", "else: return def toggle_isobars(self, is_checked): \"\"\" Hides / shows isobar lines in the", "Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True) x = H y = T crit_x", "isopleth_2_color = plot_settings[\"Colors\"][3] P_min = isopleth_settings[\"Minimum pressure\"] P_max = isopleth_settings[\"Maximum pressure\"] T_min =", "= [] for V in V_list: P, = tp.pressure_tv(temp=T, volume=V, n=fractions) P_list.append(P) P_lists.append(P_list)", "isentropes enthalpies = H_list entropies = S_list self.isenthalps = [] self.isentropes = []", "s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color) self.isentropes.append(s_line)", "except Exception as e: msg = 
MessageBox(\"Error\", str(e)) msg.exec_() T_c, V_c, P_c, H_c,", "= [] self.isobars = [] for i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals", "temp = critical_settings[\"Temperature\"] v = critical_settings[\"Volume\"] tol = critical_settings[\"Error tolerance\"] # Calculate critical", "value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True) x", "temperature\"] nmax = isopleth_settings[\"N max\"] # Plot depending on which primary variables are", "V_list: P, = tp.pressure_tv(temp=T, volume=V, n=fractions) P_list.append(P) P_lists.append(P_list) rho_list = 1 / V_list", "step_size=step_size, calc_v=True) crit_t_guess = crit_settings[\"Temperature\"] crit_v_guess = crit_settings[\"Volume\"] crit_tol = crit_settings[\"Error tolerance\"] #", "for i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=200.0, maximum_temperature=500.0,", "self.isobars = None return x, y, crit_x, crit_y def plot_envelope_PS(self, tp, P, S,", "H_c, fractions): \"\"\" Return plot data for a PH phase envelope :param tp:", "are chosen if prim_vars == \"PT\": x, y, crit_x, crit_y = self.plot_envelope_PT(tp, T,", "tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min, maximum_temperature=T_max) if i == 0: p_line, = self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\",", "prim_vars == \"TH\": x, y, crit_x, crit_y = self.plot_envelope_TH(tp, T, H, T_c, H_c,", "self.axes.plot(h_vals, t_vals, color=isopleth_1_color, label=\"Isobar\") else: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color) self.isobars.append(p_line) t_vals, p_vals,", "plot_settings = self.plotting_preferences[\"Pressure density\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"] p_max", "= 
self.plotting_preferences[\"Pressure density\"][\"TPV\"] crit_settings = self.plotting_preferences[\"Pressure density\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Pressure density\"][\"Plotting\"] p_initial =", "V_c], [P_c], label=\"Critical point\") for i in range(len(P_lists)): self.axes.plot(rho_list, P_lists[i], label=str(T_list[i]) + \"", "None label = \"CRIT\" for i in range(len(CRIT)): self.axes.plot(CRIT[i][:, 0], CRIT[i][:, 1], linestyle=linestyles[2],", "* calc_settings[\"Volume range end\"] V_num_points = calc_settings[\"Num points\"] V_list = np.linspace(V_start, V_end, V_num_points)", "plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.plot([1 / v", "\"III\", 4: \"IV\", 5: \"V\" } title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"]", "self.isenthalps.append(h_line) self.isentropes = None self.isobars = None return x, y, crit_x, crit_y def", "= plot_settings[\"Grid on\"] title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y", "grid_on = plot_settings[\"Grid on\"] title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel =", "= plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] grid_on = plot_settings[\"Grid on\"] if title", "self.isentropes.append(s_line) self.isenthalps = None self.isotherms = None return x, y, crit_x, crit_y def", "= tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i == 0: t_line, = self.axes.plot(s_vals,", "nmax=nmax) if i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color, label=\"Isenthalp\") else: h_line,", "Entropy values :param P_c: Critical pressure :param S_c: Critical entropy :param fractions: List", "self.draw() def plot_global_binary(self, tp): \"\"\" Plots a binary pxy plot :param tp: Thermopack", "= self.axes.plot(t_vals, p_vals, color=isopleth_1_color, label=\"Isenthalp\") else: h_line, 
= self.axes.plot(t_vals, p_vals, color=isopleth_1_color) self.isenthalps.append(h_line) t_vals,", "include_azeotropes=azeotropes) colors = plot_settings[\"Colors\"] linestyles = [\"-\", \"--\", \":\", \"-.\"] label = \"VLE\"", "p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None", "fractions) except Exception as e: msg = MessageBox(\"Error\", str(e)) msg.exec_() T_c, V_c, P_c,", "Plotting line_color = plot_settings[\"Colors\"][0] point_color = plot_settings[\"Colors\"][1] grid_on = plot_settings[\"Grid on\"] xlabel =", "= Figure(dpi=100) self.empty = True self.components = components self.isenthalps = None self.isentropes =", "self.isenthalps = None self.isobars = None return x, y, crit_x, crit_y def plot_envelope_PS(self,", "maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) H = np.array([tp.enthalpy_tv(T[i], V[i], fractions) for i in range(len(T))])", "None self.isotherms = None return x, y, crit_x, crit_y def plot_binary_pxy(self, tp): \"\"\"", "z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) crit_t_guess = crit_settings[\"Temperature\"] crit_v_guess = crit_settings[\"Volume\"] crit_tol =", "v=crit_v_guess, tol=crit_tol) T_list = calc_settings[\"Temperatures\"] V_start = V_c * calc_settings[\"Volume range start\"] V_end", "= plot_settings[\"Colors\"][1] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y", "range(len(temperatures)): p_vals, v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i", "= P_c # isotherms, isenthalps temperatures = T_list enthalpies = H_list self.isotherms =", "Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) 
self.parent().parent().parent().PS_T_btn.setChecked(True) self.parent().parent().parent().PS_H_btn.setChecked(True) x = S y = P crit_x", "crit_x, crit_y def plot_binary_pxy(self, tp): \"\"\" Plots a binary pxy plot :param tp:", "tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) H = np.array([tp.enthalpy_tv(T[i], V[i], fractions) for i", "v = critical_settings[\"Volume\"] tol = critical_settings[\"Error tolerance\"] # Calculate critical variables try: T_c,", "for critical point, crit_y: y value for critical point, \"\"\" # Display correct", "end\"] V_num_points = calc_settings[\"Num points\"] V_list = np.linspace(V_start, V_end, V_num_points) P_lists = []", "import MessageBox import numpy as np class MplCanvas(FigureCanvasQTAgg): \"\"\" A canvas for matplotlib", "for line in self.isentropes: line.set_linestyle(\"solid\") else: for line in self.isentropes: line.set_linestyle(\"None\") self.draw() else:", "components \"\"\" tpv_settings = self.plotting_preferences[\"Phase envelope\"][\"TPV\"] isopleth_settings = self.plotting_preferences[\"Phase envelope\"][\"Isopleths\"] critical_settings = self.plotting_preferences[\"Phase", "phase envelope :param tp: Thermopack instance :param T: Temperature values :param H: Enthalpy", "maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color,", "= None self.isotherms = None return x, y, crit_x, crit_y def plot_binary_pxy(self, tp):", "molar fractions \"\"\" calc_settings = self.plotting_preferences[\"Pressure density\"][\"Calc\"] tpv_settings = self.plotting_preferences[\"Pressure density\"][\"TPV\"] crit_settings =", "maximum_temperature=T_max, nmax=nmax) if i == 0: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color, label=\"Isenthalp\") else:", "def plot_envelope_TH(self, tp, T, H, T_c, H_c, fractions): \"\"\" 
Return plot data for", "\"PT\": x, y, crit_x, crit_y = self.plot_envelope_PT(tp, T, P, T_c, P_c, fractions) elif", "= tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line,", "True self.components = components self.isenthalps = None self.isentropes = None self.isotherms = None", "calc_settings[\"Maximum dz\"] dlns_max = calc_settings[\"Maximum dlns\"] LLE, L1VE, L2VE = tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min,", "t_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isotherms = None return x, y, crit_x,", "point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True) x = H y", "isentropes pressures = P_list entropies = S_list self.isobars = [] self.isentropes = []", "Status of isotherm button (bool) \"\"\" if not self.empty and self.isotherms: if is_checked:", "x, y, crit_x, crit_y def plot_envelope_TS(self, tp, T, S, T_c, S_c, fractions): \"\"\"", "PS phase envelope :param tp: Thermopack instance :param T: Temperature values :param S:", "Plots a phase envelope :param tp: Thermopack instance :param prim_vars: Primary variables for", "L2VE[2], color=line_color) grid_on = plot_settings[\"Grid on\"] title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"]", "if is_checked: for line in self.isentropes: line.set_linestyle(\"solid\") else: for line in self.isentropes: line.set_linestyle(\"None\")", "1: \"I\", 2: \"II\", 3: \"III\", 4: \"IV\", 5: \"V\" } title =", "V_end = V_c * calc_settings[\"Volume range end\"] V_num_points = calc_settings[\"Num points\"] V_list =", "= P_list enthalpies = H_list self.isenthalps = [] self.isobars = [] for i", "values :param S: Entropy values :param T_c: Critical temperature 
:param S_c: Critical entropy", "= tp.get_binary_pxy(temp=T, maximum_pressure=p_max, minimum_pressure=p_min, maximum_dz=dz_max, maximum_dlns=dlns_max) line_color = plot_settings[\"Colors\"][0] if LLE[0] is not", "for i in range(len(T))]) S = np.array([tp.entropy_tv(T[i], V[i], fractions) for i in range(len(T))])", "H y = P crit_x = H_c crit_y = P_c # isotherms, isentropes", "P, H, P_c, H_c, fractions) elif prim_vars == \"PS\": x, y, crit_x, crit_y", "fractions for the components \"\"\" tpv_settings = self.plotting_preferences[\"Phase envelope\"][\"TPV\"] isopleth_settings = self.plotting_preferences[\"Phase envelope\"][\"Isopleths\"]", "= self.axes.plot(t_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isotherms = None self.isobars = None return x,", "Thermopack instance :param T: Temperature values :param H: Enthalpy values :param T_c: Critical", "PT phase envelope :param tp: Thermopack instance :param T: Temperature values :param P:", "= None self.isentropes = None self.isotherms = None self.isobars = None super(MplCanvas, self).__init__(figure=self.fig)", "values :param H: Enthalpy values :param T_c: Critical temperature :param H_c: Critical enthalpy", "components, plotting_preferences): self.fig = Figure(dpi=100) self.empty = True self.components = components self.isenthalps =", "phase envelope :param tp: Thermopack instance :param P: Pressure values :param S: Entropy", "\"\"\" if not self.empty and self.isotherms: if is_checked: for line in self.isotherms: line.set_linestyle(\"solid\")", "y = P crit_x = H_c crit_y = P_c # isotherms, isentropes temperatures", "crit_t_guess = crit_settings[\"Temperature\"] crit_v_guess = crit_settings[\"Volume\"] crit_tol = crit_settings[\"Error tolerance\"] # Calculate critical", "calc_settings = self.plotting_preferences[\"Pressure density\"][\"Calc\"] tpv_settings = self.plotting_preferences[\"Pressure density\"][\"TPV\"] crit_settings = self.plotting_preferences[\"Pressure density\"][\"Critical\"] 
plot_settings", "= None, None, None, None, None # Set global variables, so that they", "CRIT[i][:, 1], linestyle=linestyles[2], color=colors[2], label=label) label = None label = \"AZ\" for i", "prim_vars == \"TS\": x, y, crit_x, crit_y = self.plot_envelope_TS(tp, T, S, T_c, S_c,", "S_c, fractions): \"\"\" Return plot data for a PS phase envelope :param tp:", "tpv_settings[\"Step size\"] # Calculate T, P, V T_ph_env, P_ph_env, V_ph_env = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions,", "fractions): \"\"\" Return plot data for a PH phase envelope :param tp: Thermopack", ":param P_c: Critical pressure :param H_c: Critical enthalpy :param fractions: List of molar", "color=line_color) self.axes.plot(L1VE[1], L1VE[2], color=line_color) if L2VE[0] is not None: self.axes.plot(L2VE[0], L2VE[2], color=line_color) self.axes.plot(L2VE[1],", "== \"PH\": x, y, crit_x, crit_y = self.plot_envelope_PH(tp, P, H, P_c, H_c, fractions)", "= self.plot_envelope_TS(tp, T, S, T_c, S_c, fractions) else: return # Plotting line_color =", "plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] grid_on = plot_settings[\"Grid on\"]", "self.axes.plot(VLE[i][:, 0], VLE[i][:, 1], linestyle=linestyles[0], color=colors[0], label=label) label = None label = \"LLVE\"", "and self.isentropes: if is_checked: for line in self.isentropes: line.set_linestyle(\"solid\") else: for line in", "minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\")", "self.axes.set_title(title) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.draw() def plot_pressure_density(self, tp, fractions): \"\"\" Plots a pressure density", "= critical_settings[\"Error tolerance\"] # Calculate critical variables try: T_c, V_c, P_c = tp.critical(n=fractions,", "msg.exec_() T_c, V_c, P_c, H_c, S_c = None, None, None, None, None #", "p_vals, 
color=isopleth_1_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color) self.isenthalps.append(h_line) t_vals, p_vals, v_vals,", "is_checked: Status of isentrope button (bool) \"\"\" if not self.empty and self.isentropes: if", "Pressure values :param H: Enthalpy values :param P_c: Critical pressure :param H_c: Critical", "color=isopleth_2_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color) self.isenthalps.append(h_line) self.isentropes = None self.isobars", "self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.draw() def plot_pressure_density(self, tp, fractions): \"\"\" Plots a pressure density plot", "envelope :param tp: Thermopack instance :param P: Pressure values :param H: Enthalpy values", "def plot_envelope_PT(self, tp, T, P, T_c, P_c, fractions): \"\"\" Return plot data for", "plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.grid(grid_on) self.axes.set_title(title) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.draw() def plot_pressure_density(self,", "= None return x, y, crit_x, crit_y def plot_envelope_TS(self, tp, T, S, T_c,", "= self.axes.plot(s_vals, t_vals, color=\"#ffd2d2\") self.isobars.append(p_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min,", "values :param H: Enthalpy values :param P_c: Critical pressure :param H_c: Critical enthalpy", "tol = critical_settings[\"Error tolerance\"] # Calculate critical variables try: T_c, V_c, P_c =", "tp, prim_vars, fractions): \"\"\" Plots a phase envelope :param tp: Thermopack instance :param", "= P crit_x = S_c crit_y = P_c # isotherms, isenthalps temperatures =", "self.isotherms = [] self.isentropes = [] for i in range(len(temperatures)): p_vals, v_vals, s_vals,", "envelope :param tp: Thermopack instance :param prim_vars: Primary variables for the plot (e.g.", "entropies = S_list self.isotherms = [] 
self.isentropes = [] for i in range(len(temperatures)):", "H_c, S_c = None, None, None, None, None # Set global variables, so", "T_c, S_c, fractions) else: return # Plotting line_color = plot_settings[\"Colors\"][0] point_color = plot_settings[\"Colors\"][1]", "label = \"LLVE\" for i in range(len(LLVE)): self.axes.plot(LLVE[i][:, 0], LLVE[i][:, 1], linestyle=linestyles[1], color=colors[1],", "tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, =", "# Calculate T, P, V T_ph_env, P_ph_env, V_ph_env = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min,", "None, None, None # Set global variables, so that they are accessible in", "not self.empty and self.isentropes: if is_checked: for line in self.isentropes: line.set_linestyle(\"solid\") else: for", "t_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None", "correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True) x = T y = P crit_x =", "color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, s_vals", "[] for i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=200.0,", "\"\"\" calc_settings = self.plotting_preferences[\"Pressure density\"][\"Calc\"] tpv_settings = self.plotting_preferences[\"Pressure density\"][\"TPV\"] crit_settings = self.plotting_preferences[\"Pressure density\"][\"Critical\"]", "labels[1]], loc=\"best\") else: self.axes.legend() self.draw() def plot_envelope_PT(self, tp, T, 
P, T_c, P_c, fractions):", "labels[2], labels[0], labels[1]], loc=\"best\") else: self.axes.legend() self.draw() def plot_envelope_PT(self, tp, T, P, T_c,", "\"\"\" Return plot data for a PT phase envelope :param tp: Thermopack instance", "plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.plot([1 / v for v in V_ph_env],", "V[i], fractions) for i in range(len(T))]) S = np.array([tp.entropy_tv(T[i], V[i], fractions) for i", "Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Binary pxy\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Binary pxy\"][\"Plotting\"] T =", "x value for critical point, crit_y: y value for critical point, \"\"\" #", "\": title += ks_strings[KSTYPE] self.axes.set_title(title) legend = self.axes.legend(loc=\"best\", numpoints=1) legend.get_frame().set_linewidth(0.0) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.grid(grid_on)", "* 0.60, np.max(P) * 1.40, n_isopleths) temp = critical_settings[\"Temperature\"] v = critical_settings[\"Volume\"] tol", "azeotropes = calc_settings[\"Azeotropes\"] KSTYPE, VLE, LLVE, CRIT, AZ = tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp, include_azeotropes=azeotropes) colors", "= tp.entropy_tv(T_c, V_c, fractions) except Exception as e: msg = MessageBox(\"Error\", str(e)) msg.exec_()", "if L2VE[0] is not None: self.axes.plot(L2VE[0], L2VE[2], color=line_color) self.axes.plot(L2VE[1], L2VE[2], color=line_color) grid_on =", "for i in range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min, maximum_temperature=T_max)", "= isopleth_settings[\"Maximum pressure\"] T_min = isopleth_settings[\"Minimum temperature\"] T_max = isopleth_settings[\"Maximum temperature\"] nmax =", "import Figure from gui.utils import MessageBox import numpy as np class MplCanvas(FigureCanvasQTAgg): \"\"\"", "tp): \"\"\" Plots a binary pxy plot :param tp: Thermopack instance 
\"\"\" calc_settings", "phase envelope plot functions global isopleth_1_color global isopleth_2_color global P_min global P_max global", "range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=200.0, maximum_temperature=500.0, nmax=100) if i", "if a plot exists :param is_checked: Status of isentrope button (bool) \"\"\" if", "crit_y = self.plot_envelope_PS(tp, P, S, P_c, S_c, fractions) elif prim_vars == \"TH\": x,", "self.axes.plot(x, y, color=line_color, label=\"Phase envelope\") self.axes.scatter([crit_x], [crit_y], color=point_color, label=\"Critical point\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel)", "H_list self.isotherms = [] self.isenthalps = [] for i in range(len(temperatures)): p_vals, v_vals,", "v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min, maximum_temperature=T_max) if i == 0: p_line,", "P_ph_env, V_ph_env = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) crit_t_guess = crit_settings[\"Temperature\"] crit_v_guess", "def plot_envelope(self, tp, prim_vars, fractions): \"\"\" Plots a phase envelope :param tp: Thermopack", "self.isotherms = None self.isobars = None return x, y, crit_x, crit_y def plot_envelope_PH(self,", "Exception as e: msg = MessageBox(\"Error\", str(e)) msg.exec_() T_c, V_c, P_c, H_c, S_c", "temp=crit_t_guess, v=crit_v_guess, tol=crit_tol) T_list = calc_settings[\"Temperatures\"] V_start = V_c * calc_settings[\"Volume range start\"]", "== \"<NAME> and Scott type: \": title += ks_strings[KSTYPE] self.axes.set_title(title) legend = self.axes.legend(loc=\"best\",", "= self.axes.plot(h_vals, t_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps", "i in range(len(temperatures)): 
p_vals, v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax)", "self.empty and self.isotherms: if is_checked: for line in self.isotherms: line.set_linestyle(\"solid\") else: for line", "grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.plot([1", "label = None ks_strings = { 1: \"I\", 2: \"II\", 3: \"III\", 4:", "p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i", "S_list self.isotherms = [] self.isentropes = [] for i in range(len(temperatures)): p_vals, v_vals,", "tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) crit_t_guess = crit_settings[\"Temperature\"] crit_v_guess = crit_settings[\"Volume\"] crit_tol", "= T_list entropies = S_list self.isotherms = [] self.isentropes = [] for i", "isopleth_settings[\"Maximum pressure\"] T_min = isopleth_settings[\"Minimum temperature\"] T_max = isopleth_settings[\"Maximum temperature\"] nmax = isopleth_settings[\"N", "t_vals, color=\"#ffd2d2\") self.isobars.append(p_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min,", ":param T_c: Critical temperature :param H_c: Critical enthalpy :param fractions: List of molar", "a plot exists :param is_checked: Status of isotherm button (bool) \"\"\" if not", "y, crit_x, crit_y def plot_envelope_PS(self, tp, P, S, P_c, S_c, fractions): \"\"\" Return", "T, S, T_c, S_c, fractions) else: return # Plotting line_color = plot_settings[\"Colors\"][0] point_color", "= self.plot_envelope_TH(tp, T, H, T_c, H_c, fractions) elif prim_vars == \"TS\": x, y,", "minimum_pressure=p_min, maximum_dz=dz_max, maximum_dlns=dlns_max) 
line_color = plot_settings[\"Colors\"][0] if LLE[0] is not None: self.axes.plot(LLE[0], LLE[2],", "self.plotting_preferences[\"Pressure density\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"] p_max = tpv_settings[\"Maximum", "density\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Pressure density\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"]", "buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True) x = H y = T crit_x = H_c", "i in range(len(T))]) S = np.array([tp.entropy_tv(T[i], V[i], fractions) for i in range(len(T))]) global", "y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True)", "point, crit_y: y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2)", "= self.axes.plot(h_vals, t_vals, color=isopleth_1_color, label=\"Isobar\") else: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color) self.isobars.append(p_line) t_vals,", "[] for V in V_list: P, = tp.pressure_tv(temp=T, volume=V, n=fractions) P_list.append(P) P_lists.append(P_list) rho_list", "y = P crit_x = T_c crit_y = P_c # Isenthalps, isentropes enthalpies", "S, P_c, S_c, fractions): \"\"\" Return plot data for a PS phase envelope", "fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i == 0: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color,", "PH phase envelope :param tp: Thermopack instance :param P: Pressure values :param H:", "for Plot Mode \"\"\" def __init__(self, components, plotting_preferences): self.fig = 
Figure(dpi=100) self.empty =", "envelope plot functions global isopleth_1_color global isopleth_2_color global P_min global P_max global T_min", "y, crit_x, crit_y def plot_envelope_PH(self, tp, P, H, P_c, H_c, fractions): \"\"\" Return", "line in self.isentropes: line.set_linestyle(\"solid\") else: for line in self.isentropes: line.set_linestyle(\"None\") self.draw() else: return", "range(len(T))]) S = np.array([tp.entropy_tv(T[i], V[i], fractions) for i in range(len(T))]) global H_list global", "self.axes.set_ylabel(ylabel) # Sort entries in the legend legend = True if legend: if", "H_c, fractions): \"\"\" Return plot data for a PS phase envelope :param tp:", "self.draw() def plot_pressure_density(self, tp, fractions): \"\"\" Plots a pressure density plot :param tp:", "calc_settings[\"Volume range end\"] V_num_points = calc_settings[\"Num points\"] V_list = np.linspace(V_start, V_end, V_num_points) P_lists", "Critical pressure :param H_c: Critical enthalpy :param fractions: List of molar fractions :return:", "ylabel = plot_settings[\"y label\"] title = plot_settings[\"Title\"] self.axes.plot(x, y, color=line_color, label=\"Phase envelope\") self.axes.scatter([crit_x],", "return x, y, crit_x, crit_y def plot_envelope_TS(self, tp, T, S, T_c, S_c, fractions):", "isobar lines in the plot if a plot exists :param is_checked: Status of", "= V_c * calc_settings[\"Volume range start\"] V_end = V_c * calc_settings[\"Volume range end\"]", "in range(len(CRIT)): self.axes.plot(CRIT[i][:, 0], CRIT[i][:, 1], linestyle=linestyles[2], color=colors[2], label=label) label = None label", "fractions): \"\"\" Return plot data for a PS phase envelope :param tp: Thermopack", "pressure\"] dz_max = calc_settings[\"Maximum dz\"] dlns_max = calc_settings[\"Maximum dlns\"] LLE, L1VE, L2VE =", "if LLE[0] is not None: self.axes.plot(LLE[0], LLE[2], color=line_color) self.axes.plot(LLE[1], LLE[2], color=line_color) if L1VE[0]", "for line in self.isotherms: line.set_linestyle(\"solid\") else: 
for line in self.isotherms: line.set_linestyle(\"None\") self.draw() else:", "point, crit_y: y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3)", "Figure(dpi=100) self.empty = True self.components = components self.isenthalps = None self.isentropes = None", "x, y, crit_x, crit_y = self.plot_envelope_TH(tp, T, H, T_c, H_c, fractions) elif prim_vars", "minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\")", "\"TS\": x, y, crit_x, crit_y = self.plot_envelope_TS(tp, T, S, T_c, S_c, fractions) else:", "T, V, P T_c, V_c, P_c = tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess, tol=crit_tol) T_list =", "tp.pressure_tv(temp=T, volume=V, n=fractions) P_list.append(P) P_lists.append(P_list) rho_list = 1 / V_list title = plot_settings[\"Title\"]", "T_list entropies = S_list self.isotherms = [] self.isentropes = [] for i in", "np.max(S), n_isopleths) T_list = np.linspace(np.min(T) * 0.60, np.max(T) * 1.40, n_isopleths) P_list =", "enthalpies = H_list self.isotherms = [] self.isenthalps = [] for i in range(len(temperatures)):", "entropy :param fractions: List of molar fractions :return: x: x values for plot,", "V_ph_env = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) crit_t_guess = crit_settings[\"Temperature\"] crit_v_guess =", "H_c: Critical enthalpy :param fractions: List of molar fractions :return: x: x values", "y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True)", ":param is_checked: Status of isotherm button (bool) \"\"\" if not self.empty and self.isotherms:", 
"color=isopleth_1_color, label=\"Isobar\") else: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color) self.isobars.append(p_line) t_vals, p_vals, v_vals, h_vals", "line.set_linestyle(\"solid\") else: for line in self.isenthalps: line.set_linestyle(\"None\") self.draw() def toggle_isentropes(self, is_checked): \"\"\" Hides", "self.isenthalps.append(h_line) self.isentropes = None self.isotherms = None return x, y, crit_x, crit_y def", "global T_min global T_max global nmax isopleth_1_color = plot_settings[\"Colors\"][2] isopleth_2_color = plot_settings[\"Colors\"][3] P_min", "x = H y = P crit_x = H_c crit_y = P_c #", "2: \"II\", 3: \"III\", 4: \"IV\", 5: \"V\" } title = plot_settings[\"Title\"] xlabel", "exists :param is_checked: Status of isotherm button (bool) \"\"\" if not self.empty and", "= S_list self.isobars = [] self.isentropes = [] for i in range(len(pressures)): t_vals,", "fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(t_vals,", "self.axes.plot(s_vals, p_vals, color=isopleth_2_color) self.isenthalps.append(h_line) self.isentropes = None self.isobars = None return x, y,", "\"\"\" if not self.empty and self.isenthalps: if is_checked: for line in self.isenthalps: line.set_linestyle(\"solid\")", "0: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color, label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals, p_vals, color=isopleth_2_color)", "= tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp, include_azeotropes=azeotropes) colors = plot_settings[\"Colors\"] linestyles = [\"-\", \"--\", \":\", \"-.\"]", "self.components = components self.isenthalps = None self.isentropes = None self.isotherms = None self.isobars", "n_isopleths) temp = critical_settings[\"Temperature\"] v = critical_settings[\"Volume\"] tol = critical_settings[\"Error tolerance\"] # Calculate", "pressure\"] P_max = 
isopleth_settings[\"Maximum pressure\"] T_min = isopleth_settings[\"Minimum temperature\"] T_max = isopleth_settings[\"Maximum temperature\"]", "isopleth_settings[\"N max\"] # Plot depending on which primary variables are chosen if prim_vars", "data for a PT phase envelope :param tp: Thermopack instance :param T: Temperature", "= isopleth_settings[\"Minimum temperature\"] T_max = isopleth_settings[\"Maximum temperature\"] nmax = isopleth_settings[\"N max\"] # Plot", "t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line)", "# Sort entries in the legend legend = True if legend: if n_isopleths", "T_max = isopleth_settings[\"Maximum temperature\"] nmax = isopleth_settings[\"N max\"] # Plot depending on which", "self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) # Sort entries in the legend legend = True", "S_list self.isenthalps = [] self.isentropes = [] for i in range(len(enthalpies)): t_vals, p_vals,", "in range(len(AZ)): self.axes.plot(AZ[i][:, 0], AZ[i][:, 1], linestyle=linestyles[3], color=colors[3], label=label) label = None ks_strings", "= T crit_x = H_c crit_y = T_c # isobars, isentropes pressures =", "color=point_color, label=\"Critical point\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) # Sort entries in the legend", "= plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"] self.axes.grid(grid_on) self.axes.set_title(title) self.axes.set_xlabel(xlabel)", "self.draw() else: return def plot_envelope(self, tp, prim_vars, fractions): \"\"\" Plots a phase envelope", "self.isotherms: line.set_linestyle(\"solid\") else: for line in self.isotherms: line.set_linestyle(\"None\") self.draw() else: return def toggle_isobars(self,", "msg = MessageBox(\"Error\", str(e)) 
msg.exec_() T_c, V_c, P_c, H_c, S_c = None, None,", "color=\"#d5d3ff\") self.isenthalps.append(h_line) self.isentropes = None self.isotherms = None return x, y, crit_x, crit_y", "= None self.isotherms = None self.isobars = None super(MplCanvas, self).__init__(figure=self.fig) self.plotting_preferences = plotting_preferences", "self.plotting_preferences[\"Global binary\"][\"Plotting\"] min_press = calc_settings[\"Minimum pressure\"] min_temp = calc_settings[\"Minimum temperature\"] azeotropes = calc_settings[\"Azeotropes\"]", "x, y, crit_x, crit_y = self.plot_envelope_PH(tp, P, H, P_c, H_c, fractions) elif prim_vars", "= None return x, y, crit_x, crit_y def plot_envelope_PS(self, tp, P, S, P_c,", "self.draw() else: return def toggle_isobars(self, is_checked): \"\"\" Hides / shows isobar lines in", "envelope :param tp: Thermopack instance :param T: Temperature values :param H: Enthalpy values", "V_c, fractions) S_c = tp.entropy_tv(T_c, V_c, fractions) except Exception as e: msg =", "None self.isobars = None return x, y, crit_x, crit_y def plot_envelope_TH(self, tp, T,", ":param is_checked: Status of isobar button (bool) \"\"\" if not self.empty and self.isobars:", "= None return x, y, crit_x, crit_y def plot_binary_pxy(self, tp): \"\"\" Plots a", "np.linspace(np.min(S), np.max(S), n_isopleths) T_list = np.linspace(np.min(T) * 0.60, np.max(T) * 1.40, n_isopleths) P_list", "range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min, maximum_temperature=T_max) if i ==", "Thermopack instance \"\"\" calc_settings = self.plotting_preferences[\"Global binary\"][\"Calc\"] plot_settings = self.plotting_preferences[\"Global binary\"][\"Plotting\"] min_press =", "h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax) if i == 0:", "fractions) S_c = tp.entropy_tv(T_c, V_c, fractions) except Exception as e: msg = 
MessageBox(\"Error\",", "on which primary variables are chosen if prim_vars == \"PT\": x, y, crit_x,", "= self.plotting_preferences[\"Pressure density\"][\"Plotting\"] p_initial = tpv_settings[\"Initial pressure\"] t_min = tpv_settings[\"Minimum temperature\"] p_max =", "Plots a pressure density plot :param tp: Thermopack instance :param fractions: List of", "T_c # isobars, isentropes pressures = P_list entropies = S_list self.isobars = []", ":param tp: Thermopack instance :param T: Temperature values :param S: Entropy values :param", "in the plot if a plot exists :param is_checked: Status of isotherm button", "ylabel = plot_settings[\"y label\"] self.axes.grid(grid_on) self.axes.set_title(title) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.draw() def plot_pressure_density(self, tp, fractions):", "= H_list entropies = S_list self.isenthalps = [] self.isentropes = [] for i", "tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i == 0: t_line, = self.axes.plot(h_vals, p_vals,", "P, S, P_c, S_c, fractions) elif prim_vars == \"TH\": x, y, crit_x, crit_y", "title = plot_settings[\"Title\"] grid_on = plot_settings[\"Grid on\"] xlabel = plot_settings[\"x label\"] ylabel =", "Calculate T, P, V T_ph_env, P_ph_env, V_ph_env = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size,", "fractions: List of molar fractions \"\"\" calc_settings = self.plotting_preferences[\"Pressure density\"][\"Calc\"] tpv_settings = self.plotting_preferences[\"Pressure", "== 0: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(s_vals, p_vals,", "for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True) 
x =", "isopleth_1_color = plot_settings[\"Colors\"][2] isopleth_2_color = plot_settings[\"Colors\"][3] P_min = isopleth_settings[\"Minimum pressure\"] P_max = isopleth_settings[\"Maximum", "if is_checked: for line in self.isenthalps: line.set_linestyle(\"solid\") else: for line in self.isenthalps: line.set_linestyle(\"None\")", "self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True) x = S y = T crit_x = S_c crit_y =", "P_max = isopleth_settings[\"Maximum pressure\"] T_min = isopleth_settings[\"Minimum temperature\"] T_max = isopleth_settings[\"Maximum temperature\"] nmax", "* calc_settings[\"Volume range start\"] V_end = V_c * calc_settings[\"Volume range end\"] V_num_points =", "color=isopleth_1_color, label=\"Isotherm\") else: t_line, = self.axes.plot(h_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, h_vals", "if i == 0: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, =", "# Calculate critical T, V, P T_c, V_c, P_c = tp.critical(n=fractions, temp=crit_t_guess, v=crit_v_guess,", "so that they are accessible in all phase envelope plot functions global isopleth_1_color", "crit_x, crit_y def plot_envelope_TS(self, tp, T, S, T_c, S_c, fractions): \"\"\" Return plot", "Thermopack instance :param prim_vars: Primary variables for the plot (e.g. PT, PH, ..)", "H: Enthalpy values :param T_c: Critical temperature :param H_c: Critical enthalpy :param fractions:", "instance :param prim_vars: Primary variables for the plot (e.g. PT, PH, ..) 
:param", "T_list = np.linspace(np.min(T) * 0.60, np.max(T) * 1.40, n_isopleths) P_list = np.linspace(np.min(P) *", "y = T crit_x = H_c crit_y = T_c # isobars, isentropes pressures", "plot_envelope_PH(self, tp, P, H, P_c, H_c, fractions): \"\"\" Return plot data for a", "in range(len(pressures)): t_vals, v_vals, s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min, maximum_temperature=T_max) if i", "= components self.isenthalps = None self.isentropes = None self.isotherms = None self.isobars =", "self.isentropes = None self.isotherms = None self.isobars = None super(MplCanvas, self).__init__(figure=self.fig) self.plotting_preferences =", ":param is_checked: Status of isentrope button (bool) \"\"\" if not self.empty and self.isentropes:", ":param tp: Thermopack instance :param T: Temperature values :param H: Enthalpy values :param", "for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(3) self.parent().parent().parent().TH_S_btn.setChecked(True) self.parent().parent().parent().TH_P_btn.setChecked(True) x =", "= tpv_settings[\"Maximum pressure\"] step_size = tpv_settings[\"Step size\"] # Calculate T, P, V T_ph_env,", "* 1.40, n_isopleths) temp = critical_settings[\"Temperature\"] v = critical_settings[\"Volume\"] tol = critical_settings[\"Error tolerance\"]", "= None self.isobars = None super(MplCanvas, self).__init__(figure=self.fig) self.plotting_preferences = plotting_preferences def toggle_isenthalps(self, is_checked):", "is_checked: for line in self.isenthalps: line.set_linestyle(\"solid\") else: for line in self.isenthalps: line.set_linestyle(\"None\") self.draw()", "instance :param T: Temperature values :param S: Entropy values :param T_c: Critical temperature", "loc=\"best\") else: self.axes.legend() self.draw() def plot_envelope_PT(self, tp, T, P, T_c, P_c, fractions): \"\"\"", "Calculate T, P, V T, P, V = tp.get_envelope_twophase(initial_pressure=p_initial, 
z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size,", "global S_list global P_list n_isopleths = isopleth_settings[\"Number of isopleths\"] H_list = np.linspace(np.min(H), np.max(H),", "[] for T in T_list: P_list = [] for V in V_list: P,", "plot_envelope_TS(self, tp, T, S, T_c, S_c, fractions): \"\"\" Return plot data for a", "if prim_vars == \"PT\": x, y, crit_x, crit_y = self.plot_envelope_PT(tp, T, P, T_c,", "# Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True) x = T y = P", "H, P_c, H_c, fractions) elif prim_vars == \"PS\": x, y, crit_x, crit_y =", "p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min,", "\"V\" } title = plot_settings[\"Title\"] xlabel = plot_settings[\"x label\"] ylabel = plot_settings[\"y label\"]", "V_end, V_num_points) P_lists = [] for T in T_list: P_list = [] for", "plot_envelope_PT(self, tp, T, P, T_c, P_c, fractions): \"\"\" Return plot data for a", "H_c = tp.enthalpy_tv(T_c, V_c, fractions) S_c = tp.entropy_tv(T_c, V_c, fractions) except Exception as", "nmax=nmax) if i == 0: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line,", "self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.legend(loc=\"best\") self.draw() def plot_global_binary(self, tp): \"\"\" Plots a binary pxy", "isenthalp lines in the plot if a plot exists :param is_checked: Status of", "i == 0: h_line, = self.axes.plot(s_vals, p_vals, color=\"#d5d3ff\", label=\"Isenthalp\") else: h_line, = self.axes.plot(s_vals,", "plot functionality for Plot Mode \"\"\" def __init__(self, components, plotting_preferences): self.fig = Figure(dpi=100)", "correct 
buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(4) self.parent().parent().parent().TS_P_btn.setChecked(True) self.parent().parent().parent().TS_H_btn.setChecked(True) x = S y = T crit_x =", "y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True)", "> 0: handles, labels = self.axes.get_legend_handles_labels() self.axes.legend([handles[3], handles[2], handles[0], handles[1]], [labels[3], labels[2], labels[0],", "self.axes.plot(h_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps =", "not None: self.axes.plot(L1VE[0], L1VE[2], color=line_color) self.axes.plot(L1VE[1], L1VE[2], color=line_color) if L2VE[0] is not None:", "(bool) \"\"\" if not self.empty and self.isenthalps: if is_checked: for line in self.isenthalps:", "tp: Thermopack instance :param T: Temperature values :param P: Pressure values :param T_c:", "x, y, crit_x, crit_y def plot_envelope_PH(self, tp, P, H, P_c, H_c, fractions): \"\"\"", "return def plot_envelope(self, tp, prim_vars, fractions): \"\"\" Plots a phase envelope :param tp:", "else: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isotherms = None self.isobars = None", "self.isobars.append(p_line) t_vals, p_vals, v_vals, h_vals = tp.get_isentrope(entropies[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, minimum_temperature=T_min, maximum_temperature=T_max, nmax=nmax)", "s_vals, h_vals = tp.get_isobar(pressures[i], fractions, minimum_temperature=T_min, maximum_temperature=T_max) if i == 0: p_line, =", "value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) 
self.parent().parent().parent().PH_T_btn.setChecked(True) self.parent().parent().parent().PH_S_btn.setChecked(True) x", "label=\"Isenthalp\") else: h_line, = self.axes.plot(t_vals, p_vals, color=isopleth_1_color) self.isenthalps.append(h_line) t_vals, p_vals, v_vals, h_vals =", "button (bool) \"\"\" if not self.empty and self.isentropes: if is_checked: for line in", "crit_y: y value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(1) self.parent().parent().parent().PH_T_btn.setChecked(True)", "plot if a plot exists :param is_checked: Status of isentrope button (bool) \"\"\"", "P, V T_ph_env, P_ph_env, V_ph_env = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) crit_t_guess", "for i in range(len(T))]) global H_list global T_list global S_list global P_list n_isopleths", "LLE[2], color=line_color) self.axes.plot(LLE[1], LLE[2], color=line_color) if L1VE[0] is not None: self.axes.plot(L1VE[0], L1VE[2], color=line_color)", "1], linestyle=linestyles[3], color=colors[3], label=label) label = None ks_strings = { 1: \"I\", 2:", "self.axes.plot(t_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isotherms = None self.isobars = None return x, y,", "= [] for T in T_list: P_list = [] for V in V_list:", "self.plot_envelope_PT(tp, T, P, T_c, P_c, fractions) elif prim_vars == \"PH\": x, y, crit_x,", "temperature\"] azeotropes = calc_settings[\"Azeotropes\"] KSTYPE, VLE, LLVE, CRIT, AZ = tp.global_binary_plot(minimum_pressure=min_press, minimum_temperature=min_temp, include_azeotropes=azeotropes)", "= [] self.isentropes = [] for i in range(len(temperatures)): p_vals, v_vals, s_vals, h_vals", "self.axes.plot(rho_list, P_lists[i], label=str(T_list[i]) + \" K\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) 
self.axes.legend(loc=\"best\") self.draw() def", "title = plot_settings[\"Title\"] self.axes.plot(x, y, color=line_color, label=\"Phase envelope\") self.axes.scatter([crit_x], [crit_y], color=point_color, label=\"Critical point\")", "[labels[3], labels[2], labels[0], labels[1]], loc=\"best\") else: self.axes.legend() self.draw() def plot_envelope_PT(self, tp, T, P,", "tpv_settings[\"Maximum pressure\"] step_size = tpv_settings[\"Step size\"] # Calculate T, P, V T_ph_env, P_ph_env,", "buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(0) self.parent().parent().parent().PT_H_btn.setChecked(True) self.parent().parent().parent().PT_S_btn.setChecked(True) x = T y = P crit_x = T_c", "toggle_isobars(self, is_checked): \"\"\" Hides / shows isobar lines in the plot if a", "= self.axes.plot(s_vals, p_vals, color=isopleth_1_color) self.isotherms.append(t_line) t_vals, p_vals, v_vals, s_vals = tp.get_isenthalp(enthalpies[i], fractions, minimum_pressure=P_min,", "Critical pressure :param S_c: Critical entropy :param fractions: List of molar fractions :return:", "crit_y = T_c # isobars, isentropes pressures = P_list entropies = S_list self.isobars", "color=colors[2], label=label) label = None label = \"AZ\" for i in range(len(AZ)): self.axes.plot(AZ[i][:,", "x, y, crit_x, crit_y def plot_binary_pxy(self, tp): \"\"\" Plots a binary pxy plot", "i == 0: s_line, = self.axes.plot(h_vals, t_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(h_vals,", ":param tp: Thermopack instance :param P: Pressure values :param S: Entropy values :param", ":param fractions: List of molar fractions :return: x: x values for plot, y:", "/ shows isobar lines in the plot if a plot exists :param is_checked:", "i == 0: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else: s_line, = self.axes.plot(t_vals,", "depending on which primary variables are chosen if prim_vars == \"PT\": x, y,", "MessageBox(\"Error\", 
str(e)) msg.exec_() T_c, V_c, P_c, H_c, S_c = None, None, None, None,", "maximum_temperature=T_max, nmax=nmax) if i == 0: s_line, = self.axes.plot(t_vals, p_vals, color=isopleth_2_color, label=\"Isentrope\") else:", "V T_ph_env, P_ph_env, V_ph_env = tp.get_envelope_twophase(initial_pressure=p_initial, z=fractions, maximum_pressure=p_max, minimum_temperature=t_min, step_size=step_size, calc_v=True) crit_t_guess =", "instance :param P: Pressure values :param S: Entropy values :param P_c: Critical pressure", "tolerance\"] # Calculate critical variables try: T_c, V_c, P_c = tp.critical(n=fractions, temp=temp, v=v,", "plot functions global isopleth_1_color global isopleth_2_color global P_min global P_max global T_min global", "\" K\") self.axes.set_title(title) self.axes.grid(grid_on) self.axes.set_xlabel(xlabel) self.axes.set_ylabel(ylabel) self.axes.legend(loc=\"best\") self.draw() def plot_global_binary(self, tp): \"\"\" Plots", "v_vals, s_vals, h_vals = tp.get_isotherm(temperatures[i], fractions, minimum_pressure=P_min, maximum_pressure=P_max, nmax=nmax) if i == 0:", "super(MplCanvas, self).__init__(figure=self.fig) self.plotting_preferences = plotting_preferences def toggle_isenthalps(self, is_checked): \"\"\" Hides / shows isenthalp", "crit_x = H_c crit_y = T_c # isobars, isentropes pressures = P_list entropies", "tp: Thermopack instance :param fractions: List of molar fractions \"\"\" calc_settings = self.plotting_preferences[\"Pressure", "plot_settings[\"y label\"] title = plot_settings[\"Title\"] self.axes.plot(x, y, color=line_color, label=\"Phase envelope\") self.axes.scatter([crit_x], [crit_y], color=point_color,", "P_c # isotherms, isenthalps temperatures = T_list enthalpies = H_list self.isotherms = []", "None return x, y, crit_x, crit_y def plot_envelope_TS(self, tp, T, S, T_c, S_c,", "if i == 0: t_line, = self.axes.plot(s_vals, p_vals, color=isopleth_1_color, label=\"Isotherm\") else: t_line, =", "= plot_settings[\"Grid on\"] xlabel = plot_settings[\"x 
label\"] ylabel = plot_settings[\"y label\"] self.axes.plot([1 /", "value for critical point, \"\"\" # Display correct buttons self.parent().parent().parent().isopleth_btn_stack.setCurrentIndex(2) self.parent().parent().parent().PS_T_btn.setChecked(True) self.parent().parent().parent().PS_H_btn.setChecked(True) x", "self.plotting_preferences[\"Phase envelope\"][\"TPV\"] isopleth_settings = self.plotting_preferences[\"Phase envelope\"][\"Isopleths\"] critical_settings = self.plotting_preferences[\"Phase envelope\"][\"Critical\"] plot_settings = self.plotting_preferences[\"Phase", "else: s_line, = self.axes.plot(h_vals, p_vals, color=isopleth_2_color) self.isentropes.append(s_line) self.isenthalps = None self.isobars = None", "= None self.isobars = None return x, y, crit_x, crit_y def plot_envelope_PH(self, tp,", "not self.empty and self.isotherms: if is_checked: for line in self.isotherms: line.set_linestyle(\"solid\") else: for", "fractions, minimum_temperature=200.0, maximum_temperature=500.0, nmax=100) if i == 0: p_line, = self.axes.plot(h_vals, t_vals, color=isopleth_1_color," ]
[ "pen.hideturtle() pen.goto(0, 260) pen.write(\"Christmas Tree\", align=\"center\",font=(\"Arial\", 24, \"normal\")) # Starting position t.up() t.rt(90)", "\"normal\")) # Starting position t.up() t.rt(90) t.fd(100) t.lt(90) t.down() # Stump t.color(\"brown\") t.begin_fill()", "t.lt(90) t.down() # Stump t.color(\"brown\") t.begin_fill() t.fd(40) t.lt(90) t.fd(60) t.lt(90) t.fd(40) t.lt(90) t.fd(60)", "Title on the window pen = turtle.Turtle() pen.speed(0) pen.color(\"black\") pen.penup() pen.hideturtle() pen.goto(0, 260)", "t.up() # Star t.fd(30) t.rt(120) t.fd(60) t.lt(120) t.rt(180) t.lt(90) t.fd(15) t.rt(90) t.back(20) t.color(\"yellow\")", "t.fd(60) t.rt(120) t.fd(60) t.rt(120) t.fd(30) t.end_fill() t.up() # Star t.fd(30) t.rt(120) t.fd(60) t.lt(120)", "s = turtle.Screen() t = turtle.Turtle() s.title(\"Christmas Tree\") s.setup(width=800, height=600) # Title on", "t.down() t.begin_fill() t.fd(35) t.rt(120) t.fd(70) t.rt(120) t.fd(70) t.rt(120) t.fd(35) t.end_fill() t.up() # Thrid", "pen.speed(0) pen.color(\"black\") pen.penup() pen.hideturtle() pen.goto(0, 260) pen.write(\"Christmas Tree\", align=\"center\",font=(\"Arial\", 24, \"normal\")) # Starting", "t.fd(30) t.end_fill() t.up() # Star t.fd(30) t.rt(120) t.fd(60) t.lt(120) t.rt(180) t.lt(90) t.fd(15) t.rt(90)", "t.lt(90) t.fd(15) t.rt(90) t.back(20) t.color(\"yellow\") t.down() t.begin_fill() for i in range(5): t.forward(40) t.right(144)", "t.fd(60) t.lt(90) t.fd(40) t.lt(90) t.fd(60) t.end_fill() t.up() # First triangle t.lt(180) t.fd(60) t.lt(90)", "t.lt(90) t.fd(40) t.lt(90) t.fd(60) t.end_fill() t.up() # First triangle t.lt(180) t.fd(60) t.lt(90) t.fd(20)", "Triangle t.lt(180) t.fd(80) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(35) t.rt(120) t.fd(70) t.rt(120)", "t.lt(180) t.fd(60) t.lt(90) t.fd(20) t.down() t.color(\"green\") t.begin_fill() t.rt(180) t.fd(80) t.lt(120) t.fd(80) t.lt(120) t.fd(80)", "t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(30) t.rt(120) 
t.fd(60) t.rt(120) t.fd(60) t.rt(120) t.fd(30)", "t.lt(90) t.fd(60) t.end_fill() t.up() # First triangle t.lt(180) t.fd(60) t.lt(90) t.fd(20) t.down() t.color(\"green\")", "t.down() # Stump t.color(\"brown\") t.begin_fill() t.fd(40) t.lt(90) t.fd(60) t.lt(90) t.fd(40) t.lt(90) t.fd(60) t.end_fill()", "Tree\") s.setup(width=800, height=600) # Title on the window pen = turtle.Turtle() pen.speed(0) pen.color(\"black\")", "t.begin_fill() t.fd(40) t.lt(90) t.fd(60) t.lt(90) t.fd(40) t.lt(90) t.fd(60) t.end_fill() t.up() # First triangle", "t.end_fill() t.up() # Second Triangle t.lt(180) t.fd(80) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill()", "t.color(\"yellow\") t.down() t.begin_fill() for i in range(5): t.forward(40) t.right(144) t.end_fill() t.hideturtle() while True:", "# Thrid Triangle t.fd(35) t.rt(120) t.fd(70) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(30)", "t.rt(120) t.fd(70) t.rt(120) t.fd(35) t.end_fill() t.up() # Thrid Triangle t.fd(35) t.rt(120) t.fd(70) t.lt(120)", "Starting position t.up() t.rt(90) t.fd(100) t.lt(90) t.down() # Stump t.color(\"brown\") t.begin_fill() t.fd(40) t.lt(90)", "the window pen = turtle.Turtle() pen.speed(0) pen.color(\"black\") pen.penup() pen.hideturtle() pen.goto(0, 260) pen.write(\"Christmas Tree\",", "t.color(\"brown\") t.begin_fill() t.fd(40) t.lt(90) t.fd(60) t.lt(90) t.fd(40) t.lt(90) t.fd(60) t.end_fill() t.up() # First", "t.down() t.begin_fill() for i in range(5): t.forward(40) t.right(144) t.end_fill() t.hideturtle() while True: s.update()", "t.lt(90) t.fd(20) t.down() t.color(\"green\") t.begin_fill() t.rt(180) t.fd(80) t.lt(120) t.fd(80) t.lt(120) t.fd(80) t.end_fill() t.up()", "import turtle s = turtle.Screen() t = turtle.Turtle() s.title(\"Christmas Tree\") s.setup(width=800, height=600) #", "# Star t.fd(30) t.rt(120) t.fd(60) t.lt(120) t.rt(180) t.lt(90) t.fd(15) t.rt(90) t.back(20) t.color(\"yellow\") t.down()", "pen.penup() pen.hideturtle() pen.goto(0, 260) 
pen.write(\"Christmas Tree\", align=\"center\",font=(\"Arial\", 24, \"normal\")) # Starting position t.up()", "t.fd(70) t.rt(120) t.fd(35) t.end_fill() t.up() # Thrid Triangle t.fd(35) t.rt(120) t.fd(70) t.lt(120) t.lt(90)", "t.fd(60) t.rt(120) t.fd(30) t.end_fill() t.up() # Star t.fd(30) t.rt(120) t.fd(60) t.lt(120) t.rt(180) t.lt(90)", "t.rt(120) t.fd(30) t.end_fill() t.up() # Star t.fd(30) t.rt(120) t.fd(60) t.lt(120) t.rt(180) t.lt(90) t.fd(15)", "t.fd(30) t.rt(120) t.fd(60) t.rt(120) t.fd(60) t.rt(120) t.fd(30) t.end_fill() t.up() # Star t.fd(30) t.rt(120)", "t.fd(80) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(35) t.rt(120) t.fd(70) t.rt(120) t.fd(70) t.rt(120)", "t.rt(120) t.fd(70) t.rt(120) t.fd(70) t.rt(120) t.fd(35) t.end_fill() t.up() # Thrid Triangle t.fd(35) t.rt(120)", "t.rt(120) t.fd(35) t.end_fill() t.up() # Thrid Triangle t.fd(35) t.rt(120) t.fd(70) t.lt(120) t.lt(90) t.fd(20)", "turtle.Turtle() s.title(\"Christmas Tree\") s.setup(width=800, height=600) # Title on the window pen = turtle.Turtle()", "t.rt(90) t.fd(100) t.lt(90) t.down() # Stump t.color(\"brown\") t.begin_fill() t.fd(40) t.lt(90) t.fd(60) t.lt(90) t.fd(40)", "t.rt(180) t.lt(90) t.fd(15) t.rt(90) t.back(20) t.color(\"yellow\") t.down() t.begin_fill() for i in range(5): t.forward(40)", "s.title(\"Christmas Tree\") s.setup(width=800, height=600) # Title on the window pen = turtle.Turtle() pen.speed(0)", "t.rt(120) t.fd(60) t.rt(120) t.fd(60) t.rt(120) t.fd(30) t.end_fill() t.up() # Star t.fd(30) t.rt(120) t.fd(60)", "t.fd(80) t.lt(120) t.fd(80) t.end_fill() t.up() # Second Triangle t.lt(180) t.fd(80) t.lt(120) t.lt(90) t.fd(20)", "turtle s = turtle.Screen() t = turtle.Turtle() s.title(\"Christmas Tree\") s.setup(width=800, height=600) # Title", "turtle.Screen() t = turtle.Turtle() s.title(\"Christmas Tree\") s.setup(width=800, height=600) # Title on the window", "t.down() t.color(\"green\") t.begin_fill() t.rt(180) t.fd(80) t.lt(120) t.fd(80) t.lt(120) t.fd(80) 
t.end_fill() t.up() # Second", "t.end_fill() t.up() # Star t.fd(30) t.rt(120) t.fd(60) t.lt(120) t.rt(180) t.lt(90) t.fd(15) t.rt(90) t.back(20)", "t.color(\"green\") t.begin_fill() t.rt(180) t.fd(80) t.lt(120) t.fd(80) t.lt(120) t.fd(80) t.end_fill() t.up() # Second Triangle", "Star t.fd(30) t.rt(120) t.fd(60) t.lt(120) t.rt(180) t.lt(90) t.fd(15) t.rt(90) t.back(20) t.color(\"yellow\") t.down() t.begin_fill()", "t.lt(120) t.fd(80) t.end_fill() t.up() # Second Triangle t.lt(180) t.fd(80) t.lt(120) t.lt(90) t.fd(20) t.rt(90)", "= turtle.Screen() t = turtle.Turtle() s.title(\"Christmas Tree\") s.setup(width=800, height=600) # Title on the", "t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(30) t.rt(120) t.fd(60) t.rt(120) t.fd(60) t.rt(120) t.fd(30) t.end_fill()", "t.lt(120) t.fd(80) t.lt(120) t.fd(80) t.end_fill() t.up() # Second Triangle t.lt(180) t.fd(80) t.lt(120) t.lt(90)", "pen.color(\"black\") pen.penup() pen.hideturtle() pen.goto(0, 260) pen.write(\"Christmas Tree\", align=\"center\",font=(\"Arial\", 24, \"normal\")) # Starting position", "t.rt(120) t.fd(60) t.rt(120) t.fd(30) t.end_fill() t.up() # Star t.fd(30) t.rt(120) t.fd(60) t.lt(120) t.rt(180)", "t.lt(120) t.rt(180) t.lt(90) t.fd(15) t.rt(90) t.back(20) t.color(\"yellow\") t.down() t.begin_fill() for i in range(5):", "t.begin_fill() t.fd(35) t.rt(120) t.fd(70) t.rt(120) t.fd(70) t.rt(120) t.fd(35) t.end_fill() t.up() # Thrid Triangle", "t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(35) t.rt(120) t.fd(70) t.rt(120) t.fd(70) t.rt(120) t.fd(35) t.end_fill()", "pen = turtle.Turtle() pen.speed(0) pen.color(\"black\") pen.penup() pen.hideturtle() pen.goto(0, 260) pen.write(\"Christmas Tree\", align=\"center\",font=(\"Arial\", 24,", "# Second Triangle t.lt(180) t.fd(80) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(35) t.rt(120)", "t.begin_fill() t.rt(180) t.fd(80) t.lt(120) t.fd(80) t.lt(120) t.fd(80) t.end_fill() t.up() # Second Triangle t.lt(180)", "t.fd(60) 
t.end_fill() t.up() # First triangle t.lt(180) t.fd(60) t.lt(90) t.fd(20) t.down() t.color(\"green\") t.begin_fill()", "position t.up() t.rt(90) t.fd(100) t.lt(90) t.down() # Stump t.color(\"brown\") t.begin_fill() t.fd(40) t.lt(90) t.fd(60)", "t.up() # Thrid Triangle t.fd(35) t.rt(120) t.fd(70) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill()", "t.fd(40) t.lt(90) t.fd(60) t.end_fill() t.up() # First triangle t.lt(180) t.fd(60) t.lt(90) t.fd(20) t.down()", "t.fd(60) t.lt(90) t.fd(20) t.down() t.color(\"green\") t.begin_fill() t.rt(180) t.fd(80) t.lt(120) t.fd(80) t.lt(120) t.fd(80) t.end_fill()", "t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(30) t.rt(120) t.fd(60) t.rt(120) t.fd(60) t.rt(120) t.fd(30) t.end_fill() t.up()", "Stump t.color(\"brown\") t.begin_fill() t.fd(40) t.lt(90) t.fd(60) t.lt(90) t.fd(40) t.lt(90) t.fd(60) t.end_fill() t.up() #", "# First triangle t.lt(180) t.fd(60) t.lt(90) t.fd(20) t.down() t.color(\"green\") t.begin_fill() t.rt(180) t.fd(80) t.lt(120)", "on the window pen = turtle.Turtle() pen.speed(0) pen.color(\"black\") pen.penup() pen.hideturtle() pen.goto(0, 260) pen.write(\"Christmas", "t.fd(15) t.rt(90) t.back(20) t.color(\"yellow\") t.down() t.begin_fill() for i in range(5): t.forward(40) t.right(144) t.end_fill()", "24, \"normal\")) # Starting position t.up() t.rt(90) t.fd(100) t.lt(90) t.down() # Stump t.color(\"brown\")", "Second Triangle t.lt(180) t.fd(80) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(35) t.rt(120) t.fd(70)", "t.fd(35) t.rt(120) t.fd(70) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(30) t.rt(120) t.fd(60) t.rt(120)", "t.rt(180) t.fd(80) t.lt(120) t.fd(80) t.lt(120) t.fd(80) t.end_fill() t.up() # Second Triangle t.lt(180) t.fd(80)", "triangle t.lt(180) t.fd(60) t.lt(90) t.fd(20) t.down() t.color(\"green\") t.begin_fill() t.rt(180) t.fd(80) t.lt(120) t.fd(80) t.lt(120)", "t.fd(80) t.lt(120) t.fd(80) t.lt(120) t.fd(80) t.end_fill() t.up() # Second Triangle 
t.lt(180) t.fd(80) t.lt(120)", "t.fd(100) t.lt(90) t.down() # Stump t.color(\"brown\") t.begin_fill() t.fd(40) t.lt(90) t.fd(60) t.lt(90) t.fd(40) t.lt(90)", "s.setup(width=800, height=600) # Title on the window pen = turtle.Turtle() pen.speed(0) pen.color(\"black\") pen.penup()", "t.fd(80) t.end_fill() t.up() # Second Triangle t.lt(180) t.fd(80) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down()", "= turtle.Turtle() s.title(\"Christmas Tree\") s.setup(width=800, height=600) # Title on the window pen =", "t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(35) t.rt(120) t.fd(70) t.rt(120) t.fd(70) t.rt(120) t.fd(35)", "turtle.Turtle() pen.speed(0) pen.color(\"black\") pen.penup() pen.hideturtle() pen.goto(0, 260) pen.write(\"Christmas Tree\", align=\"center\",font=(\"Arial\", 24, \"normal\")) #", "# Title on the window pen = turtle.Turtle() pen.speed(0) pen.color(\"black\") pen.penup() pen.hideturtle() pen.goto(0,", "t.rt(90) t.down() t.begin_fill() t.fd(30) t.rt(120) t.fd(60) t.rt(120) t.fd(60) t.rt(120) t.fd(30) t.end_fill() t.up() #", "align=\"center\",font=(\"Arial\", 24, \"normal\")) # Starting position t.up() t.rt(90) t.fd(100) t.lt(90) t.down() # Stump", "t.up() # First triangle t.lt(180) t.fd(60) t.lt(90) t.fd(20) t.down() t.color(\"green\") t.begin_fill() t.rt(180) t.fd(80)", "t.fd(20) t.down() t.color(\"green\") t.begin_fill() t.rt(180) t.fd(80) t.lt(120) t.fd(80) t.lt(120) t.fd(80) t.end_fill() t.up() #", "t.lt(90) t.fd(60) t.lt(90) t.fd(40) t.lt(90) t.fd(60) t.end_fill() t.up() # First triangle t.lt(180) t.fd(60)", "Thrid Triangle t.fd(35) t.rt(120) t.fd(70) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(30) t.rt(120)", "t.fd(30) t.rt(120) t.fd(60) t.lt(120) t.rt(180) t.lt(90) t.fd(15) t.rt(90) t.back(20) t.color(\"yellow\") t.down() t.begin_fill() for", "t.fd(60) t.lt(120) t.rt(180) t.lt(90) t.fd(15) t.rt(90) t.back(20) t.color(\"yellow\") t.down() t.begin_fill() for i in", "Tree\", align=\"center\",font=(\"Arial\", 24, 
\"normal\")) # Starting position t.up() t.rt(90) t.fd(100) t.lt(90) t.down() #", "height=600) # Title on the window pen = turtle.Turtle() pen.speed(0) pen.color(\"black\") pen.penup() pen.hideturtle()", "t.back(20) t.color(\"yellow\") t.down() t.begin_fill() for i in range(5): t.forward(40) t.right(144) t.end_fill() t.hideturtle() while", "t.fd(40) t.lt(90) t.fd(60) t.lt(90) t.fd(40) t.lt(90) t.fd(60) t.end_fill() t.up() # First triangle t.lt(180)", "= turtle.Turtle() pen.speed(0) pen.color(\"black\") pen.penup() pen.hideturtle() pen.goto(0, 260) pen.write(\"Christmas Tree\", align=\"center\",font=(\"Arial\", 24, \"normal\"))", "t.fd(35) t.rt(120) t.fd(70) t.rt(120) t.fd(70) t.rt(120) t.fd(35) t.end_fill() t.up() # Thrid Triangle t.fd(35)", "t.fd(70) t.rt(120) t.fd(70) t.rt(120) t.fd(35) t.end_fill() t.up() # Thrid Triangle t.fd(35) t.rt(120) t.fd(70)", "t.rt(90) t.down() t.begin_fill() t.fd(35) t.rt(120) t.fd(70) t.rt(120) t.fd(70) t.rt(120) t.fd(35) t.end_fill() t.up() #", "t.up() # Second Triangle t.lt(180) t.fd(80) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(35)", "t.fd(35) t.end_fill() t.up() # Thrid Triangle t.fd(35) t.rt(120) t.fd(70) t.lt(120) t.lt(90) t.fd(20) t.rt(90)", "t.end_fill() t.up() # Thrid Triangle t.fd(35) t.rt(120) t.fd(70) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down()", "t.lt(180) t.fd(80) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(35) t.rt(120) t.fd(70) t.rt(120) t.fd(70)", "t.rt(90) t.back(20) t.color(\"yellow\") t.down() t.begin_fill() for i in range(5): t.forward(40) t.right(144) t.end_fill() t.hideturtle()", "First triangle t.lt(180) t.fd(60) t.lt(90) t.fd(20) t.down() t.color(\"green\") t.begin_fill() t.rt(180) t.fd(80) t.lt(120) t.fd(80)", "# Starting position t.up() t.rt(90) t.fd(100) t.lt(90) t.down() # Stump t.color(\"brown\") t.begin_fill() t.fd(40)", "t.up() t.rt(90) t.fd(100) t.lt(90) t.down() # Stump t.color(\"brown\") t.begin_fill() t.fd(40) t.lt(90) t.fd(60) t.lt(90)", "t.down() 
t.begin_fill() t.fd(30) t.rt(120) t.fd(60) t.rt(120) t.fd(60) t.rt(120) t.fd(30) t.end_fill() t.up() # Star", "t.begin_fill() t.fd(30) t.rt(120) t.fd(60) t.rt(120) t.fd(60) t.rt(120) t.fd(30) t.end_fill() t.up() # Star t.fd(30)", "t.end_fill() t.up() # First triangle t.lt(180) t.fd(60) t.lt(90) t.fd(20) t.down() t.color(\"green\") t.begin_fill() t.rt(180)", "t.rt(120) t.fd(60) t.lt(120) t.rt(180) t.lt(90) t.fd(15) t.rt(90) t.back(20) t.color(\"yellow\") t.down() t.begin_fill() for i", "window pen = turtle.Turtle() pen.speed(0) pen.color(\"black\") pen.penup() pen.hideturtle() pen.goto(0, 260) pen.write(\"Christmas Tree\", align=\"center\",font=(\"Arial\",", "# Stump t.color(\"brown\") t.begin_fill() t.fd(40) t.lt(90) t.fd(60) t.lt(90) t.fd(40) t.lt(90) t.fd(60) t.end_fill() t.up()", "t.fd(70) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(30) t.rt(120) t.fd(60) t.rt(120) t.fd(60) t.rt(120)", "t = turtle.Turtle() s.title(\"Christmas Tree\") s.setup(width=800, height=600) # Title on the window pen", "pen.goto(0, 260) pen.write(\"Christmas Tree\", align=\"center\",font=(\"Arial\", 24, \"normal\")) # Starting position t.up() t.rt(90) t.fd(100)", "260) pen.write(\"Christmas Tree\", align=\"center\",font=(\"Arial\", 24, \"normal\")) # Starting position t.up() t.rt(90) t.fd(100) t.lt(90)", "pen.write(\"Christmas Tree\", align=\"center\",font=(\"Arial\", 24, \"normal\")) # Starting position t.up() t.rt(90) t.fd(100) t.lt(90) t.down()", "t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(35) t.rt(120) t.fd(70) t.rt(120) t.fd(70) t.rt(120) t.fd(35) t.end_fill() t.up()", "Triangle t.fd(35) t.rt(120) t.fd(70) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(30) t.rt(120) t.fd(60)", "t.rt(120) t.fd(70) t.lt(120) t.lt(90) t.fd(20) t.rt(90) t.down() t.begin_fill() t.fd(30) t.rt(120) t.fd(60) t.rt(120) t.fd(60)" ]
[ "use this file except in # compliance with the Apache License and the", "it: # Section 6. Trademarks. is deleted and replaced with: # # 6.", "is deleted and replaced with: # # 6. Trademarks. This License does not", "is # distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "language governing permissions and limitations under the Apache License. from pxr import Usd,", "Unless required by applicable law or agreed to in writing, software # distributed", "the Apache License. from pxr import Usd, UsdGeom, Vt import unittest class TestUsdGeomMesh(unittest.TestCase):", "unittest class TestUsdGeomMesh(unittest.TestCase): def test_ValidateTopology(self): \"\"\"Tests helpers for validating mesh topology.\"\"\" # sum(vertexCounts)", "numPoints=3) self.assertFalse(valid) # Make sure we have a reason. self.assertTrue(why) # Negative vertex", "self.assertFalse(valid) # Make sure we have a reason. self.assertTrue(why) # Valid topology. faceVertexIndices", "numPoints=6) self.assertTrue(valid) # Shoult not have set a reason. self.assertFalse(why) if __name__ ==", "the NOTICE file. # # You may obtain a copy of the Apache", "Licensed under the Apache License, Version 2.0 (the \"Apache License\") # with the", "Apache License and the following modification to it: # Section 6. Trademarks. is", "for validating mesh topology.\"\"\" # sum(vertexCounts) != len(vertexIndices) faceVertexIndices = Vt.IntArray([0,1,2]) faceVertexCounts =", "OR CONDITIONS OF ANY # KIND, either express or implied. See the Apache", "indices. faceVertexIndices = Vt.IntArray([0,-1,1]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid)", "valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=6) self.assertTrue(valid) # Shoult not have set a reason.", "reason. self.assertTrue(why) # Out of range vertex indices. 
faceVertexIndices = Vt.IntArray([1,2,3]) faceVertexCounts =", "UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=6) self.assertTrue(valid) # Shoult not have set a reason. self.assertFalse(why) if", "KIND, either express or implied. See the Apache License for the specific #", "faceVertexIndices = Vt.IntArray([1,2,3]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) #", "# Out of range vertex indices. faceVertexIndices = Vt.IntArray([1,2,3]) faceVertexCounts = Vt.IntArray([3]) valid,why", "either express or implied. See the Apache License for the specific # language", "self.assertTrue(why) # Out of range vertex indices. faceVertexIndices = Vt.IntArray([1,2,3]) faceVertexCounts = Vt.IntArray([3])", "# 6. Trademarks. This License does not grant permission to use the trade", "2017 Pixar # # Licensed under the Apache License, Version 2.0 (the \"Apache", "specific # language governing permissions and limitations under the Apache License. from pxr", "# # You may obtain a copy of the Apache License at #", "You may obtain a copy of the Apache License at # # http://www.apache.org/licenses/LICENSE-2.0", "Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we have a", "# Copyright 2017 Pixar # # Licensed under the Apache License, Version 2.0", "Licensor # and its affiliates, except as required to comply with Section 4(c)", "Out of range vertex indices. faceVertexIndices = Vt.IntArray([1,2,3]) faceVertexCounts = Vt.IntArray([3]) valid,why =", "the Licensor # and its affiliates, except as required to comply with Section", "Negative vertex indices. 
faceVertexIndices = Vt.IntArray([0,-1,1]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts,", "class TestUsdGeomMesh(unittest.TestCase): def test_ValidateTopology(self): \"\"\"Tests helpers for validating mesh topology.\"\"\" # sum(vertexCounts) !=", "Section 6. Trademarks. is deleted and replaced with: # # 6. Trademarks. This", "following modification; you may not use this file except in # compliance with", "Section 4(c) of # the License and to reproduce the content of the", "the License and to reproduce the content of the NOTICE file. # #", "# Negative vertex indices. faceVertexIndices = Vt.IntArray([0,-1,1]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices,", "Vt.IntArray([0,-1,1]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure", "in writing, software # distributed under the Apache License with the above modification", "we have a reason. self.assertTrue(why) # Valid topology. faceVertexIndices = Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts =", "# names, trademarks, service marks, or product names of the Licensor # and", "# # Licensed under the Apache License, Version 2.0 (the \"Apache License\") #", "Apache License with the above modification is # distributed on an \"AS IS\"", "modification is # distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "a reason. self.assertTrue(why) # Out of range vertex indices. faceVertexIndices = Vt.IntArray([1,2,3]) faceVertexCounts", "to in writing, software # distributed under the Apache License with the above", "faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we have a reason. self.assertTrue(why) # Negative", "self.assertFalse(valid) # Make sure we have a reason. 
self.assertTrue(why) # Out of range", "copy of the Apache License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "and the following modification to it: # Section 6. Trademarks. is deleted and", "modification to it: # Section 6. Trademarks. is deleted and replaced with: #", "service marks, or product names of the Licensor # and its affiliates, except", "its affiliates, except as required to comply with Section 4(c) of # the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied.", "have a reason. self.assertTrue(why) # Valid topology. faceVertexIndices = Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts = Vt.IntArray([3,3])", "of the NOTICE file. # # You may obtain a copy of the", "# distributed under the Apache License with the above modification is # distributed", "distributed under the Apache License with the above modification is # distributed on", "to use the trade # names, trademarks, service marks, or product names of", "faceVertexCounts = Vt.IntArray([3,3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=6) self.assertTrue(valid) # Shoult not have", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "or agreed to in writing, software # distributed under the Apache License with", "License for the specific # language governing permissions and limitations under the Apache", "self.assertTrue(why) # Valid topology. 
faceVertexIndices = Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts = Vt.IntArray([3,3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices,", "Copyright 2017 Pixar # # Licensed under the Apache License, Version 2.0 (the", "Apache License, Version 2.0 (the \"Apache License\") # with the following modification; you", "# compliance with the Apache License and the following modification to it: #", "= Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts = Vt.IntArray([3,3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=6) self.assertTrue(valid) # Shoult", "from pxr import Usd, UsdGeom, Vt import unittest class TestUsdGeomMesh(unittest.TestCase): def test_ValidateTopology(self): \"\"\"Tests", "a reason. self.assertTrue(why) # Negative vertex indices. faceVertexIndices = Vt.IntArray([0,-1,1]) faceVertexCounts = Vt.IntArray([3])", "obtain a copy of the Apache License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "have a reason. self.assertTrue(why) # Out of range vertex indices. faceVertexIndices = Vt.IntArray([1,2,3])", "and limitations under the Apache License. from pxr import Usd, UsdGeom, Vt import", "= UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=6) self.assertTrue(valid) # Shoult not have set a reason. self.assertFalse(why)", "except in # compliance with the Apache License and the following modification to", "a reason. self.assertTrue(why) # Valid topology. faceVertexIndices = Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts = Vt.IntArray([3,3]) valid,why", "the trade # names, trademarks, service marks, or product names of the Licensor", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express", "under the Apache License. 
from pxr import Usd, UsdGeom, Vt import unittest class", "trademarks, service marks, or product names of the Licensor # and its affiliates,", "law or agreed to in writing, software # distributed under the Apache License", "to comply with Section 4(c) of # the License and to reproduce the", "ANY # KIND, either express or implied. See the Apache License for the", "may obtain a copy of the Apache License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "Apache License. from pxr import Usd, UsdGeom, Vt import unittest class TestUsdGeomMesh(unittest.TestCase): def", "of the Licensor # and its affiliates, except as required to comply with", "# # Copyright 2017 Pixar # # Licensed under the Apache License, Version", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "See the Apache License for the specific # language governing permissions and limitations", "vertex indices. faceVertexIndices = Vt.IntArray([0,-1,1]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3)", "topology.\"\"\" # sum(vertexCounts) != len(vertexIndices) faceVertexIndices = Vt.IntArray([0,1,2]) faceVertexCounts = Vt.IntArray([2,2]) valid,why =", "the above modification is # distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "faceVertexIndices = Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts = Vt.IntArray([3,3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=6) self.assertTrue(valid) #", "6. Trademarks. is deleted and replaced with: # # 6. Trademarks. This License", "writing, software # distributed under the Apache License with the above modification is", "indices. 
faceVertexIndices = Vt.IntArray([1,2,3]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid)", "= Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we have", "of range vertex indices. faceVertexIndices = Vt.IntArray([1,2,3]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices,", "numPoints=3) self.assertFalse(valid) # Make sure we have a reason. self.assertTrue(why) # Out of", "the specific # language governing permissions and limitations under the Apache License. from", "OF ANY # KIND, either express or implied. See the Apache License for", "This License does not grant permission to use the trade # names, trademarks,", "names, trademarks, service marks, or product names of the Licensor # and its", "Vt.IntArray([1,2,3]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure", "Vt.IntArray([2,2]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we have a", "# Shoult not have set a reason. self.assertFalse(why) if __name__ == '__main__': unittest.main()", "# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "agreed to in writing, software # distributed under the Apache License with the", "names of the Licensor # and its affiliates, except as required to comply", "def test_ValidateTopology(self): \"\"\"Tests helpers for validating mesh topology.\"\"\" # sum(vertexCounts) != len(vertexIndices) faceVertexIndices", "= Vt.IntArray([2,2]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we have", "# Make sure we have a reason. 
self.assertTrue(why) # Valid topology. faceVertexIndices =", "faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we have a reason. self.assertTrue(why) # Out", "validating mesh topology.\"\"\" # sum(vertexCounts) != len(vertexIndices) faceVertexIndices = Vt.IntArray([0,1,2]) faceVertexCounts = Vt.IntArray([2,2])", "the following modification; you may not use this file except in # compliance", "you may not use this file except in # compliance with the Apache", "2.0 (the \"Apache License\") # with the following modification; you may not use", "the Apache License for the specific # language governing permissions and limitations under", "UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we have a reason. self.assertTrue(why) #", "Vt import unittest class TestUsdGeomMesh(unittest.TestCase): def test_ValidateTopology(self): \"\"\"Tests helpers for validating mesh topology.\"\"\"", "# sum(vertexCounts) != len(vertexIndices) faceVertexIndices = Vt.IntArray([0,1,2]) faceVertexCounts = Vt.IntArray([2,2]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices,", "under the Apache License, Version 2.0 (the \"Apache License\") # with the following", "Trademarks. This License does not grant permission to use the trade # names,", "modification; you may not use this file except in # compliance with the", "License\") # with the following modification; you may not use this file except", "affiliates, except as required to comply with Section 4(c) of # the License", "required to comply with Section 4(c) of # the License and to reproduce", "we have a reason. self.assertTrue(why) # Negative vertex indices. faceVertexIndices = Vt.IntArray([0,-1,1]) faceVertexCounts", "faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we have a reason. self.assertTrue(why) # Valid", "not use this file except in # compliance with the Apache License and", "the following modification to it: # Section 6. 
Trademarks. is deleted and replaced", "# You may obtain a copy of the Apache License at # #", "faceVertexIndices = Vt.IntArray([0,1,2]) faceVertexCounts = Vt.IntArray([2,2]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) #", "software # distributed under the Apache License with the above modification is #", "CONDITIONS OF ANY # KIND, either express or implied. See the Apache License", "# # Unless required by applicable law or agreed to in writing, software", "# Make sure we have a reason. self.assertTrue(why) # Negative vertex indices. faceVertexIndices", "and its affiliates, except as required to comply with Section 4(c) of #", "Valid topology. faceVertexIndices = Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts = Vt.IntArray([3,3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=6)", "Make sure we have a reason. self.assertTrue(why) # Valid topology. faceVertexIndices = Vt.IntArray([0,1,2,3,4,5])", "the Apache License with the above modification is # distributed on an \"AS", "# Unless required by applicable law or agreed to in writing, software #", "a copy of the Apache License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "self.assertTrue(valid) # Shoult not have set a reason. self.assertFalse(why) if __name__ == '__main__':", "file. # # You may obtain a copy of the Apache License at", "self.assertTrue(why) # Negative vertex indices. faceVertexIndices = Vt.IntArray([0,-1,1]) faceVertexCounts = Vt.IntArray([3]) valid,why =", "by applicable law or agreed to in writing, software # distributed under the", "in # compliance with the Apache License and the following modification to it:", "Vt.IntArray([3,3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=6) self.assertTrue(valid) # Shoult not have set a", "we have a reason. self.assertTrue(why) # Out of range vertex indices. 
faceVertexIndices =", "Make sure we have a reason. self.assertTrue(why) # Negative vertex indices. faceVertexIndices =", "License and the following modification to it: # Section 6. Trademarks. is deleted", "# language governing permissions and limitations under the Apache License. from pxr import", "UsdGeom, Vt import unittest class TestUsdGeomMesh(unittest.TestCase): def test_ValidateTopology(self): \"\"\"Tests helpers for validating mesh", "\"\"\"Tests helpers for validating mesh topology.\"\"\" # sum(vertexCounts) != len(vertexIndices) faceVertexIndices = Vt.IntArray([0,1,2])", "with: # # 6. Trademarks. This License does not grant permission to use", "# Section 6. Trademarks. is deleted and replaced with: # # 6. Trademarks.", "of the Apache License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "sure we have a reason. self.assertTrue(why) # Negative vertex indices. faceVertexIndices = Vt.IntArray([0,-1,1])", "# Valid topology. faceVertexIndices = Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts = Vt.IntArray([3,3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts,", "Apache License for the specific # language governing permissions and limitations under the", "pxr import Usd, UsdGeom, Vt import unittest class TestUsdGeomMesh(unittest.TestCase): def test_ValidateTopology(self): \"\"\"Tests helpers", "not grant permission to use the trade # names, trademarks, service marks, or", "file except in # compliance with the Apache License and the following modification", "Make sure we have a reason. self.assertTrue(why) # Out of range vertex indices.", "permission to use the trade # names, trademarks, service marks, or product names", "replaced with: # # 6. Trademarks. 
This License does not grant permission to", "Pixar # # Licensed under the Apache License, Version 2.0 (the \"Apache License\")", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "Vt.IntArray([0,1,2]) faceVertexCounts = Vt.IntArray([2,2]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure", "or implied. See the Apache License for the specific # language governing permissions", "range vertex indices. faceVertexIndices = Vt.IntArray([1,2,3]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts,", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either", "faceVertexCounts = Vt.IntArray([2,2]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we", "self.assertFalse(valid) # Make sure we have a reason. self.assertTrue(why) # Negative vertex indices.", "TestUsdGeomMesh(unittest.TestCase): def test_ValidateTopology(self): \"\"\"Tests helpers for validating mesh topology.\"\"\" # sum(vertexCounts) != len(vertexIndices)", "the Apache License, Version 2.0 (the \"Apache License\") # with the following modification;", "# # 6. Trademarks. This License does not grant permission to use the", "express or implied. See the Apache License for the specific # language governing", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #", "this file except in # compliance with the Apache License and the following", "= Vt.IntArray([0,1,2]) faceVertexCounts = Vt.IntArray([2,2]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make", "WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. 
See the", "with the above modification is # distributed on an \"AS IS\" BASIS, WITHOUT", "above modification is # distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or", "permissions and limitations under the Apache License. from pxr import Usd, UsdGeom, Vt", "compliance with the Apache License and the following modification to it: # Section", "trade # names, trademarks, service marks, or product names of the Licensor #", "!= len(vertexIndices) faceVertexIndices = Vt.IntArray([0,1,2]) faceVertexCounts = Vt.IntArray([2,2]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3)", "grant permission to use the trade # names, trademarks, service marks, or product", "comply with Section 4(c) of # the License and to reproduce the content", "the Apache License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we have a reason.", "= Vt.IntArray([0,-1,1]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make", "faceVertexIndices = Vt.IntArray([0,-1,1]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) #", "\"Apache License\") # with the following modification; you may not use this file", "under the Apache License with the above modification is # distributed on an", "except as required to comply with Section 4(c) of # the License and", "reason. self.assertTrue(why) # Negative vertex indices. faceVertexIndices = Vt.IntArray([0,-1,1]) faceVertexCounts = Vt.IntArray([3]) valid,why", "implied. 
See the Apache License for the specific # language governing permissions and", "License does not grant permission to use the trade # names, trademarks, service", "the Apache License and the following modification to it: # Section 6. Trademarks.", "NOTICE file. # # You may obtain a copy of the Apache License", "reproduce the content of the NOTICE file. # # You may obtain a", "License, Version 2.0 (the \"Apache License\") # with the following modification; you may", "to it: # Section 6. Trademarks. is deleted and replaced with: # #", "have a reason. self.assertTrue(why) # Negative vertex indices. faceVertexIndices = Vt.IntArray([0,-1,1]) faceVertexCounts =", "import unittest class TestUsdGeomMesh(unittest.TestCase): def test_ValidateTopology(self): \"\"\"Tests helpers for validating mesh topology.\"\"\" #", "does not grant permission to use the trade # names, trademarks, service marks,", "= Vt.IntArray([1,2,3]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make", "or product names of the Licensor # and its affiliates, except as required", "with Section 4(c) of # the License and to reproduce the content of", "numPoints=3) self.assertFalse(valid) # Make sure we have a reason. self.assertTrue(why) # Valid topology.", "applicable law or agreed to in writing, software # distributed under the Apache", "License. from pxr import Usd, UsdGeom, Vt import unittest class TestUsdGeomMesh(unittest.TestCase): def test_ValidateTopology(self):", "len(vertexIndices) faceVertexIndices = Vt.IntArray([0,1,2]) faceVertexCounts = Vt.IntArray([2,2]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid)", "required by applicable law or agreed to in writing, software # distributed under", "sure we have a reason. self.assertTrue(why) # Valid topology. 
faceVertexIndices = Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts", "#!/pxrpythonsubst # # Copyright 2017 Pixar # # Licensed under the Apache License,", "marks, or product names of the Licensor # and its affiliates, except as", "Version 2.0 (the \"Apache License\") # with the following modification; you may not", "and to reproduce the content of the NOTICE file. # # You may", "to reproduce the content of the NOTICE file. # # You may obtain", "sure we have a reason. self.assertTrue(why) # Out of range vertex indices. faceVertexIndices", "# the License and to reproduce the content of the NOTICE file. #", "Trademarks. is deleted and replaced with: # # 6. Trademarks. This License does", "with the following modification; you may not use this file except in #", "# with the following modification; you may not use this file except in", "Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts = Vt.IntArray([3,3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=6) self.assertTrue(valid) # Shoult not", "sum(vertexCounts) != len(vertexIndices) faceVertexIndices = Vt.IntArray([0,1,2]) faceVertexCounts = Vt.IntArray([2,2]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts,", "following modification to it: # Section 6. Trademarks. is deleted and replaced with:", "the content of the NOTICE file. # # You may obtain a copy", "(the \"Apache License\") # with the following modification; you may not use this", "# KIND, either express or implied. See the Apache License for the specific", "limitations under the Apache License. 
from pxr import Usd, UsdGeom, Vt import unittest", "of # the License and to reproduce the content of the NOTICE file.", "with the Apache License and the following modification to it: # Section 6.", "# and its affiliates, except as required to comply with Section 4(c) of", "mesh topology.\"\"\" # sum(vertexCounts) != len(vertexIndices) faceVertexIndices = Vt.IntArray([0,1,2]) faceVertexCounts = Vt.IntArray([2,2]) valid,why", "License with the above modification is # distributed on an \"AS IS\" BASIS,", "topology. faceVertexIndices = Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts = Vt.IntArray([3,3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=6) self.assertTrue(valid)", "governing permissions and limitations under the Apache License. from pxr import Usd, UsdGeom,", "4(c) of # the License and to reproduce the content of the NOTICE", "6. Trademarks. This License does not grant permission to use the trade #", "WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See", "for the specific # language governing permissions and limitations under the Apache License.", "deleted and replaced with: # # 6. Trademarks. This License does not grant", "Usd, UsdGeom, Vt import unittest class TestUsdGeomMesh(unittest.TestCase): def test_ValidateTopology(self): \"\"\"Tests helpers for validating", "= UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we have a reason. self.assertTrue(why)", "content of the NOTICE file. # # You may obtain a copy of", "test_ValidateTopology(self): \"\"\"Tests helpers for validating mesh topology.\"\"\" # sum(vertexCounts) != len(vertexIndices) faceVertexIndices =", "vertex indices. faceVertexIndices = Vt.IntArray([1,2,3]) faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3)", "License and to reproduce the content of the NOTICE file. 
# # You", "faceVertexCounts = Vt.IntArray([3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=3) self.assertFalse(valid) # Make sure we", "Apache License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "product names of the Licensor # and its affiliates, except as required to", "reason. self.assertTrue(why) # Valid topology. faceVertexIndices = Vt.IntArray([0,1,2,3,4,5]) faceVertexCounts = Vt.IntArray([3,3]) valid,why =", "and replaced with: # # 6. Trademarks. This License does not grant permission", "use the trade # names, trademarks, service marks, or product names of the", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND,", "# Make sure we have a reason. self.assertTrue(why) # Out of range vertex", "helpers for validating mesh topology.\"\"\" # sum(vertexCounts) != len(vertexIndices) faceVertexIndices = Vt.IntArray([0,1,2]) faceVertexCounts", "may not use this file except in # compliance with the Apache License", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "import Usd, UsdGeom, Vt import unittest class TestUsdGeomMesh(unittest.TestCase): def test_ValidateTopology(self): \"\"\"Tests helpers for", "= Vt.IntArray([3,3]) valid,why = UsdGeom.Mesh.ValidateTopology(faceVertexIndices, faceVertexCounts, numPoints=6) self.assertTrue(valid) # Shoult not have set", "faceVertexCounts, numPoints=6) self.assertTrue(valid) # Shoult not have set a reason. self.assertFalse(why) if __name__", "# Licensed under the Apache License, Version 2.0 (the \"Apache License\") # with", "as required to comply with Section 4(c) of # the License and to" ]
[ "at # | http://www.boost.org/LICENSE_1_0.txt. # | # ---------------------------------------------------------------------- \"\"\"Contains the MatchValueExpression object\"\"\" import", "Interface from CommonEnvironmentEx.Package import InitRelativeImports # ---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath() _script_dir, _script_name =", "InitRelativeImports(): from ..Common.Impl.MatchExpressionBase import MatchExpressionBase from ...GrammarInfo import AST, DynamicPhrasesType, ParserInfo from ....Parser.Expressions.MatchValueExpressionParserInfo", "( MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo, ) # ---------------------------------------------------------------------- class MatchValueExpression(MatchExpressionBase): \"\"\"\\ Value-based version of a", "= ( match value Add(1, 2): case 1, 2: \"Too low\" case 3:", "import AST, DynamicPhrasesType, ParserInfo from ....Parser.Expressions.MatchValueExpressionParserInfo import ( MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo, ) # ----------------------------------------------------------------------", "Union[ None, ParserInfo, Callable[[], ParserInfo], Tuple[ParserInfo, Callable[[], ParserInfo]], ]: return cls._ExtractParserInfoImpl( MatchValueExpressionParserInfo, MatchValueCasePhraseParserInfo,", "Add(1, 2): case 1, 2: \"Too low\" case 3: \"Correct\" default: \"Way off!\"", "\"\"\"Contains the MatchValueExpression object\"\"\" import os from typing import Callable, Tuple, Union import", "AST, DynamicPhrasesType, ParserInfo from ....Parser.Expressions.MatchValueExpressionParserInfo import ( MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo, ) # ---------------------------------------------------------------------- class", "match expression. Examples: str_value = ( match value Add(1, 2): case 1, 2:", "2: \"Too low\" case 3: \"Correct\" default: \"Way off!\" ) \"\"\" PHRASE_NAME =", "a match expression. 
Examples: str_value = ( match value Add(1, 2): case 1,", "# | # | MatchValueExpression.py # | # | <NAME> <<EMAIL>> # |", "_script_name = os.path.split(_script_fullpath) # ---------------------------------------------------------------------- with InitRelativeImports(): from ..Common.Impl.MatchExpressionBase import MatchExpressionBase from ...GrammarInfo", "Expression\" # ---------------------------------------------------------------------- def __init__(self): super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME) # ---------------------------------------------------------------------- @classmethod @Interface.override def", "def ExtractParserInfo( cls, node: AST.Node, ) -> Union[ None, ParserInfo, Callable[[], ParserInfo], Tuple[ParserInfo,", "import InitRelativeImports # ---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath() _script_dir, _script_name = os.path.split(_script_fullpath) # ----------------------------------------------------------------------", ") \"\"\" PHRASE_NAME = \"Match Value Expression\" # ---------------------------------------------------------------------- def __init__(self): super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions,", "# | # | <NAME> <<EMAIL>> # | 2021-10-12 10:28:57 # | #", "cls, node: AST.Node, ) -> Union[ None, ParserInfo, Callable[[], ParserInfo], Tuple[ParserInfo, Callable[[], ParserInfo]],", "---------------------------------------------------------------------- class MatchValueExpression(MatchExpressionBase): \"\"\"\\ Value-based version of a match expression. 
Examples: str_value =", "| # | MatchValueExpression.py # | # | <NAME> <<EMAIL>> # | 2021-10-12", "3: \"Correct\" default: \"Way off!\" ) \"\"\" PHRASE_NAME = \"Match Value Expression\" #", "case 1, 2: \"Too low\" case 3: \"Correct\" default: \"Way off!\" ) \"\"\"", "| # | <NAME> <<EMAIL>> # | 2021-10-12 10:28:57 # | # ----------------------------------------------------------------------", "| # ---------------------------------------------------------------------- \"\"\"Contains the MatchValueExpression object\"\"\" import os from typing import Callable,", "PHRASE_NAME = \"Match Value Expression\" # ---------------------------------------------------------------------- def __init__(self): super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME) #", "__init__(self): super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME) # ---------------------------------------------------------------------- @classmethod @Interface.override def ExtractParserInfo( cls, node: AST.Node,", "<<EMAIL>> # | 2021-10-12 10:28:57 # | # ---------------------------------------------------------------------- # | # |", "| Copyright <NAME> 2021 # | Distributed under the Boost Software License, Version", "the Boost Software License, Version 1.0. See # | accompanying file LICENSE_1_0.txt or", "import CommonEnvironment from CommonEnvironment import Interface from CommonEnvironmentEx.Package import InitRelativeImports # ---------------------------------------------------------------------- _script_fullpath", "# | # | Copyright <NAME> 2021 # | Distributed under the Boost", "MatchValueExpression(MatchExpressionBase): \"\"\"\\ Value-based version of a match expression. 
Examples: str_value = ( match", "typing import Callable, Tuple, Union import CommonEnvironment from CommonEnvironment import Interface from CommonEnvironmentEx.Package", "Value Expression\" # ---------------------------------------------------------------------- def __init__(self): super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME) # ---------------------------------------------------------------------- @classmethod @Interface.override", "MatchValueExpression object\"\"\" import os from typing import Callable, Tuple, Union import CommonEnvironment from", "# ---------------------------------------------------------------------- # | # | MatchValueExpression.py # | # | <NAME> <<EMAIL>>", "node: AST.Node, ) -> Union[ None, ParserInfo, Callable[[], ParserInfo], Tuple[ParserInfo, Callable[[], ParserInfo]], ]:", "from ....Parser.Expressions.MatchValueExpressionParserInfo import ( MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo, ) # ---------------------------------------------------------------------- class MatchValueExpression(MatchExpressionBase): \"\"\"\\ Value-based", "..Common.Impl.MatchExpressionBase import MatchExpressionBase from ...GrammarInfo import AST, DynamicPhrasesType, ParserInfo from ....Parser.Expressions.MatchValueExpressionParserInfo import (", "# ---------------------------------------------------------------------- # | # | Copyright <NAME> 2021 # | Distributed under", "10:28:57 # | # ---------------------------------------------------------------------- # | # | Copyright <NAME> 2021 #", "2021-10-12 10:28:57 # | # ---------------------------------------------------------------------- # | # | Copyright <NAME> 2021", "import Callable, Tuple, Union import CommonEnvironment from CommonEnvironment import Interface from CommonEnvironmentEx.Package import", "MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo, ) # ---------------------------------------------------------------------- class 
MatchValueExpression(MatchExpressionBase): \"\"\"\\ Value-based version of a match", "from ..Common.Impl.MatchExpressionBase import MatchExpressionBase from ...GrammarInfo import AST, DynamicPhrasesType, ParserInfo from ....Parser.Expressions.MatchValueExpressionParserInfo import", ") -> Union[ None, ParserInfo, Callable[[], ParserInfo], Tuple[ParserInfo, Callable[[], ParserInfo]], ]: return cls._ExtractParserInfoImpl(", "import MatchExpressionBase from ...GrammarInfo import AST, DynamicPhrasesType, ParserInfo from ....Parser.Expressions.MatchValueExpressionParserInfo import ( MatchValueCasePhraseParserInfo,", "of a match expression. Examples: str_value = ( match value Add(1, 2): case", "http://www.boost.org/LICENSE_1_0.txt. # | # ---------------------------------------------------------------------- \"\"\"Contains the MatchValueExpression object\"\"\" import os from typing", "MatchValueExpression.py # | # | <NAME> <<EMAIL>> # | 2021-10-12 10:28:57 # |", "= \"Match Value Expression\" # ---------------------------------------------------------------------- def __init__(self): super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME) # ----------------------------------------------------------------------", "| # ---------------------------------------------------------------------- # | # | Copyright <NAME> 2021 # | Distributed", "Copyright <NAME> 2021 # | Distributed under the Boost Software License, Version 1.0.", "License, Version 1.0. 
See # | accompanying file LICENSE_1_0.txt or copy at #", "self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME) # ---------------------------------------------------------------------- @classmethod @Interface.override def ExtractParserInfo( cls, node: AST.Node, ) ->", "CommonEnvironment import Interface from CommonEnvironmentEx.Package import InitRelativeImports # ---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath() _script_dir,", "# ---------------------------------------------------------------------- class MatchValueExpression(MatchExpressionBase): \"\"\"\\ Value-based version of a match expression. Examples: str_value", "None, ParserInfo, Callable[[], ParserInfo], Tuple[ParserInfo, Callable[[], ParserInfo]], ]: return cls._ExtractParserInfoImpl( MatchValueExpressionParserInfo, MatchValueCasePhraseParserInfo, node,", "2021 # | Distributed under the Boost Software License, Version 1.0. See #", "---------------------------------------------------------------------- # | # | MatchValueExpression.py # | # | <NAME> <<EMAIL>> #", "\"Too low\" case 3: \"Correct\" default: \"Way off!\" ) \"\"\" PHRASE_NAME = \"Match", "1.0. 
See # | accompanying file LICENSE_1_0.txt or copy at # | http://www.boost.org/LICENSE_1_0.txt.", "---------------------------------------------------------------------- @classmethod @Interface.override def ExtractParserInfo( cls, node: AST.Node, ) -> Union[ None, ParserInfo,", "_script_fullpath = CommonEnvironment.ThisFullpath() _script_dir, _script_name = os.path.split(_script_fullpath) # ---------------------------------------------------------------------- with InitRelativeImports(): from ..Common.Impl.MatchExpressionBase", "| 2021-10-12 10:28:57 # | # ---------------------------------------------------------------------- # | # | Copyright <NAME>", "# | 2021-10-12 10:28:57 # | # ---------------------------------------------------------------------- # | # | Copyright", "# | MatchValueExpression.py # | # | <NAME> <<EMAIL>> # | 2021-10-12 10:28:57", "import Interface from CommonEnvironmentEx.Package import InitRelativeImports # ---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath() _script_dir, _script_name", "# ---------------------------------------------------------------------- @classmethod @Interface.override def ExtractParserInfo( cls, node: AST.Node, ) -> Union[ None,", "value Add(1, 2): case 1, 2: \"Too low\" case 3: \"Correct\" default: \"Way", "file LICENSE_1_0.txt or copy at # | http://www.boost.org/LICENSE_1_0.txt. 
# | # ---------------------------------------------------------------------- \"\"\"Contains", "# ---------------------------------------------------------------------- def __init__(self): super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME) # ---------------------------------------------------------------------- @classmethod @Interface.override def ExtractParserInfo(", "Callable, Tuple, Union import CommonEnvironment from CommonEnvironment import Interface from CommonEnvironmentEx.Package import InitRelativeImports", "= os.path.split(_script_fullpath) # ---------------------------------------------------------------------- with InitRelativeImports(): from ..Common.Impl.MatchExpressionBase import MatchExpressionBase from ...GrammarInfo import", "Tuple, Union import CommonEnvironment from CommonEnvironment import Interface from CommonEnvironmentEx.Package import InitRelativeImports #", "\"Correct\" default: \"Way off!\" ) \"\"\" PHRASE_NAME = \"Match Value Expression\" # ----------------------------------------------------------------------", "from ...GrammarInfo import AST, DynamicPhrasesType, ParserInfo from ....Parser.Expressions.MatchValueExpressionParserInfo import ( MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo, )", "<reponame>davidbrownell/DavidBrownell_TheLanguage # ---------------------------------------------------------------------- # | # | MatchValueExpression.py # | # | <NAME>", "# ---------------------------------------------------------------------- \"\"\"Contains the MatchValueExpression object\"\"\" import os from typing import Callable, Tuple,", "| accompanying file LICENSE_1_0.txt or copy at # | http://www.boost.org/LICENSE_1_0.txt. 
# | #", "import ( MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo, ) # ---------------------------------------------------------------------- class MatchValueExpression(MatchExpressionBase): \"\"\"\\ Value-based version of", "ParserInfo from ....Parser.Expressions.MatchValueExpressionParserInfo import ( MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo, ) # ---------------------------------------------------------------------- class MatchValueExpression(MatchExpressionBase): \"\"\"\\", "AST.Node, ) -> Union[ None, ParserInfo, Callable[[], ParserInfo], Tuple[ParserInfo, Callable[[], ParserInfo]], ]: return", "CommonEnvironmentEx.Package import InitRelativeImports # ---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath() _script_dir, _script_name = os.path.split(_script_fullpath) #", "InitRelativeImports # ---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath() _script_dir, _script_name = os.path.split(_script_fullpath) # ---------------------------------------------------------------------- with", "2): case 1, 2: \"Too low\" case 3: \"Correct\" default: \"Way off!\" )", "from typing import Callable, Tuple, Union import CommonEnvironment from CommonEnvironment import Interface from", "MatchExpressionBase from ...GrammarInfo import AST, DynamicPhrasesType, ParserInfo from ....Parser.Expressions.MatchValueExpressionParserInfo import ( MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo,", "super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME) # ---------------------------------------------------------------------- @classmethod @Interface.override def ExtractParserInfo( cls, node: AST.Node, )", "# | http://www.boost.org/LICENSE_1_0.txt. 
# | # ---------------------------------------------------------------------- \"\"\"Contains the MatchValueExpression object\"\"\" import os", "# | accompanying file LICENSE_1_0.txt or copy at # | http://www.boost.org/LICENSE_1_0.txt. # |", "<NAME> 2021 # | Distributed under the Boost Software License, Version 1.0. See", "CommonEnvironment.ThisFullpath() _script_dir, _script_name = os.path.split(_script_fullpath) # ---------------------------------------------------------------------- with InitRelativeImports(): from ..Common.Impl.MatchExpressionBase import MatchExpressionBase", "# | <NAME> <<EMAIL>> # | 2021-10-12 10:28:57 # | # ---------------------------------------------------------------------- #", "ParserInfo, Callable[[], ParserInfo], Tuple[ParserInfo, Callable[[], ParserInfo]], ]: return cls._ExtractParserInfoImpl( MatchValueExpressionParserInfo, MatchValueCasePhraseParserInfo, node, )", "---------------------------------------------------------------------- # | # | Copyright <NAME> 2021 # | Distributed under the", "| http://www.boost.org/LICENSE_1_0.txt. 
# | # ---------------------------------------------------------------------- \"\"\"Contains the MatchValueExpression object\"\"\" import os from", "ExtractParserInfo( cls, node: AST.Node, ) -> Union[ None, ParserInfo, Callable[[], ParserInfo], Tuple[ParserInfo, Callable[[],", "_script_dir, _script_name = os.path.split(_script_fullpath) # ---------------------------------------------------------------------- with InitRelativeImports(): from ..Common.Impl.MatchExpressionBase import MatchExpressionBase from", "Examples: str_value = ( match value Add(1, 2): case 1, 2: \"Too low\"", "....Parser.Expressions.MatchValueExpressionParserInfo import ( MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo, ) # ---------------------------------------------------------------------- class MatchValueExpression(MatchExpressionBase): \"\"\"\\ Value-based version", "@Interface.override def ExtractParserInfo( cls, node: AST.Node, ) -> Union[ None, ParserInfo, Callable[[], ParserInfo],", "case 3: \"Correct\" default: \"Way off!\" ) \"\"\" PHRASE_NAME = \"Match Value Expression\"", "# | Copyright <NAME> 2021 # | Distributed under the Boost Software License,", "\"\"\" PHRASE_NAME = \"Match Value Expression\" # ---------------------------------------------------------------------- def __init__(self): super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME)", "object\"\"\" import os from typing import Callable, Tuple, Union import CommonEnvironment from CommonEnvironment", "or copy at # | http://www.boost.org/LICENSE_1_0.txt. # | # ---------------------------------------------------------------------- \"\"\"Contains the MatchValueExpression", "Boost Software License, Version 1.0. 
See # | accompanying file LICENSE_1_0.txt or copy", "MatchValueExpressionParserInfo, ) # ---------------------------------------------------------------------- class MatchValueExpression(MatchExpressionBase): \"\"\"\\ Value-based version of a match expression.", "( match value Add(1, 2): case 1, 2: \"Too low\" case 3: \"Correct\"", "| # | Copyright <NAME> 2021 # | Distributed under the Boost Software", "version of a match expression. Examples: str_value = ( match value Add(1, 2):", ") # ---------------------------------------------------------------------- class MatchValueExpression(MatchExpressionBase): \"\"\"\\ Value-based version of a match expression. Examples:", "| <NAME> <<EMAIL>> # | 2021-10-12 10:28:57 # | # ---------------------------------------------------------------------- # |", "LICENSE_1_0.txt or copy at # | http://www.boost.org/LICENSE_1_0.txt. # | # ---------------------------------------------------------------------- \"\"\"Contains the", "# | # ---------------------------------------------------------------------- \"\"\"Contains the MatchValueExpression object\"\"\" import os from typing import", "under the Boost Software License, Version 1.0. See # | accompanying file LICENSE_1_0.txt", "off!\" ) \"\"\" PHRASE_NAME = \"Match Value Expression\" # ---------------------------------------------------------------------- def __init__(self): super(MatchValueExpression,", "\"\"\"\\ Value-based version of a match expression. Examples: str_value = ( match value", "Software License, Version 1.0. 
See # | accompanying file LICENSE_1_0.txt or copy at", "...GrammarInfo import AST, DynamicPhrasesType, ParserInfo from ....Parser.Expressions.MatchValueExpressionParserInfo import ( MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo, ) #", "| MatchValueExpression.py # | # | <NAME> <<EMAIL>> # | 2021-10-12 10:28:57 #", "@classmethod @Interface.override def ExtractParserInfo( cls, node: AST.Node, ) -> Union[ None, ParserInfo, Callable[[],", "default: \"Way off!\" ) \"\"\" PHRASE_NAME = \"Match Value Expression\" # ---------------------------------------------------------------------- def", "\"Match Value Expression\" # ---------------------------------------------------------------------- def __init__(self): super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME) # ---------------------------------------------------------------------- @classmethod", "---------------------------------------------------------------------- def __init__(self): super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME) # ---------------------------------------------------------------------- @classmethod @Interface.override def ExtractParserInfo( cls,", "-> Union[ None, ParserInfo, Callable[[], ParserInfo], Tuple[ParserInfo, Callable[[], ParserInfo]], ]: return cls._ExtractParserInfoImpl( MatchValueExpressionParserInfo,", "---------------------------------------------------------------------- with InitRelativeImports(): from ..Common.Impl.MatchExpressionBase import MatchExpressionBase from ...GrammarInfo import AST, DynamicPhrasesType, ParserInfo", "Value-based version of a match expression. 
Examples: str_value = ( match value Add(1,", "os from typing import Callable, Tuple, Union import CommonEnvironment from CommonEnvironment import Interface", "self.PHRASE_NAME) # ---------------------------------------------------------------------- @classmethod @Interface.override def ExtractParserInfo( cls, node: AST.Node, ) -> Union[", "the MatchValueExpression object\"\"\" import os from typing import Callable, Tuple, Union import CommonEnvironment", "= CommonEnvironment.ThisFullpath() _script_dir, _script_name = os.path.split(_script_fullpath) # ---------------------------------------------------------------------- with InitRelativeImports(): from ..Common.Impl.MatchExpressionBase import", "# | Distributed under the Boost Software License, Version 1.0. See # |", "1, 2: \"Too low\" case 3: \"Correct\" default: \"Way off!\" ) \"\"\" PHRASE_NAME", "See # | accompanying file LICENSE_1_0.txt or copy at # | http://www.boost.org/LICENSE_1_0.txt. #", "Version 1.0. See # | accompanying file LICENSE_1_0.txt or copy at # |", "low\" case 3: \"Correct\" default: \"Way off!\" ) \"\"\" PHRASE_NAME = \"Match Value", "\"Way off!\" ) \"\"\" PHRASE_NAME = \"Match Value Expression\" # ---------------------------------------------------------------------- def __init__(self):", "def __init__(self): super(MatchValueExpression, self).__init__(DynamicPhrasesType.Expressions, self.PHRASE_NAME) # ---------------------------------------------------------------------- @classmethod @Interface.override def ExtractParserInfo( cls, node:", "Distributed under the Boost Software License, Version 1.0. See # | accompanying file", "copy at # | http://www.boost.org/LICENSE_1_0.txt. # | # ---------------------------------------------------------------------- \"\"\"Contains the MatchValueExpression object\"\"\"", "accompanying file LICENSE_1_0.txt or copy at # | http://www.boost.org/LICENSE_1_0.txt. 
# | # ----------------------------------------------------------------------", "match value Add(1, 2): case 1, 2: \"Too low\" case 3: \"Correct\" default:", "from CommonEnvironmentEx.Package import InitRelativeImports # ---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath() _script_dir, _script_name = os.path.split(_script_fullpath)", "CommonEnvironment from CommonEnvironment import Interface from CommonEnvironmentEx.Package import InitRelativeImports # ---------------------------------------------------------------------- _script_fullpath =", "| Distributed under the Boost Software License, Version 1.0. See # | accompanying", "<NAME> <<EMAIL>> # | 2021-10-12 10:28:57 # | # ---------------------------------------------------------------------- # | #", "Union import CommonEnvironment from CommonEnvironment import Interface from CommonEnvironmentEx.Package import InitRelativeImports # ----------------------------------------------------------------------", "# | # ---------------------------------------------------------------------- # | # | Copyright <NAME> 2021 # |", "---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath() _script_dir, _script_name = os.path.split(_script_fullpath) # ---------------------------------------------------------------------- with InitRelativeImports(): from", "# ---------------------------------------------------------------------- with InitRelativeImports(): from ..Common.Impl.MatchExpressionBase import MatchExpressionBase from ...GrammarInfo import AST, DynamicPhrasesType,", "class MatchValueExpression(MatchExpressionBase): \"\"\"\\ Value-based version of a match expression. 
Examples: str_value = (", "os.path.split(_script_fullpath) # ---------------------------------------------------------------------- with InitRelativeImports(): from ..Common.Impl.MatchExpressionBase import MatchExpressionBase from ...GrammarInfo import AST,", "str_value = ( match value Add(1, 2): case 1, 2: \"Too low\" case", "# ---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath() _script_dir, _script_name = os.path.split(_script_fullpath) # ---------------------------------------------------------------------- with InitRelativeImports():", "with InitRelativeImports(): from ..Common.Impl.MatchExpressionBase import MatchExpressionBase from ...GrammarInfo import AST, DynamicPhrasesType, ParserInfo from", "from CommonEnvironment import Interface from CommonEnvironmentEx.Package import InitRelativeImports # ---------------------------------------------------------------------- _script_fullpath = CommonEnvironment.ThisFullpath()", "import os from typing import Callable, Tuple, Union import CommonEnvironment from CommonEnvironment import", "expression. Examples: str_value = ( match value Add(1, 2): case 1, 2: \"Too", "DynamicPhrasesType, ParserInfo from ....Parser.Expressions.MatchValueExpressionParserInfo import ( MatchValueCasePhraseParserInfo, MatchValueExpressionParserInfo, ) # ---------------------------------------------------------------------- class MatchValueExpression(MatchExpressionBase):", "---------------------------------------------------------------------- \"\"\"Contains the MatchValueExpression object\"\"\" import os from typing import Callable, Tuple, Union" ]
[ "= ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. Possible values: {}\".format(action_name, possible)) events.append(event) def get_action(self,", "retornada nesta funcao. def add_action(self, action_name): action_name = action_name.lower() if self.actions.get(action_name) is not", "action_name = action_name.lower() if self.actions.get(action_name) is not None: raise DuplicatedActionException(\"Action {} already exists\".format(action_name))", "self.actions.get(action_name) def get_steps_to_execute(self, action_name): events, parameters = self.__match_action(action_name) if events is None: possible", "e ela deveria ser retornada nesta funcao. def add_action(self, action_name): action_name = action_name.lower()", "= list(self.unused) return unused_actions def was_used(self, action_name): return action_name in self.used def __match_action(self,", "set() self.used = set() # TODO: Refactor: Deveria ter classe Action, e ela", "steps_to_execute def get_unused_actions(self): unused_actions = list(self.unused) return unused_actions def was_used(self, action_name): return action_name", "UndefinedActionException(\"Undefined action {}. Possible values: {}\".format(action_name, possible)) assert events is not None steps_to_execute", "','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. Possible values: {}\".format(action_name, possible)) assert events is not", "action_name): events, parameters = self.__match_action(action_name) if events is None: possible = ','.join(list(self.actions)) raise", "self.actions.keys(): r = parse(action_type, action_name) if r: self.unused.discard(action_type) self.used.add(action_type) return self.actions[action_type], r.named return", "steps_to_execute = '' for event in events: step_event = self.__replace_parameters(event, parameters) steps_to_execute +=", "None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. 
Possible values: {}\".format(action_name, possible)) assert", "{} self.unused = set() self.used = set() # TODO: Refactor: Deveria ter classe", "= self.__match_action(action_name) if events is None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}.", "return unused_actions def was_used(self, action_name): return action_name in self.used def __match_action(self, action_name): for", "+ '\\n' return steps_to_execute def get_unused_actions(self): unused_actions = list(self.unused) return unused_actions def was_used(self,", "parameters.items(): token_to_find = \"{\" + parameter + \"}\" step = step.replace(token_to_find, value) return", "self.actions.get(action_name) if events is None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. Possible", "return steps_to_execute def get_unused_actions(self): unused_actions = list(self.unused) return unused_actions def was_used(self, action_name): return", "self.used.add(action_type) return self.actions[action_type], r.named return None, None def __replace_parameters(self, step, parameters): for parameter,", "events is None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. Possible values: {}\".format(action_name,", "{} already exists\".format(action_name)) self.actions[action_name] = [] self.unused.add(action_name) def add_event(self, action_name, event): action_name =", "r: self.unused.discard(action_type) self.used.add(action_type) return self.actions[action_type], r.named return None, None def __replace_parameters(self, step, parameters):", "ser retornada nesta funcao. 
def add_action(self, action_name): action_name = action_name.lower() if self.actions.get(action_name) is", "= self.actions.get(action_name) if events is None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}.", "= set() # TODO: Refactor: Deveria ter classe Action, e ela deveria ser", "Possible values: {}\".format(action_name, possible)) events.append(event) def get_action(self, action_name): action_name = action_name.lower() return self.actions.get(action_name)", "Action, e ela deveria ser retornada nesta funcao. def add_action(self, action_name): action_name =", "raise UndefinedActionException(\"Undefined action {}. Possible values: {}\".format(action_name, possible)) events.append(event) def get_action(self, action_name): action_name", "self.actions[action_type], r.named return None, None def __replace_parameters(self, step, parameters): for parameter, value in", "parse import parse class Actions: def __init__(self): self.actions = {} self.unused = set()", "get_unused_actions(self): unused_actions = list(self.unused) return unused_actions def was_used(self, action_name): return action_name in self.used", "self.actions[action_name] = [] self.unused.add(action_name) def add_event(self, action_name, event): action_name = action_name.lower() events =", "Deveria ter classe Action, e ela deveria ser retornada nesta funcao. def add_action(self,", "Refactor: Deveria ter classe Action, e ela deveria ser retornada nesta funcao. 
def", "self.used = set() # TODO: Refactor: Deveria ter classe Action, e ela deveria", "= action_name.lower() if self.actions.get(action_name) is not None: raise DuplicatedActionException(\"Action {} already exists\".format(action_name)) self.actions[action_name]", "action_name): action_name = action_name.lower() if self.actions.get(action_name) is not None: raise DuplicatedActionException(\"Action {} already", "is not None: raise DuplicatedActionException(\"Action {} already exists\".format(action_name)) self.actions[action_name] = [] self.unused.add(action_name) def", "list(self.unused) return unused_actions def was_used(self, action_name): return action_name in self.used def __match_action(self, action_name):", "possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. Possible values: {}\".format(action_name, possible)) assert events", "None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. Possible values: {}\".format(action_name, possible)) events.append(event)", "# TODO: Refactor: Deveria ter classe Action, e ela deveria ser retornada nesta", "= ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. 
Possible values: {}\".format(action_name, possible)) assert events is", "{}\".format(action_name, possible)) events.append(event) def get_action(self, action_name): action_name = action_name.lower() return self.actions.get(action_name) def get_steps_to_execute(self,", "action_name.lower() events = self.actions.get(action_name) if events is None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined", "in self.actions.keys(): r = parse(action_type, action_name) if r: self.unused.discard(action_type) self.used.add(action_type) return self.actions[action_type], r.named", "def __init__(self): self.actions = {} self.unused = set() self.used = set() # TODO:", "def get_action(self, action_name): action_name = action_name.lower() return self.actions.get(action_name) def get_steps_to_execute(self, action_name): events, parameters", "for action_type in self.actions.keys(): r = parse(action_type, action_name) if r: self.unused.discard(action_type) self.used.add(action_type) return", "for parameter, value in parameters.items(): token_to_find = \"{\" + parameter + \"}\" step", "if r: self.unused.discard(action_type) self.used.add(action_type) return self.actions[action_type], r.named return None, None def __replace_parameters(self, step,", "not None steps_to_execute = '' for event in events: step_event = self.__replace_parameters(event, parameters)", "self.actions.get(action_name) is not None: raise DuplicatedActionException(\"Action {} already exists\".format(action_name)) self.actions[action_name] = [] self.unused.add(action_name)", "action_name): for action_type in self.actions.keys(): r = parse(action_type, action_name) if r: self.unused.discard(action_type) self.used.add(action_type)", "parameter, value in parameters.items(): token_to_find = \"{\" + parameter + \"}\" step =", "if events is None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. 
Possible values:", "action_type in self.actions.keys(): r = parse(action_type, action_name) if r: self.unused.discard(action_type) self.used.add(action_type) return self.actions[action_type],", "r = parse(action_type, action_name) if r: self.unused.discard(action_type) self.used.add(action_type) return self.actions[action_type], r.named return None,", "'' for event in events: step_event = self.__replace_parameters(event, parameters) steps_to_execute += step_event +", "ter classe Action, e ela deveria ser retornada nesta funcao. def add_action(self, action_name):", "parameters) steps_to_execute += step_event + '\\n' return steps_to_execute def get_unused_actions(self): unused_actions = list(self.unused)", "action {}. Possible values: {}\".format(action_name, possible)) assert events is not None steps_to_execute =", "return action_name in self.used def __match_action(self, action_name): for action_type in self.actions.keys(): r =", "return self.actions[action_type], r.named return None, None def __replace_parameters(self, step, parameters): for parameter, value", "DuplicatedActionException(\"Action {} already exists\".format(action_name)) self.actions[action_name] = [] self.unused.add(action_name) def add_event(self, action_name, event): action_name", "values: {}\".format(action_name, possible)) assert events is not None steps_to_execute = '' for event", "self.__replace_parameters(event, parameters) steps_to_execute += step_event + '\\n' return steps_to_execute def get_unused_actions(self): unused_actions =", "def get_unused_actions(self): unused_actions = list(self.unused) return unused_actions def was_used(self, action_name): return action_name in", "__replace_parameters(self, step, parameters): for parameter, value in parameters.items(): token_to_find = \"{\" + parameter", "import parse class Actions: def __init__(self): self.actions = {} self.unused = set() self.used", "None, None def __replace_parameters(self, step, parameters): for parameter, value in 
parameters.items(): token_to_find =", "__init__(self): self.actions = {} self.unused = set() self.used = set() # TODO: Refactor:", "return None, None def __replace_parameters(self, step, parameters): for parameter, value in parameters.items(): token_to_find", "step, parameters): for parameter, value in parameters.items(): token_to_find = \"{\" + parameter +", "if self.actions.get(action_name) is not None: raise DuplicatedActionException(\"Action {} already exists\".format(action_name)) self.actions[action_name] = []", "+ parameter + \"}\" step = step.replace(token_to_find, value) return step class DuplicatedActionException(Exception): pass", "__match_action(self, action_name): for action_type in self.actions.keys(): r = parse(action_type, action_name) if r: self.unused.discard(action_type)", "{}. Possible values: {}\".format(action_name, possible)) assert events is not None steps_to_execute = ''", "action_name): return action_name in self.used def __match_action(self, action_name): for action_type in self.actions.keys(): r", "events: step_event = self.__replace_parameters(event, parameters) steps_to_execute += step_event + '\\n' return steps_to_execute def", "not None: raise DuplicatedActionException(\"Action {} already exists\".format(action_name)) self.actions[action_name] = [] self.unused.add(action_name) def add_event(self,", "action_name.lower() if self.actions.get(action_name) is not None: raise DuplicatedActionException(\"Action {} already exists\".format(action_name)) self.actions[action_name] =", "events is not None steps_to_execute = '' for event in events: step_event =", "funcao. def add_action(self, action_name): action_name = action_name.lower() if self.actions.get(action_name) is not None: raise", "+ \"}\" step = step.replace(token_to_find, value) return step class DuplicatedActionException(Exception): pass class UndefinedActionException(Exception):", "nesta funcao. 
def add_action(self, action_name): action_name = action_name.lower() if self.actions.get(action_name) is not None:", "parameters = self.__match_action(action_name) if events is None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action", "parse class Actions: def __init__(self): self.actions = {} self.unused = set() self.used =", "exists\".format(action_name)) self.actions[action_name] = [] self.unused.add(action_name) def add_event(self, action_name, event): action_name = action_name.lower() events", "events, parameters = self.__match_action(action_name) if events is None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined", "step_event + '\\n' return steps_to_execute def get_unused_actions(self): unused_actions = list(self.unused) return unused_actions def", "from parse import parse class Actions: def __init__(self): self.actions = {} self.unused =", "TODO: Refactor: Deveria ter classe Action, e ela deveria ser retornada nesta funcao.", "action_name in self.used def __match_action(self, action_name): for action_type in self.actions.keys(): r = parse(action_type,", "[] self.unused.add(action_name) def add_event(self, action_name, event): action_name = action_name.lower() events = self.actions.get(action_name) if", "events.append(event) def get_action(self, action_name): action_name = action_name.lower() return self.actions.get(action_name) def get_steps_to_execute(self, action_name): events,", "action_name = action_name.lower() return self.actions.get(action_name) def get_steps_to_execute(self, action_name): events, parameters = self.__match_action(action_name) if", "\"}\" step = step.replace(token_to_find, value) return step class DuplicatedActionException(Exception): pass class UndefinedActionException(Exception): pass", "= self.__replace_parameters(event, parameters) steps_to_execute += step_event + '\\n' return steps_to_execute def get_unused_actions(self): unused_actions", "token_to_find = 
\"{\" + parameter + \"}\" step = step.replace(token_to_find, value) return step", "deveria ser retornada nesta funcao. def add_action(self, action_name): action_name = action_name.lower() if self.actions.get(action_name)", "classe Action, e ela deveria ser retornada nesta funcao. def add_action(self, action_name): action_name", "None def __replace_parameters(self, step, parameters): for parameter, value in parameters.items(): token_to_find = \"{\"", "add_event(self, action_name, event): action_name = action_name.lower() events = self.actions.get(action_name) if events is None:", "{}. Possible values: {}\".format(action_name, possible)) events.append(event) def get_action(self, action_name): action_name = action_name.lower() return", "'\\n' return steps_to_execute def get_unused_actions(self): unused_actions = list(self.unused) return unused_actions def was_used(self, action_name):", "= [] self.unused.add(action_name) def add_event(self, action_name, event): action_name = action_name.lower() events = self.actions.get(action_name)", "assert events is not None steps_to_execute = '' for event in events: step_event", "= action_name.lower() events = self.actions.get(action_name) if events is None: possible = ','.join(list(self.actions)) raise", "value in parameters.items(): token_to_find = \"{\" + parameter + \"}\" step = step.replace(token_to_find,", "None: raise DuplicatedActionException(\"Action {} already exists\".format(action_name)) self.actions[action_name] = [] self.unused.add(action_name) def add_event(self, action_name,", "values: {}\".format(action_name, possible)) events.append(event) def get_action(self, action_name): action_name = action_name.lower() return self.actions.get(action_name) def", "self.unused.add(action_name) def add_event(self, action_name, event): action_name = action_name.lower() events = self.actions.get(action_name) if events", "{}\".format(action_name, possible)) assert events is not None steps_to_execute = '' for event in", "raise 
DuplicatedActionException(\"Action {} already exists\".format(action_name)) self.actions[action_name] = [] self.unused.add(action_name) def add_event(self, action_name, event):", "unused_actions = list(self.unused) return unused_actions def was_used(self, action_name): return action_name in self.used def", "= {} self.unused = set() self.used = set() # TODO: Refactor: Deveria ter", "def add_event(self, action_name, event): action_name = action_name.lower() events = self.actions.get(action_name) if events is", "self.__match_action(action_name) if events is None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. Possible", "steps_to_execute += step_event + '\\n' return steps_to_execute def get_unused_actions(self): unused_actions = list(self.unused) return", "in self.used def __match_action(self, action_name): for action_type in self.actions.keys(): r = parse(action_type, action_name)", "possible)) assert events is not None steps_to_execute = '' for event in events:", "parameters): for parameter, value in parameters.items(): token_to_find = \"{\" + parameter + \"}\"", "def get_steps_to_execute(self, action_name): events, parameters = self.__match_action(action_name) if events is None: possible =", "None steps_to_execute = '' for event in events: step_event = self.__replace_parameters(event, parameters) steps_to_execute", "possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. 
Possible values: {}\".format(action_name, possible)) events.append(event) def", "action_name = action_name.lower() events = self.actions.get(action_name) if events is None: possible = ','.join(list(self.actions))", "in parameters.items(): token_to_find = \"{\" + parameter + \"}\" step = step.replace(token_to_find, value)", "r.named return None, None def __replace_parameters(self, step, parameters): for parameter, value in parameters.items():", "action_name) if r: self.unused.discard(action_type) self.used.add(action_type) return self.actions[action_type], r.named return None, None def __replace_parameters(self,", "def was_used(self, action_name): return action_name in self.used def __match_action(self, action_name): for action_type in", "parameter + \"}\" step = step.replace(token_to_find, value) return step class DuplicatedActionException(Exception): pass class", "get_action(self, action_name): action_name = action_name.lower() return self.actions.get(action_name) def get_steps_to_execute(self, action_name): events, parameters =", "= '' for event in events: step_event = self.__replace_parameters(event, parameters) steps_to_execute += step_event", "in events: step_event = self.__replace_parameters(event, parameters) steps_to_execute += step_event + '\\n' return steps_to_execute", "def __replace_parameters(self, step, parameters): for parameter, value in parameters.items(): token_to_find = \"{\" +", "add_action(self, action_name): action_name = action_name.lower() if self.actions.get(action_name) is not None: raise DuplicatedActionException(\"Action {}", "action {}. 
Possible values: {}\".format(action_name, possible)) events.append(event) def get_action(self, action_name): action_name = action_name.lower()", "action_name.lower() return self.actions.get(action_name) def get_steps_to_execute(self, action_name): events, parameters = self.__match_action(action_name) if events is", "= set() self.used = set() # TODO: Refactor: Deveria ter classe Action, e", "= action_name.lower() return self.actions.get(action_name) def get_steps_to_execute(self, action_name): events, parameters = self.__match_action(action_name) if events", "self.unused.discard(action_type) self.used.add(action_type) return self.actions[action_type], r.named return None, None def __replace_parameters(self, step, parameters): for", "already exists\".format(action_name)) self.actions[action_name] = [] self.unused.add(action_name) def add_event(self, action_name, event): action_name = action_name.lower()", "= \"{\" + parameter + \"}\" step = step.replace(token_to_find, value) return step class", "Actions: def __init__(self): self.actions = {} self.unused = set() self.used = set() #", "event in events: step_event = self.__replace_parameters(event, parameters) steps_to_execute += step_event + '\\n' return", "step_event = self.__replace_parameters(event, parameters) steps_to_execute += step_event + '\\n' return steps_to_execute def get_unused_actions(self):", "action_name, event): action_name = action_name.lower() events = self.actions.get(action_name) if events is None: possible", "raise UndefinedActionException(\"Undefined action {}. 
Possible values: {}\".format(action_name, possible)) assert events is not None", "def __match_action(self, action_name): for action_type in self.actions.keys(): r = parse(action_type, action_name) if r:", "action_name): action_name = action_name.lower() return self.actions.get(action_name) def get_steps_to_execute(self, action_name): events, parameters = self.__match_action(action_name)", "Possible values: {}\".format(action_name, possible)) assert events is not None steps_to_execute = '' for", "\"{\" + parameter + \"}\" step = step.replace(token_to_find, value) return step class DuplicatedActionException(Exception):", "= parse(action_type, action_name) if r: self.unused.discard(action_type) self.used.add(action_type) return self.actions[action_type], r.named return None, None", "set() # TODO: Refactor: Deveria ter classe Action, e ela deveria ser retornada", "self.used def __match_action(self, action_name): for action_type in self.actions.keys(): r = parse(action_type, action_name) if", "parse(action_type, action_name) if r: self.unused.discard(action_type) self.used.add(action_type) return self.actions[action_type], r.named return None, None def", "events = self.actions.get(action_name) if events is None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action", "for event in events: step_event = self.__replace_parameters(event, parameters) steps_to_execute += step_event + '\\n'", "','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. 
Possible values: {}\".format(action_name, possible)) events.append(event) def get_action(self, action_name):", "def add_action(self, action_name): action_name = action_name.lower() if self.actions.get(action_name) is not None: raise DuplicatedActionException(\"Action", "was_used(self, action_name): return action_name in self.used def __match_action(self, action_name): for action_type in self.actions.keys():", "self.actions = {} self.unused = set() self.used = set() # TODO: Refactor: Deveria", "class Actions: def __init__(self): self.actions = {} self.unused = set() self.used = set()", "return self.actions.get(action_name) def get_steps_to_execute(self, action_name): events, parameters = self.__match_action(action_name) if events is None:", "+= step_event + '\\n' return steps_to_execute def get_unused_actions(self): unused_actions = list(self.unused) return unused_actions", "get_steps_to_execute(self, action_name): events, parameters = self.__match_action(action_name) if events is None: possible = ','.join(list(self.actions))", "unused_actions def was_used(self, action_name): return action_name in self.used def __match_action(self, action_name): for action_type", "is not None steps_to_execute = '' for event in events: step_event = self.__replace_parameters(event,", "event): action_name = action_name.lower() events = self.actions.get(action_name) if events is None: possible =", "ela deveria ser retornada nesta funcao. def add_action(self, action_name): action_name = action_name.lower() if", "self.unused = set() self.used = set() # TODO: Refactor: Deveria ter classe Action,", "UndefinedActionException(\"Undefined action {}. Possible values: {}\".format(action_name, possible)) events.append(event) def get_action(self, action_name): action_name =", "is None: possible = ','.join(list(self.actions)) raise UndefinedActionException(\"Undefined action {}. 
Possible values: {}\".format(action_name, possible))", "possible)) events.append(event) def get_action(self, action_name): action_name = action_name.lower() return self.actions.get(action_name) def get_steps_to_execute(self, action_name):" ]
[ "TEMPLATE = \"event/enum.html\" LABEL = \"Christmas\" DAY = \"day\" EVE = \"eve\" TIME", "elif e == home.event.holiday.christmas.Event.Time: return self.TIME elif e == home.event.holiday.christmas.Event.Over: return self.OVER return", "home.event.holiday.christmas.Event.Day: return \"fas fa-gifts\" elif e == home.event.holiday.christmas.Event.Eve: return \"fas fa-holly-berry\" elif e", "= home.event.holiday.christmas.Event TEMPLATE = \"event/enum.html\" LABEL = \"Christmas\" DAY = \"day\" EVE =", "home from ws.handler.event.enum import Handler as Parent class Handler(Parent): KLASS = home.event.holiday.christmas.Event TEMPLATE", "OVER = \"is over\" def _get_str(self, e): if e == home.event.holiday.christmas.Event.Day: return self.DAY", "return self.DAY elif e == home.event.holiday.christmas.Event.Eve: return self.EVE elif e == home.event.holiday.christmas.Event.Time: return", "e == home.event.holiday.christmas.Event.Over: return self.OVER return e def get_icon(self, e): if e ==", "home.event.holiday.christmas.Event.Eve: return \"fas fa-holly-berry\" elif e == home.event.holiday.christmas.Event.Time: return \"far fa-calendar-check\" elif e", "= \"eve\" TIME = \"time\" OVER = \"is over\" def _get_str(self, e): if", "LABEL = \"Christmas\" DAY = \"day\" EVE = \"eve\" TIME = \"time\" OVER", "e): if e == home.event.holiday.christmas.Event.Day: return \"fas fa-gifts\" elif e == home.event.holiday.christmas.Event.Eve: return", "== home.event.holiday.christmas.Event.Time: return self.TIME elif e == home.event.holiday.christmas.Event.Over: return self.OVER return e def", "elif e == home.event.holiday.christmas.Event.Eve: return \"fas fa-holly-berry\" elif e == home.event.holiday.christmas.Event.Time: return \"far", "\"fas fa-holly-berry\" elif e == home.event.holiday.christmas.Event.Time: return \"far fa-calendar-check\" elif e == home.event.holiday.christmas.Event.Over:", "Handler as Parent class Handler(Parent): KLASS = home.event.holiday.christmas.Event TEMPLATE = 
\"event/enum.html\" LABEL =", "home.event.holiday.christmas.Event.Day: return self.DAY elif e == home.event.holiday.christmas.Event.Eve: return self.EVE elif e == home.event.holiday.christmas.Event.Time:", "Parent class Handler(Parent): KLASS = home.event.holiday.christmas.Event TEMPLATE = \"event/enum.html\" LABEL = \"Christmas\" DAY", "_get_str(self, e): if e == home.event.holiday.christmas.Event.Day: return self.DAY elif e == home.event.holiday.christmas.Event.Eve: return", "return self.TIME elif e == home.event.holiday.christmas.Event.Over: return self.OVER return e def get_icon(self, e):", "== home.event.holiday.christmas.Event.Day: return \"fas fa-gifts\" elif e == home.event.holiday.christmas.Event.Eve: return \"fas fa-holly-berry\" elif", "= \"event/enum.html\" LABEL = \"Christmas\" DAY = \"day\" EVE = \"eve\" TIME =", "\"event/enum.html\" LABEL = \"Christmas\" DAY = \"day\" EVE = \"eve\" TIME = \"time\"", "def _get_str(self, e): if e == home.event.holiday.christmas.Event.Day: return self.DAY elif e == home.event.holiday.christmas.Event.Eve:", "import home from ws.handler.event.enum import Handler as Parent class Handler(Parent): KLASS = home.event.holiday.christmas.Event", "KLASS = home.event.holiday.christmas.Event TEMPLATE = \"event/enum.html\" LABEL = \"Christmas\" DAY = \"day\" EVE", "return e def get_icon(self, e): if e == home.event.holiday.christmas.Event.Day: return \"fas fa-gifts\" elif", "\"time\" OVER = \"is over\" def _get_str(self, e): if e == home.event.holiday.christmas.Event.Day: return", "= \"is over\" def _get_str(self, e): if e == home.event.holiday.christmas.Event.Day: return self.DAY elif", "e == home.event.holiday.christmas.Event.Time: return self.TIME elif e == home.event.holiday.christmas.Event.Over: return self.OVER return e", "self.TIME elif e == home.event.holiday.christmas.Event.Over: return self.OVER return e def get_icon(self, e): if", "e == home.event.holiday.christmas.Event.Day: return self.DAY elif e == 
home.event.holiday.christmas.Event.Eve: return self.EVE elif e", "\"eve\" TIME = \"time\" OVER = \"is over\" def _get_str(self, e): if e", "= \"time\" OVER = \"is over\" def _get_str(self, e): if e == home.event.holiday.christmas.Event.Day:", "return self.OVER return e def get_icon(self, e): if e == home.event.holiday.christmas.Event.Day: return \"fas", "\"day\" EVE = \"eve\" TIME = \"time\" OVER = \"is over\" def _get_str(self,", "== home.event.holiday.christmas.Event.Day: return self.DAY elif e == home.event.holiday.christmas.Event.Eve: return self.EVE elif e ==", "fa-holly-berry\" elif e == home.event.holiday.christmas.Event.Time: return \"far fa-calendar-check\" elif e == home.event.holiday.christmas.Event.Over: return", "e): if e == home.event.holiday.christmas.Event.Day: return self.DAY elif e == home.event.holiday.christmas.Event.Eve: return self.EVE", "== home.event.holiday.christmas.Event.Eve: return \"fas fa-holly-berry\" elif e == home.event.holiday.christmas.Event.Time: return \"far fa-calendar-check\" elif", "== home.event.holiday.christmas.Event.Eve: return self.EVE elif e == home.event.holiday.christmas.Event.Time: return self.TIME elif e ==", "\"Christmas\" DAY = \"day\" EVE = \"eve\" TIME = \"time\" OVER = \"is", "get_icon(self, e): if e == home.event.holiday.christmas.Event.Day: return \"fas fa-gifts\" elif e == home.event.holiday.christmas.Event.Eve:", "class Handler(Parent): KLASS = home.event.holiday.christmas.Event TEMPLATE = \"event/enum.html\" LABEL = \"Christmas\" DAY =", "home.event.holiday.christmas.Event.Eve: return self.EVE elif e == home.event.holiday.christmas.Event.Time: return self.TIME elif e == home.event.holiday.christmas.Event.Over:", "home.event.holiday.christmas.Event.Over: return self.OVER return e def get_icon(self, e): if e == home.event.holiday.christmas.Event.Day: return", "== home.event.holiday.christmas.Event.Over: return self.OVER return e def get_icon(self, e): if e == home.event.holiday.christmas.Event.Day:", 
"self.DAY elif e == home.event.holiday.christmas.Event.Eve: return self.EVE elif e == home.event.holiday.christmas.Event.Time: return self.TIME", "\"is over\" def _get_str(self, e): if e == home.event.holiday.christmas.Event.Day: return self.DAY elif e", "if e == home.event.holiday.christmas.Event.Day: return self.DAY elif e == home.event.holiday.christmas.Event.Eve: return self.EVE elif", "home.event.holiday.christmas.Event.Time: return \"far fa-calendar-check\" elif e == home.event.holiday.christmas.Event.Over: return \"far fa-calendar-times\" return e", "return \"fas fa-gifts\" elif e == home.event.holiday.christmas.Event.Eve: return \"fas fa-holly-berry\" elif e ==", "= \"day\" EVE = \"eve\" TIME = \"time\" OVER = \"is over\" def", "as Parent class Handler(Parent): KLASS = home.event.holiday.christmas.Event TEMPLATE = \"event/enum.html\" LABEL = \"Christmas\"", "ws.handler.event.enum import Handler as Parent class Handler(Parent): KLASS = home.event.holiday.christmas.Event TEMPLATE = \"event/enum.html\"", "self.OVER return e def get_icon(self, e): if e == home.event.holiday.christmas.Event.Day: return \"fas fa-gifts\"", "EVE = \"eve\" TIME = \"time\" OVER = \"is over\" def _get_str(self, e):", "TIME = \"time\" OVER = \"is over\" def _get_str(self, e): if e ==", "if e == home.event.holiday.christmas.Event.Day: return \"fas fa-gifts\" elif e == home.event.holiday.christmas.Event.Eve: return \"fas", "== home.event.holiday.christmas.Event.Time: return \"far fa-calendar-check\" elif e == home.event.holiday.christmas.Event.Over: return \"far fa-calendar-times\" return", "from ws.handler.event.enum import Handler as Parent class Handler(Parent): KLASS = home.event.holiday.christmas.Event TEMPLATE =", "e def get_icon(self, e): if e == home.event.holiday.christmas.Event.Day: return \"fas fa-gifts\" elif e", "e == home.event.holiday.christmas.Event.Eve: return self.EVE elif e == home.event.holiday.christmas.Event.Time: return self.TIME elif e", "over\" def _get_str(self, 
e): if e == home.event.holiday.christmas.Event.Day: return self.DAY elif e ==", "e == home.event.holiday.christmas.Event.Day: return \"fas fa-gifts\" elif e == home.event.holiday.christmas.Event.Eve: return \"fas fa-holly-berry\"", "return \"fas fa-holly-berry\" elif e == home.event.holiday.christmas.Event.Time: return \"far fa-calendar-check\" elif e ==", "fa-gifts\" elif e == home.event.holiday.christmas.Event.Eve: return \"fas fa-holly-berry\" elif e == home.event.holiday.christmas.Event.Time: return", "home.event.holiday.christmas.Event.Time: return self.TIME elif e == home.event.holiday.christmas.Event.Over: return self.OVER return e def get_icon(self,", "e == home.event.holiday.christmas.Event.Eve: return \"fas fa-holly-berry\" elif e == home.event.holiday.christmas.Event.Time: return \"far fa-calendar-check\"", "elif e == home.event.holiday.christmas.Event.Over: return self.OVER return e def get_icon(self, e): if e", "self.EVE elif e == home.event.holiday.christmas.Event.Time: return self.TIME elif e == home.event.holiday.christmas.Event.Over: return self.OVER", "def get_icon(self, e): if e == home.event.holiday.christmas.Event.Day: return \"fas fa-gifts\" elif e ==", "Handler(Parent): KLASS = home.event.holiday.christmas.Event TEMPLATE = \"event/enum.html\" LABEL = \"Christmas\" DAY = \"day\"", "import Handler as Parent class Handler(Parent): KLASS = home.event.holiday.christmas.Event TEMPLATE = \"event/enum.html\" LABEL", "= \"Christmas\" DAY = \"day\" EVE = \"eve\" TIME = \"time\" OVER =", "return self.EVE elif e == home.event.holiday.christmas.Event.Time: return self.TIME elif e == home.event.holiday.christmas.Event.Over: return", "home.event.holiday.christmas.Event TEMPLATE = \"event/enum.html\" LABEL = \"Christmas\" DAY = \"day\" EVE = \"eve\"", "\"fas fa-gifts\" elif e == home.event.holiday.christmas.Event.Eve: return \"fas fa-holly-berry\" elif e == home.event.holiday.christmas.Event.Time:", "elif e == home.event.holiday.christmas.Event.Eve: return 
self.EVE elif e == home.event.holiday.christmas.Event.Time: return self.TIME elif", "elif e == home.event.holiday.christmas.Event.Time: return \"far fa-calendar-check\" elif e == home.event.holiday.christmas.Event.Over: return \"far", "e == home.event.holiday.christmas.Event.Time: return \"far fa-calendar-check\" elif e == home.event.holiday.christmas.Event.Over: return \"far fa-calendar-times\"", "DAY = \"day\" EVE = \"eve\" TIME = \"time\" OVER = \"is over\"" ]
[ "1 if line[0] > line[-1] and line[-2] > line[-1]: local_extremas += 1 if", "line[-1] and line[0] < line[1]: local_extremas += 1 if line[0] > line[-1] and", "int(input()) if n == 0: break line = list(map(int, stdin.readline().strip().split())) local_extremas = 0", "if line[0] < line[-1] and line[0] < line[1]: local_extremas += 1 if line[0]", "range(1, len(line) - 1): if line[i] > line[i-1] and line[i] > line[i+1]: local_extremas", "if n == 0: break line = list(map(int, stdin.readline().strip().split())) local_extremas = 0 for", "line[0] > line[-1] and line[0] > line[1]: local_extremas += 1 if line[0] <", "< line[1]: local_extremas += 1 if line[0] > line[-1] and line[-2] > line[-1]:", "if line[0] > line[-1] and line[-2] > line[-1]: local_extremas += 1 if line[0]", "> line[-1]: local_extremas += 1 if line[0] < line[-1] and line[-2] < line[-1]:", "stdin.readline().strip().split())) local_extremas = 0 for i in range(1, len(line) - 1): if line[i]", "for i in range(1, len(line) - 1): if line[i] > line[i-1] and line[i]", "i in range(1, len(line) - 1): if line[i] > line[i-1] and line[i] >", "line[0] > line[1]: local_extremas += 1 if line[0] < line[-1] and line[0] <", "local_extremas = 0 for i in range(1, len(line) - 1): if line[i] >", "local_extremas += 1 if line[i] < line[i-1] and line[i] < line[i+1]: local_extremas +=", "line[i-1] and line[i] > line[i+1]: local_extremas += 1 if line[i] < line[i-1] and", "> line[-1] and line[0] > line[1]: local_extremas += 1 if line[0] < line[-1]", "1): if line[i] > line[i-1] and line[i] > line[i+1]: local_extremas += 1 if", "0: break line = list(map(int, stdin.readline().strip().split())) local_extremas = 0 for i in range(1,", "local_extremas += 1 if line[0] < line[-1] and line[0] < line[1]: local_extremas +=", "and line[i] < line[i+1]: local_extremas += 1 if line[0] > line[-1] and line[0]", "+= 1 if line[0] > line[-1] and line[-2] > line[-1]: local_extremas += 1", "in range(1, len(line) - 1): if line[i] > line[i-1] and line[i] > 
line[i+1]:", "line = list(map(int, stdin.readline().strip().split())) local_extremas = 0 for i in range(1, len(line) -", "n = int(input()) if n == 0: break line = list(map(int, stdin.readline().strip().split())) local_extremas", "list(map(int, stdin.readline().strip().split())) local_extremas = 0 for i in range(1, len(line) - 1): if", "< line[-1] and line[0] < line[1]: local_extremas += 1 if line[0] > line[-1]", "== 0: break line = list(map(int, stdin.readline().strip().split())) local_extremas = 0 for i in", "line[i] > line[i+1]: local_extremas += 1 if line[i] < line[i-1] and line[i] <", "< line[i+1]: local_extremas += 1 if line[0] > line[-1] and line[0] > line[1]:", "<filename>ad-hoc/p11496.py from sys import stdin, stdout while True: n = int(input()) if n", "line[i+1]: local_extremas += 1 if line[i] < line[i-1] and line[i] < line[i+1]: local_extremas", "stdout while True: n = int(input()) if n == 0: break line =", "+= 1 if line[0] < line[-1] and line[-2] < line[-1]: local_extremas += 1", "import stdin, stdout while True: n = int(input()) if n == 0: break", "if line[i] < line[i-1] and line[i] < line[i+1]: local_extremas += 1 if line[0]", "stdin, stdout while True: n = int(input()) if n == 0: break line", "< line[i-1] and line[i] < line[i+1]: local_extremas += 1 if line[0] > line[-1]", "0 for i in range(1, len(line) - 1): if line[i] > line[i-1] and", "while True: n = int(input()) if n == 0: break line = list(map(int,", "- 1): if line[i] > line[i-1] and line[i] > line[i+1]: local_extremas += 1", "sys import stdin, stdout while True: n = int(input()) if n == 0:", "= list(map(int, stdin.readline().strip().split())) local_extremas = 0 for i in range(1, len(line) - 1):", "line[-1] and line[-2] > line[-1]: local_extremas += 1 if line[0] < line[-1] and", "and line[0] > line[1]: local_extremas += 1 if line[0] < line[-1] and line[0]", "> line[1]: local_extremas += 1 if line[0] < line[-1] and line[0] < line[1]:", "> line[i+1]: local_extremas += 1 if line[i] < line[i-1] and 
line[i] < line[i+1]:", "True: n = int(input()) if n == 0: break line = list(map(int, stdin.readline().strip().split()))", "local_extremas += 1 if line[0] < line[-1] and line[-2] < line[-1]: local_extremas +=", "1 if line[i] < line[i-1] and line[i] < line[i+1]: local_extremas += 1 if", "and line[i] > line[i+1]: local_extremas += 1 if line[i] < line[i-1] and line[i]", "if line[i] > line[i-1] and line[i] > line[i+1]: local_extremas += 1 if line[i]", "len(line) - 1): if line[i] > line[i-1] and line[i] > line[i+1]: local_extremas +=", "+= 1 if line[0] > line[-1] and line[0] > line[1]: local_extremas += 1", "= int(input()) if n == 0: break line = list(map(int, stdin.readline().strip().split())) local_extremas =", "local_extremas += 1 if line[0] > line[-1] and line[0] > line[1]: local_extremas +=", "> line[-1] and line[-2] > line[-1]: local_extremas += 1 if line[0] < line[-1]", "and line[-2] > line[-1]: local_extremas += 1 if line[0] < line[-1] and line[-2]", "line[i+1]: local_extremas += 1 if line[0] > line[-1] and line[0] > line[1]: local_extremas", "line[i] < line[i+1]: local_extremas += 1 if line[0] > line[-1] and line[0] >", "line[1]: local_extremas += 1 if line[0] < line[-1] and line[0] < line[1]: local_extremas", "line[0] > line[-1] and line[-2] > line[-1]: local_extremas += 1 if line[0] <", "1 if line[0] < line[-1] and line[0] < line[1]: local_extremas += 1 if", "break line = list(map(int, stdin.readline().strip().split())) local_extremas = 0 for i in range(1, len(line)", "> line[i-1] and line[i] > line[i+1]: local_extremas += 1 if line[i] < line[i-1]", "line[-1]: local_extremas += 1 if line[0] < line[-1] and line[-2] < line[-1]: local_extremas", "line[-2] > line[-1]: local_extremas += 1 if line[0] < line[-1] and line[-2] <", "line[i-1] and line[i] < line[i+1]: local_extremas += 1 if line[0] > line[-1] and", "line[1]: local_extremas += 1 if line[0] > line[-1] and line[-2] > line[-1]: local_extremas", "1 if line[0] > line[-1] and line[0] > line[1]: local_extremas 
+= 1 if", "+= 1 if line[0] < line[-1] and line[0] < line[1]: local_extremas += 1", "if line[0] > line[-1] and line[0] > line[1]: local_extremas += 1 if line[0]", "local_extremas += 1 if line[0] > line[-1] and line[-2] > line[-1]: local_extremas +=", "line[-1] and line[0] > line[1]: local_extremas += 1 if line[0] < line[-1] and", "and line[0] < line[1]: local_extremas += 1 if line[0] > line[-1] and line[-2]", "1 if line[0] < line[-1] and line[-2] < line[-1]: local_extremas += 1 print(local_extremas)", "line[0] < line[-1] and line[0] < line[1]: local_extremas += 1 if line[0] >", "line[i] > line[i-1] and line[i] > line[i+1]: local_extremas += 1 if line[i] <", "n == 0: break line = list(map(int, stdin.readline().strip().split())) local_extremas = 0 for i", "line[0] < line[1]: local_extremas += 1 if line[0] > line[-1] and line[-2] >", "line[i] < line[i-1] and line[i] < line[i+1]: local_extremas += 1 if line[0] >", "+= 1 if line[i] < line[i-1] and line[i] < line[i+1]: local_extremas += 1", "= 0 for i in range(1, len(line) - 1): if line[i] > line[i-1]", "from sys import stdin, stdout while True: n = int(input()) if n ==" ]
[ "time import time import os import shutil import numpy as np import torch", "masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) detector_scores_img = draw_detector_scores(out, i=1) detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img = draw_weights(out, i=1) weights_img.save(os.path.join(out_folder,", "results[1], results[2], results[3])) t_err_mean = np.mean(t_err_all) r_err_mean = np.mean(r_err_all) print('Average KITTI metrics over", "_, _, test_loader = get_dataloaders(config) elif config['dataset'] == 'boreas': _, _, test_loader =", "[len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) print('time_used: {}'.format(sum(time_used_all) / len(time_used_all))) if len(T_gt_all) > 0: results", "= draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='mask') keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img = draw_keypoints(batch, out,", "import get_dataloaders from datasets.boreas import get_dataloaders_boreas from datasets.radiate import get_dataloaders_radiate from networks.under_the_radar import", "'src_tgt_matches'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'),", "failed: model.load_state_dict(checkpoint, strict=False) model.eval() model.no_throw = True seq_name_all = list() time_used_all = list()", "T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) else: w = 0 if 'T_21' in batch: 
T_gt.append(batch['T_21'][w].numpy().squeeze())", "= list() t_err_all = list() r_err_all = list() seq_nums = config['test_split'] for seq_num", "np import torch from datasets.oxford import get_dataloaders from datasets.boreas import get_dataloaders_boreas from datasets.radiate", "os.path.join(out_folder, 'failed_{}'.format(batchi)) os.makedirs(fail_folder, exist_ok=True) makedirs_for_visualization(fail_folder) visualize(batchi, batch, out, config, fail_folder) print_used_time(model) raise out['exception']", "__name__ == '__main__': torch.set_num_threads(8) parser = build_parser() args = parser.parse_args() out_folder = args.out_folder", "list() T_gt = list() T_pred = list() print('Evaluating sequence {} (len {}): {}'.format(seq_num,", "T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred = out['R'][0].detach().cpu().numpy().squeeze() t_pred = out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred, t_pred)) elif config['model'] == 'HERO':", "failed = True if failed: model.load_state_dict(checkpoint, strict=False) model.eval() model.no_throw = True seq_name_all =", "'failed_{}'.format(batchi)) os.makedirs(fail_folder, exist_ok=True) makedirs_for_visualization(fail_folder) visualize(batchi, batch, out, config, fail_folder) print_used_time(model) raise out['exception'] if", "failed = False try: model.load_state_dict(checkpoint['model_state_dict'], strict=False) except Exception as e: print(e) failed =", "seq_name_all.append(seq_name) T_gt_all.extend(T_gt) T_pred_all.extend(T_pred) t_err, r_err = computeKittiMetrics(T_gt, T_pred, [len(T_gt)]) print('SEQ: {} : {}'.format(seq_num,", "os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True) 
def", "_, test_loader = get_dataloaders(config) elif config['dataset'] == 'boreas': _, _, test_loader = get_dataloaders_boreas(config)", "if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) time_used.append(time() - ts) if (batchi", "mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1))) masked_radar_img = draw_masked_radar(batch, i=1) masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) detector_scores_img = draw_detector_scores(out, i=1) detector_scores_img.save(os.path.join(out_folder,", "'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def print_used_time(model): print(\"Time used:\") print(\" All: {} s\".format(np.mean(model.time_used['all']))) print(\" Feature map extraction:", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True)", "argparse import json from time import time import os import shutil import numpy", "if 'T_21' in batch: T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred = out['R'][0].detach().cpu().numpy().squeeze() t_pred = out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred, t_pred))", "draw_on='detector_scores', filtering='mask') keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='none') keypoints_on_detector_scores_all_img.save(os.path.join(out_folder,", "src_tgt_matches_all_img.save(os.path.join(out_folder, 
'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', draw_uncertainty_scale=20) src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img =", "t_err_all, r_err_all): line = '{}: {} {}\\n'.format(seq_name, t_err, r_err) f.write(line) f.write(\"\\n\") f.write(\"mean: {}", "'mask/mask_{}.png'.format(batchi+1))) masked_radar_img = draw_masked_radar(batch, i=1) masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) detector_scores_img = draw_detector_scores(out, i=1) detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1)))", "UnderTheRadar(config).to(config['gpuid']) elif config['model'] == 'HERO': model = HERO(config).to(config['gpuid']) model.solver.sliding_flag = False checkpoint =", "print('dt: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(results[0], results[1], results[2], results[3])) t_err_mean =", "0: results = computeMedianError(T_gt_all, T_pred_all) print('dt: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(results[0],", "batch in enumerate(test_loader): ts = time() with torch.no_grad(): out = model(batch) if out['exception']", "All: {} s\".format(np.mean(model.time_used['all']))) print(\" Feature map extraction: {} s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\" Keypoint extraction: {}", "= draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='none') keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img = draw_src_tgt_matches(batch, out,", "src_tgt_matches_on_detector_scores_only_masked_img = 
draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='mask') src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch, out,", "{} (len {}): {}'.format(seq_num, seq_len, seq_name)) if with_visualization: out_vis_folder = os.path.join(out_folder, seq_name) makedirs_for_visualization(out_vis_folder)", "computeKittiMetrics(T_gt, T_pred, [len(T_gt)]) print('SEQ: {} : {}'.format(seq_num, seq_name)) print('KITTI t_err: {} %'.format(t_err)) print('KITTI", "r_err_all): line = '{}: {} {}\\n'.format(seq_name, t_err, r_err) f.write(line) f.write(\"\\n\") f.write(\"mean: {} {}\\n\".format(t_err_mean,", "os.path.basename(args.config)) if args.config != config_copy: shutil.copy(args.config, config_copy) if config['model'] == 'UnderTheRadar': model =", "batchi % config['vis_rate'] == 0: visualize(batchi, batch, out, config, out_vis_folder) if config['model'] ==", "torch from datasets.oxford import get_dataloaders from datasets.boreas import get_dataloaders_boreas from datasets.radiate import get_dataloaders_radiate", "'keypoints_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'),", "not args.no_visualization os.makedirs(out_folder, exist_ok=True) with open(args.config) as f: config = json.load(f) config_copy =", "== 0: visualize(batchi, batch, out, config, out_vis_folder) if config['model'] == 'UnderTheRadar': if 'T_21'", "src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 
'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img = draw_src_tgt_matches(batch, out, config, filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder, 'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch,", "draw_src_tgt_matches(batch, out, config, filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img = draw_src_tgt_matches(batch, out, config, filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder,", "deg/m'.format(r_err)) t_err_all.append(t_err) r_err_all.append(r_err) fname = os.path.join(out_folder, seq_name + '.png') if len(T_gt) > 0:", "returnTensor=False, savePDF=True, fnames=[fname]) print('time_used: {}'.format(sum(time_used_all) / len(time_used_all))) if len(T_gt_all) > 0: results =", "datasets.radiate import get_dataloaders_radiate from networks.under_the_radar import UnderTheRadar from networks.hero import HERO from utils.utils", "r_err: {} deg/m'.format(r_err)) t_err_all.append(t_err) r_err_all.append(r_err) fname = os.path.join(out_folder, seq_name + '.png') if len(T_gt)", "keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img = draw_keypoints(batch, out, config, i=1, filtering='none') keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) keypoints_on_detector_scores_img =", "HERO from utils.utils import get_transform2, get_T_ba, computeKittiMetrics, computeMedianError from utils.vis import plot_sequences, draw_radar,", "keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='mask') 
keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img = draw_keypoints(batch,", "list() seq_nums = config['test_split'] for seq_num in seq_nums: config['test_split'] = [seq_num] if config['dataset']", "in zip(seq_name_all, t_err_all, r_err_all): line = '{}: {} {}\\n'.format(seq_name, t_err, r_err) f.write(line) f.write(\"\\n\")", "action='store_true') parser.add_argument('-out-fld', '--out-folder', type=str, required=True) return parser def makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "torch.no_grad(): out = model(batch) if out['exception'] is not None: fail_folder = os.path.join(out_folder, 'failed_{}'.format(batchi))", "'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', draw_uncertainty_scale=20) src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img = draw_src_tgt_matches(batch,", "= True def build_parser(): parser = argparse.ArgumentParser() parser.add_argument('--config', type=str, required=True) parser.add_argument('--checkpoint', type=str, required=True)", "{} s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\" Keypoint matching: {} s\".format(np.mean(model.time_used['keypoint_matching']))) print(\" Optimization: {} s\".format(np.mean(model.time_used['optimization']))) if __name__", "len(T_gt_all) > 0: results = computeMedianError(T_gt_all, T_pred_all) print('dt: {} sigma_dt: {} dr: {}", "1): if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) 
else: w = 0", "from networks.under_the_radar import UnderTheRadar from networks.hero import HERO from utils.utils import get_transform2, get_T_ba,", "'radar'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'weights'),", "model.solver.sliding_flag = False checkpoint = torch.load(args.checkpoint, map_location=torch.device(config['gpuid'])) failed = False try: model.load_state_dict(checkpoint['model_state_dict'], strict=False)", "time_used_all = list() T_gt_all = list() T_pred_all = list() t_err_all = list() r_err_all", "t_err: {} %'.format(t_err_mean)) print('KITTI r_err: {} deg/m'.format(r_err_mean)) with open(os.path.join(out_folder, 'metrics.txt'), 'w') as f:", "open(args.config) as f: config = json.load(f) config_copy = os.path.join(out_folder, os.path.basename(args.config)) if args.config !=", "f.write('sequence name: translation error (%) rotation error (deg/m)\\n') for seq_name, t_err, r_err in", "filtering='mask') src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def", "i=1, draw_on='detector_scores', filtering='none') keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img = draw_src_tgt_matches(batch, out, config, draw_uncertainty_scale=20) 
src_tgt_matches_img.save(os.path.join(out_folder, 'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi)))", "detector_scores_img = draw_detector_scores(out, i=1) detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img = draw_weights(out, i=1) weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1))) keypoints_img", "src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def print_used_time(model): print(\"Time used:\")", "== 'UnderTheRadar': model = UnderTheRadar(config).to(config['gpuid']) elif config['model'] == 'HERO': model = HERO(config).to(config['gpuid']) model.solver.sliding_flag", "keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='none') keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img", "_, _, test_loader = get_dataloaders_boreas(config) elif config['dataset'] == 'radiate': _, _, test_loader =", "batch: T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred = out['R'][0].detach().cpu().numpy().squeeze() t_pred = out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred, t_pred)) elif config['model'] ==", "T_pred, [len(T_gt)]) print('SEQ: {} : {}'.format(seq_num, seq_name)) print('KITTI t_err: {} %'.format(t_err)) print('KITTI r_err:", "not None: fail_folder = os.path.join(out_folder, 'failed_{}'.format(batchi)) 
os.makedirs(fail_folder, exist_ok=True) makedirs_for_visualization(fail_folder) visualize(batchi, batch, out, config,", "build_parser(): parser = argparse.ArgumentParser() parser.add_argument('--config', type=str, required=True) parser.add_argument('--checkpoint', type=str, required=True) parser.add_argument('-no-vis', '--no-visualization', action='store_true')", "config, out_folder): radar_img = draw_radar(batch, i=1) radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1))) mask_img = draw_mask(batch, i=1) mask_img.save(os.path.join(out_folder,", "batch, out, config, fail_folder) print_used_time(model) raise out['exception'] if with_visualization and batchi % config['vis_rate']", "returnTensor=False, savePDF=True, fnames=[fname]) else: plot_sequences(T_pred, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) print('time_used: {}'.format(sum(time_used_all) /", "Keypoint matching: {} s\".format(np.mean(model.time_used['keypoint_matching']))) print(\" Optimization: {} s\".format(np.mean(model.time_used['optimization']))) if __name__ == '__main__': torch.set_num_threads(8)", "T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) else: plot_sequences(T_pred, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) print('time_used:", "print('time_used: {}'.format(sum(time_used_all) / len(time_used_all))) if len(T_gt_all) > 0: results = computeMedianError(T_gt_all, T_pred_all) print('dt:", "line = '{}: {} {}\\n'.format(seq_name, t_err, r_err) f.write(line) f.write(\"\\n\") f.write(\"mean: {} {}\\n\".format(t_err_mean, r_err_mean))", "elif config['dataset'] == 'boreas': _, _, test_loader = get_dataloaders_boreas(config) elif config['dataset'] == 'radiate':", "error (deg/m)\\n') for seq_name, t_err, r_err in zip(seq_name_all, t_err_all, r_err_all): line = '{}:", "for w in range(config['window_size'] - 1): if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) 
T_pred.append(get_T_ba(out, a=w,", "i=1) detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img = draw_weights(out, i=1) weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1))) keypoints_img = draw_keypoints(batch, out,", "weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1))) keypoints_img = draw_keypoints(batch, out, config, i=1, draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img =", "filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder, 'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', draw_uncertainty_scale=20) src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img", "keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img = draw_src_tgt_matches(batch, out, config, draw_uncertainty_scale=20) src_tgt_matches_img.save(os.path.join(out_folder, 'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch,", "makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj() for batchi, batch in enumerate(test_loader): ts = time() with torch.no_grad(): out", "out, config, filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder, 'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', draw_uncertainty_scale=20) 
src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder,", "torch.backends.cudnn.enabled = True torch.backends.cudnn.deterministic = True def build_parser(): parser = argparse.ArgumentParser() parser.add_argument('--config', type=str,", "out, config, out_vis_folder) if config['model'] == 'UnderTheRadar': if 'T_21' in batch: T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred", "- 1: for w in range(config['window_size'] - 1): if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze())", "print('KITTI r_err: {} deg/m'.format(r_err_mean)) with open(os.path.join(out_folder, 'metrics.txt'), 'w') as f: f.write('sequence name: translation", "seq_len = test_loader.dataset.seq_lens[0] seq_name = test_loader.dataset.sequences[0] time_used = list() T_gt = list() T_pred", "'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def print_used_time(model): print(\"Time", "parser.add_argument('-no-vis', '--no-visualization', action='store_true') parser.add_argument('-out-fld', '--out-folder', type=str, required=True) return parser def makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder, 'radar'),", "fail_folder) print_used_time(model) raise out['exception'] if with_visualization and batchi % config['vis_rate'] == 0: visualize(batchi,", "elif config['model'] == 'HERO': if batchi == len(test_loader) - 1: for w in", "= json.load(f) config_copy = os.path.join(out_folder, os.path.basename(args.config)) if args.config != config_copy: shutil.copy(args.config, config_copy) if", "import plot_sequences, draw_radar, draw_mask, draw_masked_radar, draw_detector_scores, \\ 
draw_weights, draw_keypoints, draw_src_tgt_matches torch.backends.cudnn.benchmark = False", "draw_detector_scores, \\ draw_weights, draw_keypoints, draw_src_tgt_matches torch.backends.cudnn.benchmark = False torch.backends.cudnn.enabled = True torch.backends.cudnn.deterministic =", "T_pred_all) print('dt: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(results[0], results[1], results[2], results[3])) t_err_mean", "os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "{} s\".format(np.mean(model.time_used['all']))) print(\" Feature map extraction: {} s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\" Keypoint extraction: {} s\".format(np.mean(model.time_used['keypoint_extraction'])))", "config, draw_on='detector_scores', filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def print_used_time(model): print(\"Time used:\") print(\" All: {} s\".format(np.mean(model.time_used['all'])))", "== 'oxford': _, _, test_loader = get_dataloaders(config) elif config['dataset'] == 'boreas': _, _,", "exist_ok=True) with open(args.config) as f: config = json.load(f) config_copy = os.path.join(out_folder, os.path.basename(args.config)) if", "= torch.load(args.checkpoint, map_location=torch.device(config['gpuid'])) failed = False try: model.load_state_dict(checkpoint['model_state_dict'], strict=False) except Exception as e:", "radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1))) mask_img = draw_mask(batch, i=1) mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1))) masked_radar_img = draw_masked_radar(batch, i=1) 
masked_radar_img.save(os.path.join(out_folder,", "config, fail_folder) print_used_time(model) raise out['exception'] if with_visualization and batchi % config['vis_rate'] == 0:", "os import shutil import numpy as np import torch from datasets.oxford import get_dataloaders", "mask_img = draw_mask(batch, i=1) mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1))) masked_radar_img = draw_masked_radar(batch, i=1) masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) detector_scores_img", "= np.mean(t_err_all) r_err_mean = np.mean(r_err_all) print('Average KITTI metrics over all test sequences:') print('KITTI", "required=True) parser.add_argument('--checkpoint', type=str, required=True) parser.add_argument('-no-vis', '--no-visualization', action='store_true') parser.add_argument('-out-fld', '--out-folder', type=str, required=True) return parser", "draw_keypoints(batch, out, config, i=1, draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img = draw_keypoints(batch, out, config, i=1,", "with torch.no_grad(): out = model(batch) if out['exception'] is not None: fail_folder = os.path.join(out_folder,", "/ len(time_used_all))) if len(T_gt_all) > 0: results = computeMedianError(T_gt_all, T_pred_all) print('dt: {} sigma_dt:", "True seq_name_all = list() time_used_all = list() T_gt_all = list() T_pred_all = list()", "{}'.format(seq_num, seq_len, seq_name)) if with_visualization: out_vis_folder = os.path.join(out_folder, seq_name) makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj() for batchi,", "strict=False) except Exception as e: print(e) failed = True if failed: model.load_state_dict(checkpoint, strict=False)", "model.no_throw = True seq_name_all = list() time_used_all = list() T_gt_all = list() T_pred_all", "Feature map extraction: {} 
s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\" Keypoint extraction: {} s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\" Keypoint matching:", "r_err: {} deg/m'.format(r_err_mean)) with open(os.path.join(out_folder, 'metrics.txt'), 'w') as f: f.write('sequence name: translation error", "from networks.hero import HERO from utils.utils import get_transform2, get_T_ba, computeKittiMetrics, computeMedianError from utils.vis", "draw_src_tgt_matches(batch, out, config, filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder, 'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', draw_uncertainty_scale=20)", "{}): {}'.format(seq_num, seq_len, seq_name)) if with_visualization: out_vis_folder = os.path.join(out_folder, seq_name) makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj() for", "torch.set_num_threads(8) parser = build_parser() args = parser.parse_args() out_folder = args.out_folder with_visualization = not", "{}'.format(sum(time_used_all) / len(time_used_all))) if len(T_gt_all) > 0: results = computeMedianError(T_gt_all, T_pred_all) print('dt: {}", "os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "radar_img = draw_radar(batch, i=1) radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1))) mask_img = draw_mask(batch, i=1) mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1))) masked_radar_img", "'w') as f: f.write('sequence name: translation error (%) rotation error (deg/m)\\n') for 
seq_name,", "= os.path.join(out_folder, seq_name) makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj() for batchi, batch in enumerate(test_loader): ts = time()", "= computeMedianError(T_gt_all, T_pred_all) print('dt: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(results[0], results[1], results[2],", "draw_uncertainty_scale=20) src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='mask') src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img", "out, config, i=1, filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img = draw_keypoints(batch, out, config, i=1, filtering='none')", "== 'UnderTheRadar': if 'T_21' in batch: T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred = out['R'][0].detach().cpu().numpy().squeeze() t_pred = out['t'][0].detach().cpu().numpy().squeeze()", "config, draw_on='detector_scores', filtering='mask') src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder,", "list() print('Evaluating sequence {} (len {}): {}'.format(seq_num, seq_len, seq_name)) if with_visualization: out_vis_folder =", "config['dataset'] == 'oxford': _, _, test_loader = 
get_dataloaders(config) elif config['dataset'] == 'boreas': _,", "True torch.backends.cudnn.deterministic = True def build_parser(): parser = argparse.ArgumentParser() parser.add_argument('--config', type=str, required=True) parser.add_argument('--checkpoint',", "fname = os.path.join(out_folder, seq_name + '.png') if len(T_gt) > 0: plot_sequences(T_gt, T_pred, [len(T_pred)],", "draw_on='detector_scores', filtering='none') keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img = draw_src_tgt_matches(batch, out, config, draw_uncertainty_scale=20) src_tgt_matches_img.save(os.path.join(out_folder, 'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) src_tgt_matches_only_masked_img", "type=str, required=True) return parser def makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "i=1) radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1))) mask_img = draw_mask(batch, i=1) mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1))) masked_radar_img = draw_masked_radar(batch, i=1)", "out, config, i=1, draw_on='detector_scores', filtering='none') keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img = draw_src_tgt_matches(batch, out, config, draw_uncertainty_scale=20)", "out['exception'] if with_visualization and batchi % config['vis_rate'] == 0: visualize(batchi, batch, out, config,", "if with_visualization: out_vis_folder = os.path.join(out_folder, seq_name) makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj() for batchi, batch in enumerate(test_loader):", "{} : {}'.format(seq_num, seq_name)) print('KITTI 
t_err: {} %'.format(t_err)) print('KITTI r_err: {} deg/m'.format(r_err)) t_err_all.append(t_err)", "= False torch.backends.cudnn.enabled = True torch.backends.cudnn.deterministic = True def build_parser(): parser = argparse.ArgumentParser()", "'HERO': model = HERO(config).to(config['gpuid']) model.solver.sliding_flag = False checkpoint = torch.load(args.checkpoint, map_location=torch.device(config['gpuid'])) failed =", "t_pred)) elif config['model'] == 'HERO': if batchi == len(test_loader) - 1: for w", "{} s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\" Keypoint extraction: {} s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\" Keypoint matching: {} s\".format(np.mean(model.time_used['keypoint_matching']))) print(\"", "batchi == len(test_loader) - 1: for w in range(config['window_size'] - 1): if 'T_21'", "print('KITTI r_err: {} deg/m'.format(r_err)) t_err_all.append(t_err) r_err_all.append(r_err) fname = os.path.join(out_folder, seq_name + '.png') if", "'keypoints_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches'),", "= draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', draw_uncertainty_scale=20) keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch, out,", "> 0: results = computeMedianError(T_gt_all, T_pred_all) print('dt: {} sigma_dt: {} dr: {} sigma_dr:", "config, i=1, filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img = 
draw_keypoints(batch, out, config, i=1, filtering='none') keypoints_all_img.save(os.path.join(out_folder,", "= draw_keypoints(batch, out, config, i=1, filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img = draw_keypoints(batch, out, config,", "with open(args.config) as f: config = json.load(f) config_copy = os.path.join(out_folder, os.path.basename(args.config)) if args.config", "'src_tgt_matches_on_detector_scores_all'), exist_ok=True) def visualize(batchi, batch, out, config, out_folder): radar_img = draw_radar(batch, i=1) radar_img.save(os.path.join(out_folder,", "print(\"Time used:\") print(\" All: {} s\".format(np.mean(model.time_used['all']))) print(\" Feature map extraction: {} s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\"", "name: translation error (%) rotation error (deg/m)\\n') for seq_name, t_err, r_err in zip(seq_name_all,", "os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "if config['dataset'] == 'oxford': _, _, test_loader = get_dataloaders(config) elif config['dataset'] == 'boreas':", "config, out_vis_folder) if config['model'] == 'UnderTheRadar': if 'T_21' in batch: T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred =", "%'.format(t_err)) print('KITTI r_err: {} deg/m'.format(r_err)) t_err_all.append(t_err) r_err_all.append(r_err) fname = os.path.join(out_folder, seq_name + '.png')", "get_T_ba, computeKittiMetrics, computeMedianError from utils.vis import plot_sequences, draw_radar, draw_mask, draw_masked_radar, draw_detector_scores, \\ draw_weights,", "and batchi % config['vis_rate'] == 0: 
visualize(batchi, batch, out, config, out_vis_folder) if config['model']", "= UnderTheRadar(config).to(config['gpuid']) elif config['model'] == 'HERO': model = HERO(config).to(config['gpuid']) model.solver.sliding_flag = False checkpoint", "out, config, draw_on='detector_scores', filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def print_used_time(model): print(\"Time used:\") print(\" All: {}", "plot_sequences, draw_radar, draw_mask, draw_masked_radar, draw_detector_scores, \\ draw_weights, draw_keypoints, draw_src_tgt_matches torch.backends.cudnn.benchmark = False torch.backends.cudnn.enabled", "'UnderTheRadar': model = UnderTheRadar(config).to(config['gpuid']) elif config['model'] == 'HERO': model = HERO(config).to(config['gpuid']) model.solver.sliding_flag =", "makedirs_for_visualization(fail_folder) visualize(batchi, batch, out, config, fail_folder) print_used_time(model) raise out['exception'] if with_visualization and batchi", "(batchi + 1) % config['print_rate'] == 0: print('Eval Batch {} / {}: {:.2}s'.format(batchi,", "= get_dataloaders_radiate(config) seq_len = test_loader.dataset.seq_lens[0] seq_name = test_loader.dataset.sequences[0] time_used = list() T_gt =", "json.load(f) config_copy = os.path.join(out_folder, os.path.basename(args.config)) if args.config != config_copy: shutil.copy(args.config, config_copy) if config['model']", "seq_name) makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj() for batchi, batch in enumerate(test_loader): ts = time() with torch.no_grad():", "/ {}: {:.2}s'.format(batchi, len(test_loader), np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used) if len(T_gt) > 0: seq_name_all.append(seq_name) T_gt_all.extend(T_gt) T_pred_all.extend(T_pred)", "out, config, draw_on='detector_scores', filtering='mask') 
src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='none')", "os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "draw_on='detector_scores', filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def print_used_time(model): print(\"Time used:\") print(\" All: {} s\".format(np.mean(model.time_used['all']))) print(\"", "_, test_loader = get_dataloaders_radiate(config) seq_len = test_loader.dataset.seq_lens[0] seq_name = test_loader.dataset.sequences[0] time_used = list()", "import HERO from utils.utils import get_transform2, get_T_ba, computeKittiMetrics, computeMedianError from utils.vis import plot_sequences,", "from utils.utils import get_transform2, get_T_ba, computeKittiMetrics, computeMedianError from utils.vis import plot_sequences, draw_radar, draw_mask,", "HERO(config).to(config['gpuid']) model.solver.sliding_flag = False checkpoint = torch.load(args.checkpoint, map_location=torch.device(config['gpuid'])) failed = False try: model.load_state_dict(checkpoint['model_state_dict'],", "= 0 if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) time_used.append(time() - ts)", "= get_dataloaders(config) elif config['dataset'] == 'boreas': _, _, test_loader = get_dataloaders_boreas(config) elif 
config['dataset']", "if len(T_gt) > 0: plot_sequences(T_gt, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) else: plot_sequences(T_pred, T_pred,", "all test sequences:') print('KITTI t_err: {} %'.format(t_err_mean)) print('KITTI r_err: {} deg/m'.format(r_err_mean)) with open(os.path.join(out_folder,", "from datasets.radiate import get_dataloaders_radiate from networks.under_the_radar import UnderTheRadar from networks.hero import HERO from", "= draw_mask(batch, i=1) mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1))) masked_radar_img = draw_masked_radar(batch, i=1) masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) detector_scores_img =", "import torch from datasets.oxford import get_dataloaders from datasets.boreas import get_dataloaders_boreas from datasets.radiate import", "get_dataloaders_radiate(config) seq_len = test_loader.dataset.seq_lens[0] seq_name = test_loader.dataset.sequences[0] time_used = list() T_gt = list()", "strict=False) model.eval() model.no_throw = True seq_name_all = list() time_used_all = list() T_gt_all =", "batchi, batch in enumerate(test_loader): ts = time() with torch.no_grad(): out = model(batch) if", "= computeKittiMetrics(T_gt, T_pred, [len(T_gt)]) print('SEQ: {} : {}'.format(seq_num, seq_name)) print('KITTI t_err: {} %'.format(t_err))", "config, i=1, draw_on='detector_scores', filtering='mask') keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores',", "utils.vis import plot_sequences, draw_radar, draw_mask, draw_masked_radar, draw_detector_scores, \\ draw_weights, draw_keypoints, draw_src_tgt_matches torch.backends.cudnn.benchmark =", "as f: config = json.load(f) config_copy = 
os.path.join(out_folder, os.path.basename(args.config)) if args.config != config_copy:", "os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "checkpoint = torch.load(args.checkpoint, map_location=torch.device(config['gpuid'])) failed = False try: model.load_state_dict(checkpoint['model_state_dict'], strict=False) except Exception as", "numpy as np import torch from datasets.oxford import get_dataloaders from datasets.boreas import get_dataloaders_boreas", "test_loader = get_dataloaders_radiate(config) seq_len = test_loader.dataset.seq_lens[0] seq_name = test_loader.dataset.sequences[0] time_used = list() T_gt", "parser.add_argument('-out-fld', '--out-folder', type=str, required=True) return parser def makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'mask'),", "if failed: model.load_state_dict(checkpoint, strict=False) model.eval() model.no_throw = True seq_name_all = list() time_used_all =", "= draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def print_used_time(model): print(\"Time used:\") print(\"", "'HERO': if batchi == len(test_loader) - 1: for w in range(config['window_size'] - 1):", "print(e) failed = True if failed: model.load_state_dict(checkpoint, strict=False) model.eval() model.no_throw = True seq_name_all", "f: f.write('sequence name: translation error (%) rotation error (deg/m)\\n') for seq_name, t_err, r_err", "exist_ok=True) os.makedirs(os.path.join(out_folder, 
'src_tgt_matches_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True)", "'oxford': _, _, test_loader = get_dataloaders(config) elif config['dataset'] == 'boreas': _, _, test_loader", "print_used_time(model): print(\"Time used:\") print(\" All: {} s\".format(np.mean(model.time_used['all']))) print(\" Feature map extraction: {} s\".format(np.mean(model.time_used['feature_map_extraction'])))", "len(T_gt) > 0: seq_name_all.append(seq_name) T_gt_all.extend(T_gt) T_pred_all.extend(T_pred) t_err, r_err = computeKittiMetrics(T_gt, T_pred, [len(T_gt)]) print('SEQ:", "f: config = json.load(f) config_copy = os.path.join(out_folder, os.path.basename(args.config)) if args.config != config_copy: shutil.copy(args.config,", "time_used = list() T_gt = list() T_pred = list() print('Evaluating sequence {} (len", "0: print('Eval Batch {} / {}: {:.2}s'.format(batchi, len(test_loader), np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used) if len(T_gt) >", "build_parser() args = parser.parse_args() out_folder = args.out_folder with_visualization = not args.no_visualization os.makedirs(out_folder, exist_ok=True)", "== '__main__': torch.set_num_threads(8) parser = build_parser() args = parser.parse_args() out_folder = args.out_folder with_visualization", "sigma_dr: {}'.format(results[0], results[1], results[2], results[3])) t_err_mean = np.mean(t_err_all) r_err_mean = np.mean(r_err_all) print('Average KITTI", "parser = build_parser() args = parser.parse_args() out_folder = args.out_folder with_visualization = not args.no_visualization", "time_used.append(time() - ts) if (batchi + 1) % config['print_rate'] == 0: print('Eval Batch", "draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', draw_uncertainty_scale=20) 
keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch, out, config,", "list() t_err_all = list() r_err_all = list() seq_nums = config['test_split'] for seq_num in", "visualize(batchi, batch, out, config, out_vis_folder) if config['model'] == 'UnderTheRadar': if 'T_21' in batch:", "argparse.ArgumentParser() parser.add_argument('--config', type=str, required=True) parser.add_argument('--checkpoint', type=str, required=True) parser.add_argument('-no-vis', '--no-visualization', action='store_true') parser.add_argument('-out-fld', '--out-folder', type=str,", "os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "'keypoints_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'),", "= draw_keypoints(batch, out, config, i=1, draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img = draw_keypoints(batch, out, config,", "- ts) if (batchi + 1) % config['print_rate'] == 0: print('Eval Batch {}", "src_tgt_matches_img = draw_src_tgt_matches(batch, out, config, draw_uncertainty_scale=20) src_tgt_matches_img.save(os.path.join(out_folder, 'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) 
src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch, out, config,", "test_loader.dataset.seq_lens[0] seq_name = test_loader.dataset.sequences[0] time_used = list() T_gt = list() T_pred = list()", "test_loader.dataset.sequences[0] time_used = list() T_gt = list() T_pred = list() print('Evaluating sequence {}", "{} deg/m'.format(r_err)) t_err_all.append(t_err) r_err_all.append(r_err) fname = os.path.join(out_folder, seq_name + '.png') if len(T_gt) >", "= list() r_err_all = list() seq_nums = config['test_split'] for seq_num in seq_nums: config['test_split']", "a=w, b=w+1)) time_used.append(time() - ts) if (batchi + 1) % config['print_rate'] == 0:", "import get_dataloaders_boreas from datasets.radiate import get_dataloaders_radiate from networks.under_the_radar import UnderTheRadar from networks.hero import", "print('SEQ: {} : {}'.format(seq_num, seq_name)) print('KITTI t_err: {} %'.format(t_err)) print('KITTI r_err: {} deg/m'.format(r_err))", "config['model'] == 'HERO': model = HERO(config).to(config['gpuid']) model.solver.sliding_flag = False checkpoint = torch.load(args.checkpoint, map_location=torch.device(config['gpuid']))", "draw_on='detector_scores', draw_uncertainty_scale=20) src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='mask') src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi)))", "list() time_used_all = list() T_gt_all = list() T_pred_all = list() t_err_all = list()", "config['test_split'] for seq_num in seq_nums: config['test_split'] = [seq_num] if config['dataset'] == 'oxford': _,", "filtering='none') 
keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img = draw_src_tgt_matches(batch, out, config, draw_uncertainty_scale=20) src_tgt_matches_img.save(os.path.join(out_folder, 'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) src_tgt_matches_only_masked_img =", "{}: {:.2}s'.format(batchi, len(test_loader), np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used) if len(T_gt) > 0: seq_name_all.append(seq_name) T_gt_all.extend(T_gt) T_pred_all.extend(T_pred) t_err,", "extraction: {} s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\" Keypoint extraction: {} s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\" Keypoint matching: {} s\".format(np.mean(model.time_used['keypoint_matching'])))", "== 0: print('Eval Batch {} / {}: {:.2}s'.format(batchi, len(test_loader), np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used) if len(T_gt)", "keypoints_on_detector_scores_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', draw_uncertainty_scale=20) keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch,", "len(test_loader) - 1: for w in range(config['window_size'] - 1): if 'T_21' in batch:", "'src_tgt_matches_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True) def visualize(batchi, batch, out, config,", "'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) time_used.append(time() - ts) if (batchi +", "'weights'), exist_ok=True) 
os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'),", "draw_keypoints(batch, out, config, i=1, filtering='none') keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) keypoints_on_detector_scores_img = draw_keypoints(batch, out, config, i=1,", "deg/m'.format(r_err_mean)) with open(os.path.join(out_folder, 'metrics.txt'), 'w') as f: f.write('sequence name: translation error (%) rotation", "exist_ok=True) def visualize(batchi, batch, out, config, out_folder): radar_img = draw_radar(batch, i=1) radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1)))", "'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='mask') keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img =", "if len(T_gt) > 0: seq_name_all.append(seq_name) T_gt_all.extend(T_gt) T_pred_all.extend(T_pred) t_err, r_err = computeKittiMetrics(T_gt, T_pred, [len(T_gt)])", "src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch, out, config, filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img = draw_src_tgt_matches(batch, out, config,", "src_tgt_matches_all_img = draw_src_tgt_matches(batch, out, config, filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder, 'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) 
src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch, out, config,", "as e: print(e) failed = True if failed: model.load_state_dict(checkpoint, strict=False) model.eval() model.no_throw =", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'), exist_ok=True)", "computeMedianError(T_gt_all, T_pred_all) print('dt: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(results[0], results[1], results[2], results[3]))", "print_used_time(model) raise out['exception'] if with_visualization and batchi % config['vis_rate'] == 0: visualize(batchi, batch,", "config['print_rate'] == 0: print('Eval Batch {} / {}: {:.2}s'.format(batchi, len(test_loader), np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used) if", "config, i=1, filtering='none') keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) keypoints_on_detector_scores_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', draw_uncertainty_scale=20)", "in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) time_used.append(time() - ts) if (batchi + 1)", "'--out-folder', type=str, required=True) return parser def makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True)", "np.mean(t_err_all) r_err_mean = np.mean(r_err_all) print('Average KITTI metrics over all test sequences:') print('KITTI t_err:", "test sequences:') print('KITTI t_err: {} %'.format(t_err_mean)) print('KITTI r_err: {} deg/m'.format(r_err_mean)) with open(os.path.join(out_folder, 'metrics.txt'),", "a=w, b=w+1)) else: w = 0 if 'T_21' 
in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w,", "parser = argparse.ArgumentParser() parser.add_argument('--config', type=str, required=True) parser.add_argument('--checkpoint', type=str, required=True) parser.add_argument('-no-vis', '--no-visualization', action='store_true') parser.add_argument('-out-fld',", "os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True) def visualize(batchi, batch, out, config, out_folder): radar_img = draw_radar(batch, i=1)", "T_pred_all = list() t_err_all = list() r_err_all = list() seq_nums = config['test_split'] for", "s\".format(np.mean(model.time_used['all']))) print(\" Feature map extraction: {} s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\" Keypoint extraction: {} s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\"", "T_pred = list() print('Evaluating sequence {} (len {}): {}'.format(seq_num, seq_len, seq_name)) if with_visualization:", "filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img = draw_keypoints(batch, out, config, i=1, filtering='none') keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) keypoints_on_detector_scores_img", "draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='mask') keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img = draw_keypoints(batch, out, config,", "in batch: T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred = out['R'][0].detach().cpu().numpy().squeeze() t_pred = out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred, t_pred)) elif config['model']", "= args.out_folder with_visualization = 
not args.no_visualization os.makedirs(out_folder, exist_ok=True) with open(args.config) as f: config", "config, i=1, draw_on='detector_scores', filtering='none') keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img = draw_src_tgt_matches(batch, out, config, draw_uncertainty_scale=20) src_tgt_matches_img.save(os.path.join(out_folder,", "config['dataset'] == 'boreas': _, _, test_loader = get_dataloaders_boreas(config) elif config['dataset'] == 'radiate': _,", "def makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'detector_scores'),", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True) def visualize(batchi, batch, out, config, out_folder): radar_img = draw_radar(batch,", "e: print(e) failed = True if failed: model.load_state_dict(checkpoint, strict=False) model.eval() model.no_throw = True", "= draw_keypoints(batch, out, config, i=1, filtering='none') keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) keypoints_on_detector_scores_img = draw_keypoints(batch, out, config,", "src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def print_used_time(model): print(\"Time used:\") print(\" All: {} s\".format(np.mean(model.time_used['all']))) print(\" Feature map", "json from time import time import os import shutil import numpy as np", "ts = time() with torch.no_grad(): out = model(batch) if out['exception'] is not None:", 
"'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img = draw_src_tgt_matches(batch, out, config, filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder, 'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch, out,", "= list() time_used_all = list() T_gt_all = list() T_pred_all = list() t_err_all =", "UnderTheRadar from networks.hero import HERO from utils.utils import get_transform2, get_T_ba, computeKittiMetrics, computeMedianError from", "def build_parser(): parser = argparse.ArgumentParser() parser.add_argument('--config', type=str, required=True) parser.add_argument('--checkpoint', type=str, required=True) parser.add_argument('-no-vis', '--no-visualization',", "keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='mask') keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img", "w = 0 if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) time_used.append(time() -", "os.path.join(out_folder, seq_name + '.png') if len(T_gt) > 0: plot_sequences(T_gt, T_pred, [len(T_pred)], returnTensor=False, savePDF=True,", "r_err_all.append(r_err) fname = os.path.join(out_folder, seq_name + '.png') if len(T_gt) > 0: plot_sequences(T_gt, T_pred,", "for batchi, batch in enumerate(test_loader): ts = time() with torch.no_grad(): out = model(batch)", "exist_ok=True) makedirs_for_visualization(fail_folder) visualize(batchi, batch, out, config, fail_folder) print_used_time(model) raise out['exception'] if 
with_visualization and", "os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', draw_uncertainty_scale=20) src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores',", "draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='none') keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img = draw_src_tgt_matches(batch, out, config,", "print('KITTI t_err: {} %'.format(t_err_mean)) print('KITTI r_err: {} deg/m'.format(r_err_mean)) with open(os.path.join(out_folder, 'metrics.txt'), 'w') as", "% config['vis_rate'] == 0: visualize(batchi, batch, out, config, out_vis_folder) if config['model'] == 'UnderTheRadar':", "datasets.boreas import get_dataloaders_boreas from datasets.radiate import get_dataloaders_radiate from networks.under_the_radar import UnderTheRadar from networks.hero", "w in range(config['window_size'] - 1): if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1))", "s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\" Keypoint matching: {} s\".format(np.mean(model.time_used['keypoint_matching']))) print(\" Optimization: {} s\".format(np.mean(model.time_used['optimization']))) if __name__ ==", "return parser def 
makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True)", "= os.path.join(out_folder, os.path.basename(args.config)) if args.config != config_copy: shutil.copy(args.config, config_copy) if config['model'] == 'UnderTheRadar':", "s\".format(np.mean(model.time_used['optimization']))) if __name__ == '__main__': torch.set_num_threads(8) parser = build_parser() args = parser.parse_args() out_folder", "shutil.copy(args.config, config_copy) if config['model'] == 'UnderTheRadar': model = UnderTheRadar(config).to(config['gpuid']) elif config['model'] == 'HERO':", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True)", "in enumerate(test_loader): ts = time() with torch.no_grad(): out = model(batch) if out['exception'] is", "= time() with torch.no_grad(): out = model(batch) if out['exception'] is not None: fail_folder", "import numpy as np import torch from datasets.oxford import get_dataloaders from datasets.boreas import", "args.no_visualization os.makedirs(out_folder, exist_ok=True) with open(args.config) as f: config = json.load(f) config_copy = os.path.join(out_folder,", "= True if failed: model.load_state_dict(checkpoint, strict=False) model.eval() model.no_throw = True seq_name_all = list()", "config['model'] == 'HERO': if batchi == len(test_loader) - 1: for w in range(config['window_size']", "out, config, filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img = 
draw_src_tgt_matches(batch, out, config, filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder, 'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi)))", "print(\" Optimization: {} s\".format(np.mean(model.time_used['optimization']))) if __name__ == '__main__': torch.set_num_threads(8) parser = build_parser() args", "model(batch) if out['exception'] is not None: fail_folder = os.path.join(out_folder, 'failed_{}'.format(batchi)) os.makedirs(fail_folder, exist_ok=True) makedirs_for_visualization(fail_folder)", "t_err_all.append(t_err) r_err_all.append(r_err) fname = os.path.join(out_folder, seq_name + '.png') if len(T_gt) > 0: plot_sequences(T_gt,", "sequences:') print('KITTI t_err: {} %'.format(t_err_mean)) print('KITTI r_err: {} deg/m'.format(r_err_mean)) with open(os.path.join(out_folder, 'metrics.txt'), 'w')", "range(config['window_size'] - 1): if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) else: w", "= False checkpoint = torch.load(args.checkpoint, map_location=torch.device(config['gpuid'])) failed = False try: model.load_state_dict(checkpoint['model_state_dict'], strict=False) except", "if out['exception'] is not None: fail_folder = os.path.join(out_folder, 'failed_{}'.format(batchi)) os.makedirs(fail_folder, exist_ok=True) makedirs_for_visualization(fail_folder) visualize(batchi,", "out, config, draw_uncertainty_scale=20) src_tgt_matches_img.save(os.path.join(out_folder, 'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch, out, config, filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi)))", "print('Evaluating sequence {} (len {}): {}'.format(seq_num, seq_len, seq_name)) if with_visualization: out_vis_folder = os.path.join(out_folder,", "= [seq_num] if config['dataset'] == 'oxford': _, _, test_loader = 
get_dataloaders(config) elif config['dataset']", "draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='mask') src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores',", "'mask'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints'),", "filtering='none') keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) keypoints_on_detector_scores_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', draw_uncertainty_scale=20) keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1)))", "sigma_dt: {} dr: {} sigma_dr: {}'.format(results[0], results[1], results[2], results[3])) t_err_mean = np.mean(t_err_all) r_err_mean", "filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def print_used_time(model): print(\"Time used:\") print(\" All: {} s\".format(np.mean(model.time_used['all']))) print(\" Feature", "in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) else: w = 0 if 'T_21' in", "if with_visualization and batchi % config['vis_rate'] == 0: visualize(batchi, batch, out, config, out_vis_folder)", "ts) if (batchi + 1) % config['print_rate'] == 0: print('Eval Batch {} /", "0: seq_name_all.append(seq_name) T_gt_all.extend(T_gt) 
T_pred_all.extend(T_pred) t_err, r_err = computeKittiMetrics(T_gt, T_pred, [len(T_gt)]) print('SEQ: {} :", "parser.add_argument('--config', type=str, required=True) parser.add_argument('--checkpoint', type=str, required=True) parser.add_argument('-no-vis', '--no-visualization', action='store_true') parser.add_argument('-out-fld', '--out-folder', type=str, required=True)", "'radar/radar_{}.png'.format(batchi+1))) mask_img = draw_mask(batch, i=1) mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1))) masked_radar_img = draw_masked_radar(batch, i=1) masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1)))", "results = computeMedianError(T_gt_all, T_pred_all) print('dt: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(results[0], results[1],", "import UnderTheRadar from networks.hero import HERO from utils.utils import get_transform2, get_T_ba, computeKittiMetrics, computeMedianError", "'weights/weights_{}.png'.format(batchi+1))) keypoints_img = draw_keypoints(batch, out, config, i=1, draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img = draw_keypoints(batch,", "draw_radar, draw_mask, draw_masked_radar, draw_detector_scores, \\ draw_weights, draw_keypoints, draw_src_tgt_matches torch.backends.cudnn.benchmark = False torch.backends.cudnn.enabled =", "= test_loader.dataset.sequences[0] time_used = list() T_gt = list() T_pred = list() print('Evaluating sequence", "else: w = 0 if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) time_used.append(time()", "'keypoints_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'), exist_ok=True) 
os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'),", "plot_sequences(T_pred, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) print('time_used: {}'.format(sum(time_used_all) / len(time_used_all))) if len(T_gt_all) >", "error (%) rotation error (deg/m)\\n') for seq_name, t_err, r_err in zip(seq_name_all, t_err_all, r_err_all):", "% config['print_rate'] == 0: print('Eval Batch {} / {}: {:.2}s'.format(batchi, len(test_loader), np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used)", "os.path.join(out_folder, os.path.basename(args.config)) if args.config != config_copy: shutil.copy(args.config, config_copy) if config['model'] == 'UnderTheRadar': model", "print(\" All: {} s\".format(np.mean(model.time_used['all']))) print(\" Feature map extraction: {} s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\" Keypoint extraction:", "matching: {} s\".format(np.mean(model.time_used['keypoint_matching']))) print(\" Optimization: {} s\".format(np.mean(model.time_used['optimization']))) if __name__ == '__main__': torch.set_num_threads(8) parser", "seq_nums = config['test_split'] for seq_num in seq_nums: config['test_split'] = [seq_num] if config['dataset'] ==", "i=1, filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img = draw_keypoints(batch, out, config, i=1, filtering='none') keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1)))", "visualize(batchi, batch, out, config, out_folder): radar_img = draw_radar(batch, i=1) radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1))) mask_img =", "in seq_nums: config['test_split'] = [seq_num] if config['dataset'] == 'oxford': _, _, test_loader =", "is not None: fail_folder = os.path.join(out_folder, 'failed_{}'.format(batchi)) os.makedirs(fail_folder, exist_ok=True) 
makedirs_for_visualization(fail_folder) visualize(batchi, batch, out,", "t_err, r_err = computeKittiMetrics(T_gt, T_pred, [len(T_gt)]) print('SEQ: {} : {}'.format(seq_num, seq_name)) print('KITTI t_err:", "model.load_state_dict(checkpoint['model_state_dict'], strict=False) except Exception as e: print(e) failed = True if failed: model.load_state_dict(checkpoint,", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True)", "{} %'.format(t_err_mean)) print('KITTI r_err: {} deg/m'.format(r_err_mean)) with open(os.path.join(out_folder, 'metrics.txt'), 'w') as f: f.write('sequence", "r_err_all = list() seq_nums = config['test_split'] for seq_num in seq_nums: config['test_split'] = [seq_num]", "batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) else: w = 0 if 'T_21' in batch:", "savePDF=True, fnames=[fname]) print('time_used: {}'.format(sum(time_used_all) / len(time_used_all))) if len(T_gt_all) > 0: results = computeMedianError(T_gt_all,", "used:\") print(\" All: {} s\".format(np.mean(model.time_used['all']))) print(\" Feature map extraction: {} s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\" Keypoint", "'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='none') keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img =", "visualize(batchi, batch, out, config, fail_folder) print_used_time(model) raise out['exception'] if with_visualization and batchi %", "= 
draw_src_tgt_matches(batch, out, config, filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder, 'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores',", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True)", "T_gt_all = list() T_pred_all = list() t_err_all = list() r_err_all = list() seq_nums", "translation error (%) rotation error (deg/m)\\n') for seq_name, t_err, r_err in zip(seq_name_all, t_err_all,", "os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True) def visualize(batchi, batch, out,", "'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img = draw_weights(out, i=1) weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1))) keypoints_img = draw_keypoints(batch, out, config, i=1,", "'boreas': _, _, test_loader = get_dataloaders_boreas(config) elif config['dataset'] == 'radiate': _, _, test_loader", "'T_21' in batch: T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred = out['R'][0].detach().cpu().numpy().squeeze() t_pred = out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred, t_pred)) elif", "T_pred.append(get_transform2(R_pred, t_pred)) elif config['model'] == 'HERO': if batchi == len(test_loader) - 1: for", "= HERO(config).to(config['gpuid']) model.solver.sliding_flag = False checkpoint = torch.load(args.checkpoint, map_location=torch.device(config['gpuid'])) failed = False try:", 
"draw_weights(out, i=1) weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1))) keypoints_img = draw_keypoints(batch, out, config, i=1, draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1)))", "= list() T_pred_all = list() t_err_all = list() r_err_all = list() seq_nums =", "import get_dataloaders_radiate from networks.under_the_radar import UnderTheRadar from networks.hero import HERO from utils.utils import", "args.config != config_copy: shutil.copy(args.config, config_copy) if config['model'] == 'UnderTheRadar': model = UnderTheRadar(config).to(config['gpuid']) elif", "True if failed: model.load_state_dict(checkpoint, strict=False) model.eval() model.no_throw = True seq_name_all = list() time_used_all", "{}'.format(results[0], results[1], results[2], results[3])) t_err_mean = np.mean(t_err_all) r_err_mean = np.mean(r_err_all) print('Average KITTI metrics", "if __name__ == '__main__': torch.set_num_threads(8) parser = build_parser() args = parser.parse_args() out_folder =", "_, test_loader = get_dataloaders_boreas(config) elif config['dataset'] == 'radiate': _, _, test_loader = get_dataloaders_radiate(config)", "out, config, i=1, draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img = draw_keypoints(batch, out, config, i=1, filtering='mask')", "out_folder): radar_img = draw_radar(batch, i=1) radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1))) mask_img = draw_mask(batch, i=1) mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1)))", "with_visualization: out_vis_folder = os.path.join(out_folder, seq_name) makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj() for batchi, batch in enumerate(test_loader): ts", "np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used) if len(T_gt) > 0: 
seq_name_all.append(seq_name) T_gt_all.extend(T_gt) T_pred_all.extend(T_pred) t_err, r_err = computeKittiMetrics(T_gt,", "networks.under_the_radar import UnderTheRadar from networks.hero import HERO from utils.utils import get_transform2, get_T_ba, computeKittiMetrics,", "config, filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img = draw_src_tgt_matches(batch, out, config, filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder, 'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_img", "draw_src_tgt_matches(batch, out, config, draw_uncertainty_scale=20) src_tgt_matches_img.save(os.path.join(out_folder, 'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch, out, config, filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder,", "args = parser.parse_args() out_folder = args.out_folder with_visualization = not args.no_visualization os.makedirs(out_folder, exist_ok=True) with", "T_gt_all.extend(T_gt) T_pred_all.extend(T_pred) t_err, r_err = computeKittiMetrics(T_gt, T_pred, [len(T_gt)]) print('SEQ: {} : {}'.format(seq_num, seq_name))", "config['dataset'] == 'radiate': _, _, test_loader = get_dataloaders_radiate(config) seq_len = test_loader.dataset.seq_lens[0] seq_name =", "seq_nums: config['test_split'] = [seq_num] if config['dataset'] == 'oxford': _, _, test_loader = get_dataloaders(config)", "seq_name, t_err, r_err in zip(seq_name_all, t_err_all, r_err_all): line = '{}: {} {}\\n'.format(seq_name, t_err,", "filtering='mask') keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img = draw_keypoints(batch, out, config, i=1, 
draw_on='detector_scores', filtering='none') keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1)))", "== 'HERO': model = HERO(config).to(config['gpuid']) model.solver.sliding_flag = False checkpoint = torch.load(args.checkpoint, map_location=torch.device(config['gpuid'])) failed", "fnames=[fname]) print('time_used: {}'.format(sum(time_used_all) / len(time_used_all))) if len(T_gt_all) > 0: results = computeMedianError(T_gt_all, T_pred_all)", "Keypoint extraction: {} s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\" Keypoint matching: {} s\".format(np.mean(model.time_used['keypoint_matching']))) print(\" Optimization: {} s\".format(np.mean(model.time_used['optimization'])))", "draw_mask(batch, i=1) mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1))) masked_radar_img = draw_masked_radar(batch, i=1) masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) detector_scores_img = draw_detector_scores(out,", "for seq_num in seq_nums: config['test_split'] = [seq_num] if config['dataset'] == 'oxford': _, _,", "if (batchi + 1) % config['print_rate'] == 0: print('Eval Batch {} / {}:", "out, config, i=1, filtering='none') keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) keypoints_on_detector_scores_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores',", "T_pred_all.extend(T_pred) t_err, r_err = computeKittiMetrics(T_gt, T_pred, [len(T_gt)]) print('SEQ: {} : {}'.format(seq_num, seq_name)) print('KITTI", "computeMedianError from utils.vis import plot_sequences, draw_radar, draw_mask, draw_masked_radar, draw_detector_scores, \\ draw_weights, draw_keypoints, draw_src_tgt_matches", "{} / {}: {:.2}s'.format(batchi, len(test_loader), np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used) if 
len(T_gt) > 0: seq_name_all.append(seq_name) T_gt_all.extend(T_gt)", "batch, out, config, out_vis_folder) if config['model'] == 'UnderTheRadar': if 'T_21' in batch: T_gt.append(batch['T_21'][0].numpy().squeeze())", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True)", "keypoints_only_masked_img = draw_keypoints(batch, out, config, i=1, filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img = draw_keypoints(batch, out,", "(len {}): {}'.format(seq_num, seq_len, seq_name)) if with_visualization: out_vis_folder = os.path.join(out_folder, seq_name) makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj()", "t_err: {} %'.format(t_err)) print('KITTI r_err: {} deg/m'.format(r_err)) t_err_all.append(t_err) r_err_all.append(r_err) fname = os.path.join(out_folder, seq_name", "out, config, i=1, draw_on='detector_scores', filtering='mask') keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img = draw_keypoints(batch, out, config, i=1,", "False checkpoint = torch.load(args.checkpoint, map_location=torch.device(config['gpuid'])) failed = False try: model.load_state_dict(checkpoint['model_state_dict'], strict=False) except Exception", "= test_loader.dataset.seq_lens[0] seq_name = test_loader.dataset.sequences[0] time_used = list() T_gt = list() T_pred =", "== 'radiate': _, _, test_loader = get_dataloaders_radiate(config) seq_len = test_loader.dataset.seq_lens[0] seq_name = 
test_loader.dataset.sequences[0]", "keypoints_all_img = draw_keypoints(batch, out, config, i=1, filtering='none') keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) keypoints_on_detector_scores_img = draw_keypoints(batch, out,", "1) % config['print_rate'] == 0: print('Eval Batch {} / {}: {:.2}s'.format(batchi, len(test_loader), np.mean(time_used[-config['print_rate']:])))", "{} sigma_dt: {} dr: {} sigma_dr: {}'.format(results[0], results[1], results[2], results[3])) t_err_mean = np.mean(t_err_all)", "draw_masked_radar, draw_detector_scores, \\ draw_weights, draw_keypoints, draw_src_tgt_matches torch.backends.cudnn.benchmark = False torch.backends.cudnn.enabled = True torch.backends.cudnn.deterministic", "s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\" Keypoint extraction: {} s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\" Keypoint matching: {} s\".format(np.mean(model.time_used['keypoint_matching']))) print(\" Optimization:", "time_used_all.extend(time_used) if len(T_gt) > 0: seq_name_all.append(seq_name) T_gt_all.extend(T_gt) T_pred_all.extend(T_pred) t_err, r_err = computeKittiMetrics(T_gt, T_pred,", "os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "{:.2}s'.format(batchi, len(test_loader), np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used) if len(T_gt) > 0: seq_name_all.append(seq_name) T_gt_all.extend(T_gt) T_pred_all.extend(T_pred) t_err, r_err", "masked_radar_img = draw_masked_radar(batch, i=1) masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) 
detector_scores_img = draw_detector_scores(out, i=1) detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img", "None: fail_folder = os.path.join(out_folder, 'failed_{}'.format(batchi)) os.makedirs(fail_folder, exist_ok=True) makedirs_for_visualization(fail_folder) visualize(batchi, batch, out, config, fail_folder)", "= list() seq_nums = config['test_split'] for seq_num in seq_nums: config['test_split'] = [seq_num] if", "os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "with_visualization and batchi % config['vis_rate'] == 0: visualize(batchi, batch, out, config, out_vis_folder) if", "{}'.format(seq_num, seq_name)) print('KITTI t_err: {} %'.format(t_err)) print('KITTI r_err: {} deg/m'.format(r_err)) t_err_all.append(t_err) r_err_all.append(r_err) fname", "seq_name = test_loader.dataset.sequences[0] time_used = list() T_gt = list() T_pred = list() print('Evaluating", "model.load_state_dict(checkpoint, strict=False) model.eval() model.no_throw = True seq_name_all = list() time_used_all = list() T_gt_all", "sequence {} (len {}): {}'.format(seq_num, seq_len, seq_name)) if with_visualization: out_vis_folder = os.path.join(out_folder, seq_name)", "if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) else: w = 0 if", "try: model.load_state_dict(checkpoint['model_state_dict'], strict=False) except Exception as e: print(e) failed = True if failed:", "print('KITTI t_err: {} %'.format(t_err)) print('KITTI r_err: {} deg/m'.format(r_err)) t_err_all.append(t_err) r_err_all.append(r_err) fname = os.path.join(out_folder,", "in range(config['window_size'] - 1): if 'T_21' in batch: 
T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) else:", "'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='mask') src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch,", "= list() print('Evaluating sequence {} (len {}): {}'.format(seq_num, seq_len, seq_name)) if with_visualization: out_vis_folder", "from utils.vis import plot_sequences, draw_radar, draw_mask, draw_masked_radar, draw_detector_scores, \\ draw_weights, draw_keypoints, draw_src_tgt_matches torch.backends.cudnn.benchmark", "map_location=torch.device(config['gpuid'])) failed = False try: model.load_state_dict(checkpoint['model_state_dict'], strict=False) except Exception as e: print(e) failed", "detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img = draw_weights(out, i=1) weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1))) keypoints_img = draw_keypoints(batch, out, config,", "makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True)", "def visualize(batchi, batch, out, config, out_folder): radar_img = draw_radar(batch, i=1) radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1))) mask_img", "draw_detector_scores(out, i=1) detector_scores_img.save(os.path.join(out_folder, 
'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img = draw_weights(out, i=1) weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1))) keypoints_img = draw_keypoints(batch,", "1: for w in range(config['window_size'] - 1): if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out,", "os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True) def visualize(batchi, batch, out, config, out_folder): radar_img", "b=w+1)) else: w = 0 if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1))", "'.png') if len(T_gt) > 0: plot_sequences(T_gt, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) else: plot_sequences(T_pred,", "batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) time_used.append(time() - ts) if (batchi + 1) %", "seq_len, seq_name)) if with_visualization: out_vis_folder = os.path.join(out_folder, seq_name) makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj() for batchi, batch", "t_err_mean = np.mean(t_err_all) r_err_mean = np.mean(r_err_all) print('Average KITTI metrics over all test sequences:')", "= draw_detector_scores(out, i=1) detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img = draw_weights(out, i=1) weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1))) keypoints_img =", "= out['R'][0].detach().cpu().numpy().squeeze() t_pred = out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred, t_pred)) elif config['model'] == 'HERO': if batchi", "{} sigma_dr: {}'.format(results[0], results[1], results[2], results[3])) t_err_mean = np.mean(t_err_all) r_err_mean = np.mean(r_err_all) 
print('Average", "os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "fnames=[fname]) else: plot_sequences(T_pred, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) print('time_used: {}'.format(sum(time_used_all) / len(time_used_all))) if", "0 if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) time_used.append(time() - ts) if", "required=True) parser.add_argument('-no-vis', '--no-visualization', action='store_true') parser.add_argument('-out-fld', '--out-folder', type=str, required=True) return parser def makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder,", "datasets.oxford import get_dataloaders from datasets.boreas import get_dataloaders_boreas from datasets.radiate import get_dataloaders_radiate from networks.under_the_radar", "args.out_folder with_visualization = not args.no_visualization os.makedirs(out_folder, exist_ok=True) with open(args.config) as f: config =", "out = model(batch) if out['exception'] is not None: fail_folder = os.path.join(out_folder, 'failed_{}'.format(batchi)) os.makedirs(fail_folder,", "(%) rotation error (deg/m)\\n') for seq_name, t_err, r_err in zip(seq_name_all, t_err_all, r_err_all): line", "0: visualize(batchi, batch, out, config, out_vis_folder) if config['model'] == 'UnderTheRadar': if 'T_21' in", "list() T_pred_all = list() t_err_all = list() r_err_all = list() seq_nums = config['test_split']", "get_dataloaders_boreas(config) elif config['dataset'] == 'radiate': _, _, test_loader = get_dataloaders_radiate(config) seq_len = test_loader.dataset.seq_lens[0]", "src_tgt_matches_img.save(os.path.join(out_folder, 
'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch, out, config, filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img = draw_src_tgt_matches(batch,", "seq_name)) print('KITTI t_err: {} %'.format(t_err)) print('KITTI r_err: {} deg/m'.format(r_err)) t_err_all.append(t_err) r_err_all.append(r_err) fname =", "'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch, out, config, filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img = draw_src_tgt_matches(batch, out,", "t_pred = out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred, t_pred)) elif config['model'] == 'HERO': if batchi == len(test_loader)", "seq_name_all = list() time_used_all = list() T_gt_all = list() T_pred_all = list() t_err_all", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True)", "os.makedirs(out_folder, exist_ok=True) with open(args.config) as f: config = json.load(f) config_copy = os.path.join(out_folder, os.path.basename(args.config))", "if config['model'] == 'UnderTheRadar': if 'T_21' in batch: T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred = out['R'][0].detach().cpu().numpy().squeeze() t_pred", "config['model'] == 'UnderTheRadar': if 'T_21' in batch: T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred = out['R'][0].detach().cpu().numpy().squeeze() t_pred =", "T_pred.append(get_T_ba(out, a=w, b=w+1)) time_used.append(time() - ts) if 
(batchi + 1) % config['print_rate'] ==", "'metrics.txt'), 'w') as f: f.write('sequence name: translation error (%) rotation error (deg/m)\\n') for", "'masked_radar_vis'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'),", "config_copy = os.path.join(out_folder, os.path.basename(args.config)) if args.config != config_copy: shutil.copy(args.config, config_copy) if config['model'] ==", "draw_on='detector_scores', filtering='mask') src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi)))", "out, config, i=1, draw_on='detector_scores', draw_uncertainty_scale=20) keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch, out, config, i=1,", "import json from time import time import os import shutil import numpy as", "i=1) masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) detector_scores_img = draw_detector_scores(out, i=1) detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img = draw_weights(out, i=1)", "r_err = computeKittiMetrics(T_gt, T_pred, [len(T_gt)]) print('SEQ: {} : {}'.format(seq_num, seq_name)) print('KITTI 
t_err: {}", "out, config, draw_on='detector_scores', draw_uncertainty_scale=20) src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='mask')", "os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "type=str, required=True) parser.add_argument('--checkpoint', type=str, required=True) parser.add_argument('-no-vis', '--no-visualization', action='store_true') parser.add_argument('-out-fld', '--out-folder', type=str, required=True) return", "over all test sequences:') print('KITTI t_err: {} %'.format(t_err_mean)) print('KITTI r_err: {} deg/m'.format(r_err_mean)) with", "list() r_err_all = list() seq_nums = config['test_split'] for seq_num in seq_nums: config['test_split'] =", "len(test_loader), np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used) if len(T_gt) > 0: seq_name_all.append(seq_name) T_gt_all.extend(T_gt) T_pred_all.extend(T_pred) t_err, r_err =", "0: plot_sequences(T_gt, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) else: plot_sequences(T_pred, T_pred, [len(T_pred)], returnTensor=False, savePDF=True,", "len(time_used_all))) if len(T_gt_all) > 0: results = computeMedianError(T_gt_all, T_pred_all) print('dt: {} sigma_dt: {}", "from time import time import os import shutil import numpy as np import", "parser def makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True) 
os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "print(\" Keypoint matching: {} s\".format(np.mean(model.time_used['keypoint_matching']))) print(\" Optimization: {} s\".format(np.mean(model.time_used['optimization']))) if __name__ == '__main__':", "parser.parse_args() out_folder = args.out_folder with_visualization = not args.no_visualization os.makedirs(out_folder, exist_ok=True) with open(args.config) as", "i=1, draw_on='detector_scores', filtering='mask') keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1))) keypoints_on_detector_scores_all_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='none')", "time import os import shutil import numpy as np import torch from datasets.oxford", "= list() T_gt_all = list() T_pred_all = list() t_err_all = list() r_err_all =", "config['test_split'] = [seq_num] if config['dataset'] == 'oxford': _, _, test_loader = get_dataloaders(config) elif", "= get_dataloaders_boreas(config) elif config['dataset'] == 'radiate': _, _, test_loader = get_dataloaders_radiate(config) seq_len =", "elif config['dataset'] == 'radiate': _, _, test_loader = get_dataloaders_radiate(config) seq_len = test_loader.dataset.seq_lens[0] seq_name", "seq_name + '.png') if len(T_gt) > 0: plot_sequences(T_gt, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname])", "rotation error (deg/m)\\n') for seq_name, t_err, r_err in zip(seq_name_all, t_err_all, r_err_all): line =", "shutil import numpy as np import torch from datasets.oxford import get_dataloaders from datasets.boreas", "draw_src_tgt_matches torch.backends.cudnn.benchmark = False torch.backends.cudnn.enabled = True torch.backends.cudnn.deterministic = True def build_parser(): parser", "keypoints_img.save(os.path.join(out_folder, 
'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img = draw_keypoints(batch, out, config, i=1, filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img =", "{} deg/m'.format(r_err_mean)) with open(os.path.join(out_folder, 'metrics.txt'), 'w') as f: f.write('sequence name: translation error (%)", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True)", "draw_keypoints, draw_src_tgt_matches torch.backends.cudnn.benchmark = False torch.backends.cudnn.enabled = True torch.backends.cudnn.deterministic = True def build_parser():", "as f: f.write('sequence name: translation error (%) rotation error (deg/m)\\n') for seq_name, t_err,", "get_dataloaders_radiate from networks.under_the_radar import UnderTheRadar from networks.hero import HERO from utils.utils import get_transform2,", "seq_num in seq_nums: config['test_split'] = [seq_num] if config['dataset'] == 'oxford': _, _, test_loader", "== 'boreas': _, _, test_loader = get_dataloaders_boreas(config) elif config['dataset'] == 'radiate': _, _,", "[len(T_gt)]) print('SEQ: {} : {}'.format(seq_num, seq_name)) print('KITTI t_err: {} %'.format(t_err)) print('KITTI r_err: {}", "os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "t_err, r_err in zip(seq_name_all, t_err_all, r_err_all): line = '{}: {} {}\\n'.format(seq_name, t_err, 
r_err)", "\\ draw_weights, draw_keypoints, draw_src_tgt_matches torch.backends.cudnn.benchmark = False torch.backends.cudnn.enabled = True torch.backends.cudnn.deterministic = True", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True)", "out, config, fail_folder) print_used_time(model) raise out['exception'] if with_visualization and batchi % config['vis_rate'] ==", "= draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', draw_uncertainty_scale=20) src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img = draw_src_tgt_matches(batch, out, config,", "t_err_all = list() r_err_all = list() seq_nums = config['test_split'] for seq_num in seq_nums:", "time() with torch.no_grad(): out = model(batch) if out['exception'] is not None: fail_folder =", "draw_masked_radar(batch, i=1) masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) detector_scores_img = draw_detector_scores(out, i=1) detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img = draw_weights(out,", "!= config_copy: shutil.copy(args.config, config_copy) if config['model'] == 'UnderTheRadar': model = UnderTheRadar(config).to(config['gpuid']) elif config['model']", "list() T_pred = list() print('Evaluating sequence {} (len {}): {}'.format(seq_num, seq_len, seq_name)) if", "config['model'] == 'UnderTheRadar': model = UnderTheRadar(config).to(config['gpuid']) elif config['model'] == 'HERO': model = HERO(config).to(config['gpuid'])", "dr: {} 
sigma_dr: {}'.format(results[0], results[1], results[2], results[3])) t_err_mean = np.mean(t_err_all) r_err_mean = np.mean(r_err_all)", "i=1) mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1))) masked_radar_img = draw_masked_radar(batch, i=1) masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) detector_scores_img = draw_detector_scores(out, i=1)", "elif config['model'] == 'HERO': model = HERO(config).to(config['gpuid']) model.solver.sliding_flag = False checkpoint = torch.load(args.checkpoint,", "results[3])) t_err_mean = np.mean(t_err_all) r_err_mean = np.mean(r_err_all) print('Average KITTI metrics over all test", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True)", "keypoints_on_detector_scores_all_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='none') keypoints_on_detector_scores_all_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img = draw_src_tgt_matches(batch,", "'keypoints_on_detector_scores_all/keypoints_on_detector_scores_all_{}.png'.format(batchi+1))) src_tgt_matches_img = draw_src_tgt_matches(batch, out, config, draw_uncertainty_scale=20) src_tgt_matches_img.save(os.path.join(out_folder, 'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch, out,", "'__main__': torch.set_num_threads(8) parser = build_parser() args = parser.parse_args() out_folder = args.out_folder with_visualization =", "out['R'][0].detach().cpu().numpy().squeeze() t_pred = out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred, t_pred)) elif 
config['model'] == 'HERO': if batchi ==", "'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img = draw_keypoints(batch, out, config, i=1, filtering='none') keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) keypoints_on_detector_scores_img = draw_keypoints(batch,", "+ '.png') if len(T_gt) > 0: plot_sequences(T_gt, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) else:", "'src_tgt_matches_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'),", "Batch {} / {}: {:.2}s'.format(batchi, len(test_loader), np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used) if len(T_gt) > 0: seq_name_all.append(seq_name)", "list() T_gt_all = list() T_pred_all = list() t_err_all = list() r_err_all = list()", "{} s\".format(np.mean(model.time_used['keypoint_matching']))) print(\" Optimization: {} s\".format(np.mean(model.time_used['optimization']))) if __name__ == '__main__': torch.set_num_threads(8) parser =", "i=1, draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img = draw_keypoints(batch, out, config, i=1, filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1)))", "{} dr: {} sigma_dr: {}'.format(results[0], results[1], results[2], results[3])) t_err_mean = np.mean(t_err_all) r_err_mean =", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 
'keypoints_on_detector_scores_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'), exist_ok=True)", "config_copy: shutil.copy(args.config, config_copy) if config['model'] == 'UnderTheRadar': model = UnderTheRadar(config).to(config['gpuid']) elif config['model'] ==", "torch.backends.cudnn.benchmark = False torch.backends.cudnn.enabled = True torch.backends.cudnn.deterministic = True def build_parser(): parser =", "results[2], results[3])) t_err_mean = np.mean(t_err_all) r_err_mean = np.mean(r_err_all) print('Average KITTI metrics over all", "torch.backends.cudnn.deterministic = True def build_parser(): parser = argparse.ArgumentParser() parser.add_argument('--config', type=str, required=True) parser.add_argument('--checkpoint', type=str,", "'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) detector_scores_img = draw_detector_scores(out, i=1) detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img = draw_weights(out, i=1) weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1)))", "raise out['exception'] if with_visualization and batchi % config['vis_rate'] == 0: visualize(batchi, batch, out,", "filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img = draw_src_tgt_matches(batch, out, config, filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder, 'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_img =", "os.path.join(out_folder, seq_name) makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj() for batchi, batch in enumerate(test_loader): ts = time() with", "get_dataloaders_boreas from datasets.radiate import get_dataloaders_radiate from 
networks.under_the_radar import UnderTheRadar from networks.hero import HERO", "np.mean(r_err_all) print('Average KITTI metrics over all test sequences:') print('KITTI t_err: {} %'.format(t_err_mean)) print('KITTI", "out, config, out_folder): radar_img = draw_radar(batch, i=1) radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1))) mask_img = draw_mask(batch, i=1)", "'UnderTheRadar': if 'T_21' in batch: T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred = out['R'][0].detach().cpu().numpy().squeeze() t_pred = out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred,", "for seq_name, t_err, r_err in zip(seq_name_all, t_err_all, r_err_all): line = '{}: {} {}\\n'.format(seq_name,", "= draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='mask') src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch, out, config,", "out_vis_folder = os.path.join(out_folder, seq_name) makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj() for batchi, batch in enumerate(test_loader): ts =", "config, draw_on='detector_scores', draw_uncertainty_scale=20) src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='mask') src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder,", "config['vis_rate'] == 0: visualize(batchi, batch, out, config, out_vis_folder) if config['model'] == 'UnderTheRadar': if", "out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred, t_pred)) elif config['model'] == 'HERO': 
if batchi == len(test_loader) - 1:", "keypoints_img = draw_keypoints(batch, out, config, i=1, draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img = draw_keypoints(batch, out,", "test_loader = get_dataloaders_boreas(config) elif config['dataset'] == 'radiate': _, _, test_loader = get_dataloaders_radiate(config) seq_len", "True def build_parser(): parser = argparse.ArgumentParser() parser.add_argument('--config', type=str, required=True) parser.add_argument('--checkpoint', type=str, required=True) parser.add_argument('-no-vis',", "= list() T_gt = list() T_pred = list() print('Evaluating sequence {} (len {}):", "= draw_radar(batch, i=1) radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1))) mask_img = draw_mask(batch, i=1) mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1))) masked_radar_img =", "get_dataloaders(config) elif config['dataset'] == 'boreas': _, _, test_loader = get_dataloaders_boreas(config) elif config['dataset'] ==", "if batchi == len(test_loader) - 1: for w in range(config['window_size'] - 1): if", "import shutil import numpy as np import torch from datasets.oxford import get_dataloaders from", "os.makedirs(os.path.join(out_folder, 'masked_radar_vis'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True) os.makedirs(os.path.join(out_folder,", "(deg/m)\\n') for seq_name, t_err, r_err in zip(seq_name_all, t_err_all, r_err_all): line = '{}: {}", "print('Average KITTI metrics over all test sequences:') print('KITTI t_err: {} %'.format(t_err_mean)) print('KITTI r_err:", "= np.mean(r_err_all) print('Average KITTI metrics over all test sequences:') print('KITTI t_err: {} %'.format(t_err_mean))", "== 'HERO': if batchi == len(test_loader) - 1: for w in 
range(config['window_size'] -", "T_gt = list() T_pred = list() print('Evaluating sequence {} (len {}): {}'.format(seq_num, seq_len,", "= os.path.join(out_folder, seq_name + '.png') if len(T_gt) > 0: plot_sequences(T_gt, T_pred, [len(T_pred)], returnTensor=False,", "'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) else: w = 0 if 'T_21'", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True) def visualize(batchi, batch,", "if config['model'] == 'UnderTheRadar': model = UnderTheRadar(config).to(config['gpuid']) elif config['model'] == 'HERO': model =", "utils.utils import get_transform2, get_T_ba, computeKittiMetrics, computeMedianError from utils.vis import plot_sequences, draw_radar, draw_mask, draw_masked_radar,", "draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def print_used_time(model): print(\"Time used:\") print(\" All:", "get_dataloaders from datasets.boreas import get_dataloaders_boreas from datasets.radiate import get_dataloaders_radiate from networks.under_the_radar import UnderTheRadar", "= parser.parse_args() out_folder = args.out_folder with_visualization = not args.no_visualization os.makedirs(out_folder, exist_ok=True) with open(args.config)", "'--no-visualization', action='store_true') parser.add_argument('-out-fld', '--out-folder', type=str, required=True) return parser def makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True)", "draw_radar(batch, i=1) 
radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1))) mask_img = draw_mask(batch, i=1) mask_img.save(os.path.join(out_folder, 'mask/mask_{}.png'.format(batchi+1))) masked_radar_img = draw_masked_radar(batch,", "draw_keypoints(batch, out, config, i=1, filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img = draw_keypoints(batch, out, config, i=1,", "draw_uncertainty_scale=20) keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='mask') keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked/keypoints_on_detector_scores_only_masked_{}.png'.format(batchi+1)))", "[seq_num] if config['dataset'] == 'oxford': _, _, test_loader = get_dataloaders(config) elif config['dataset'] ==", "if len(T_gt_all) > 0: results = computeMedianError(T_gt_all, T_pred_all) print('dt: {} sigma_dt: {} dr:", "> 0: plot_sequences(T_gt, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) else: plot_sequences(T_pred, T_pred, [len(T_pred)], returnTensor=False,", "config, draw_uncertainty_scale=20) src_tgt_matches_img.save(os.path.join(out_folder, 'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch, out, config, filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img", "parser.add_argument('--checkpoint', type=str, required=True) parser.add_argument('-no-vis', '--no-visualization', action='store_true') parser.add_argument('-out-fld', '--out-folder', type=str, required=True) 
return parser def", "torch.load(args.checkpoint, map_location=torch.device(config['gpuid'])) failed = False try: model.load_state_dict(checkpoint['model_state_dict'], strict=False) except Exception as e: print(e)", "draw_on='detector_scores', draw_uncertainty_scale=20) keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='mask') keypoints_on_detector_scores_only_masked_img.save(os.path.join(out_folder,", "'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img = draw_keypoints(batch, out, config, i=1, filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img = draw_keypoints(batch,", "+ 1) % config['print_rate'] == 0: print('Eval Batch {} / {}: {:.2}s'.format(batchi, len(test_loader),", "open(os.path.join(out_folder, 'metrics.txt'), 'w') as f: f.write('sequence name: translation error (%) rotation error (deg/m)\\n')", "i=1) weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1))) keypoints_img = draw_keypoints(batch, out, config, i=1, draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img", "from datasets.boreas import get_dataloaders_boreas from datasets.radiate import get_dataloaders_radiate from networks.under_the_radar import UnderTheRadar from", "else: plot_sequences(T_pred, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) print('time_used: {}'.format(sum(time_used_all) / len(time_used_all))) if len(T_gt_all)", "networks.hero import HERO from utils.utils import get_transform2, get_T_ba, computeKittiMetrics, computeMedianError from utils.vis import", "def print_used_time(model): 
print(\"Time used:\") print(\" All: {} s\".format(np.mean(model.time_used['all']))) print(\" Feature map extraction: {}", "map extraction: {} s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\" Keypoint extraction: {} s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\" Keypoint matching: {}", "draw_weights, draw_keypoints, draw_src_tgt_matches torch.backends.cudnn.benchmark = False torch.backends.cudnn.enabled = True torch.backends.cudnn.deterministic = True def", "= os.path.join(out_folder, 'failed_{}'.format(batchi)) os.makedirs(fail_folder, exist_ok=True) makedirs_for_visualization(fail_folder) visualize(batchi, batch, out, config, fail_folder) print_used_time(model) raise", "'detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'weights'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_all'),", "s\".format(np.mean(model.time_used['keypoint_matching']))) print(\" Optimization: {} s\".format(np.mean(model.time_used['optimization']))) if __name__ == '__main__': torch.set_num_threads(8) parser = build_parser()", ": {}'.format(seq_num, seq_name)) print('KITTI t_err: {} %'.format(t_err)) print('KITTI r_err: {} deg/m'.format(r_err)) t_err_all.append(t_err) r_err_all.append(r_err)", "with open(os.path.join(out_folder, 'metrics.txt'), 'w') as f: f.write('sequence name: translation error (%) rotation error", "= out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred, t_pred)) elif config['model'] == 'HERO': if batchi == len(test_loader) -", "config, i=1, draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img = draw_keypoints(batch, out, config, i=1, filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder,", "= 
config['test_split'] for seq_num in seq_nums: config['test_split'] = [seq_num] if config['dataset'] == 'oxford':", "metrics over all test sequences:') print('KITTI t_err: {} %'.format(t_err_mean)) print('KITTI r_err: {} deg/m'.format(r_err_mean))", "False try: model.load_state_dict(checkpoint['model_state_dict'], strict=False) except Exception as e: print(e) failed = True if", "i=1, filtering='none') keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) keypoints_on_detector_scores_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', draw_uncertainty_scale=20) keypoints_on_detector_scores_img.save(os.path.join(out_folder,", "from datasets.oxford import get_dataloaders from datasets.boreas import get_dataloaders_boreas from datasets.radiate import get_dataloaders_radiate from", "KITTI metrics over all test sequences:') print('KITTI t_err: {} %'.format(t_err_mean)) print('KITTI r_err: {}", "r_err in zip(seq_name_all, t_err_all, r_err_all): line = '{}: {} {}\\n'.format(seq_name, t_err, r_err) f.write(line)", "print(\" Feature map extraction: {} s\".format(np.mean(model.time_used['feature_map_extraction']))) print(\" Keypoint extraction: {} s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\" Keypoint", "R_pred = out['R'][0].detach().cpu().numpy().squeeze() t_pred = out['t'][0].detach().cpu().numpy().squeeze() T_pred.append(get_transform2(R_pred, t_pred)) elif config['model'] == 'HERO': if", "= True torch.backends.cudnn.deterministic = True def build_parser(): parser = argparse.ArgumentParser() parser.add_argument('--config', type=str, required=True)", "config, i=1, draw_on='detector_scores', draw_uncertainty_scale=20) keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores',", 
"batch, out, config, out_folder): radar_img = draw_radar(batch, i=1) radar_img.save(os.path.join(out_folder, 'radar/radar_{}.png'.format(batchi+1))) mask_img = draw_mask(batch,", "test_loader = get_dataloaders(config) elif config['dataset'] == 'boreas': _, _, test_loader = get_dataloaders_boreas(config) elif", "= draw_weights(out, i=1) weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1))) keypoints_img = draw_keypoints(batch, out, config, i=1, draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder,", "config = json.load(f) config_copy = os.path.join(out_folder, os.path.basename(args.config)) if args.config != config_copy: shutil.copy(args.config, config_copy)", "except Exception as e: print(e) failed = True if failed: model.load_state_dict(checkpoint, strict=False) model.eval()", "[len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) else: plot_sequences(T_pred, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) print('time_used: {}'.format(sum(time_used_all)", "_, _, test_loader = get_dataloaders_radiate(config) seq_len = test_loader.dataset.seq_lens[0] seq_name = test_loader.dataset.sequences[0] time_used =", "i=1, draw_on='detector_scores', draw_uncertainty_scale=20) keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', filtering='mask')", "src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='mask') src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 
'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img =", "import argparse import json from time import time import os import shutil import", "src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', draw_uncertainty_scale=20) src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_only_masked_img = draw_src_tgt_matches(batch, out,", "'src_tgt_matches_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True) def visualize(batchi,", "False torch.backends.cudnn.enabled = True torch.backends.cudnn.deterministic = True def build_parser(): parser = argparse.ArgumentParser() parser.add_argument('--config',", "import os import shutil import numpy as np import torch from datasets.oxford import", "'keypoints_on_detector_scores_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores'),", "config, filtering='none') src_tgt_matches_all_img.save(os.path.join(out_folder, 'src_tgt_matches_all/src_tgt_matches_all_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', draw_uncertainty_scale=20) src_tgt_matches_on_detector_scores_img.save(os.path.join(out_folder, 
'src_tgt_matches_on_detector_scores/src_tgt_matches_on_detector_scores_{}.png'.format(batchi)))", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True) def visualize(batchi, batch, out, config, out_folder):", "{} s\".format(np.mean(model.time_used['optimization']))) if __name__ == '__main__': torch.set_num_threads(8) parser = build_parser() args = parser.parse_args()", "zip(seq_name_all, t_err_all, r_err_all): line = '{}: {} {}\\n'.format(seq_name, t_err, r_err) f.write(line) f.write(\"\\n\") f.write(\"mean:", "src_tgt_matches_on_detector_scores_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_only_masked/src_tgt_matches_on_detector_scores_only_masked_{}.png'.format(batchi))) src_tgt_matches_on_detector_scores_all_img = draw_src_tgt_matches(batch, out, config, draw_on='detector_scores', filtering='none') src_tgt_matches_on_detector_scores_all_img.save(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all/src_tgt_matches_on_detector_scores_all_{}.png'.format(batchi))) def print_used_time(model):", "len(T_gt) > 0: plot_sequences(T_gt, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) else: plot_sequences(T_pred, T_pred, [len(T_pred)],", "plot_sequences(T_gt, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) else: plot_sequences(T_pred, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname])", "import get_transform2, get_T_ba, computeKittiMetrics, computeMedianError from utils.vis import plot_sequences, draw_radar, draw_mask, draw_masked_radar, draw_detector_scores,", "Optimization: {} s\".format(np.mean(model.time_used['optimization']))) if __name__ == '__main__': torch.set_num_threads(8) parser = build_parser() args =", "config_copy) if config['model'] == 'UnderTheRadar': model = 
UnderTheRadar(config).to(config['gpuid']) elif config['model'] == 'HERO': model", "= True seq_name_all = list() time_used_all = list() T_gt_all = list() T_pred_all =", "enumerate(test_loader): ts = time() with torch.no_grad(): out = model(batch) if out['exception'] is not", "required=True) return parser def makedirs_for_visualization(out_folder): os.makedirs(os.path.join(out_folder, 'radar'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'mask'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'masked_radar_vis'),", "draw_mask, draw_masked_radar, draw_detector_scores, \\ draw_weights, draw_keypoints, draw_src_tgt_matches torch.backends.cudnn.benchmark = False torch.backends.cudnn.enabled = True", "r_err_mean = np.mean(r_err_all) print('Average KITTI metrics over all test sequences:') print('KITTI t_err: {}", "= model(batch) if out['exception'] is not None: fail_folder = os.path.join(out_folder, 'failed_{}'.format(batchi)) os.makedirs(fail_folder, exist_ok=True)", "'keypoints'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_only_masked'),", "model.eval() model.no_throw = True seq_name_all = list() time_used_all = list() T_gt_all = list()", "draw_uncertainty_scale=20) src_tgt_matches_img.save(os.path.join(out_folder, 'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch, out, config, filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img =", "b=w+1)) time_used.append(time() - ts) if (batchi + 1) % config['print_rate'] == 0: print('Eval", "T_pred.append(get_T_ba(out, a=w, b=w+1)) else: w = 0 if 'T_21' 
in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out,", "draw_uncertainty_scale=20) keypoints_img.save(os.path.join(out_folder, 'keypoints/keypoints_{}.png'.format(batchi+1))) keypoints_only_masked_img = draw_keypoints(batch, out, config, i=1, filtering='mask') keypoints_only_masked_img.save(os.path.join(out_folder, 'keypoints_only_masked/keypoints_only_masked_{}.png'.format(batchi+1))) keypoints_all_img", "extraction: {} s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\" Keypoint matching: {} s\".format(np.mean(model.time_used['keypoint_matching']))) print(\" Optimization: {} s\".format(np.mean(model.time_used['optimization']))) if", "{} %'.format(t_err)) print('KITTI r_err: {} deg/m'.format(r_err)) t_err_all.append(t_err) r_err_all.append(r_err) fname = os.path.join(out_folder, seq_name +", "type=str, required=True) parser.add_argument('-no-vis', '--no-visualization', action='store_true') parser.add_argument('-out-fld', '--out-folder', type=str, required=True) return parser def makedirs_for_visualization(out_folder):", "if args.config != config_copy: shutil.copy(args.config, config_copy) if config['model'] == 'UnderTheRadar': model = UnderTheRadar(config).to(config['gpuid'])", "'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) keypoints_on_detector_scores_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', draw_uncertainty_scale=20) keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img =", "out_folder = args.out_folder with_visualization = not args.no_visualization os.makedirs(out_folder, exist_ok=True) with open(args.config) as f:", "- 1): if 'T_21' in batch: T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) else: w =", "print('Eval Batch {} / {}: {:.2}s'.format(batchi, len(test_loader), 
np.mean(time_used[-config['print_rate']:]))) time_used_all.extend(time_used) if len(T_gt) > 0:", "= not args.no_visualization os.makedirs(out_folder, exist_ok=True) with open(args.config) as f: config = json.load(f) config_copy", "exist_ok=True) os.makedirs(os.path.join(out_folder, 'keypoints_on_detector_scores_all'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_all'), exist_ok=True)", "= build_parser() args = parser.parse_args() out_folder = args.out_folder with_visualization = not args.no_visualization os.makedirs(out_folder,", "os.makedirs(fail_folder, exist_ok=True) makedirs_for_visualization(fail_folder) visualize(batchi, batch, out, config, fail_folder) print_used_time(model) raise out['exception'] if with_visualization", "weights_img = draw_weights(out, i=1) weights_img.save(os.path.join(out_folder, 'weights/weights_{}.png'.format(batchi+1))) keypoints_img = draw_keypoints(batch, out, config, i=1, draw_uncertainty_scale=20)", "seq_name)) if with_visualization: out_vis_folder = os.path.join(out_folder, seq_name) makedirs_for_visualization(out_vis_folder) model.solver.solver_cpp.resetTraj() for batchi, batch in", "= '{}: {} {}\\n'.format(seq_name, t_err, r_err) f.write(line) f.write(\"\\n\") f.write(\"mean: {} {}\\n\".format(t_err_mean, r_err_mean)) print_used_time(model)", "computeKittiMetrics, computeMedianError from utils.vis import plot_sequences, draw_radar, draw_mask, draw_masked_radar, draw_detector_scores, \\ draw_weights, draw_keypoints,", "savePDF=True, fnames=[fname]) else: plot_sequences(T_pred, T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) print('time_used: {}'.format(sum(time_used_all) / len(time_used_all)))", "keypoints_all_img.save(os.path.join(out_folder, 'keypoints_all/keypoints_all_{}.png'.format(batchi+1))) 
keypoints_on_detector_scores_img = draw_keypoints(batch, out, config, i=1, draw_on='detector_scores', draw_uncertainty_scale=20) keypoints_on_detector_scores_img.save(os.path.join(out_folder, 'keypoints_on_detector_scores/keypoints_on_detector_scores_{}.png'.format(batchi+1))) keypoints_on_detector_scores_only_masked_img", "= draw_masked_radar(batch, i=1) masked_radar_img.save(os.path.join(out_folder, 'masked_radar_vis/masked_radar_vis_{}.png'.format(batchi+1))) detector_scores_img = draw_detector_scores(out, i=1) detector_scores_img.save(os.path.join(out_folder, 'detector_scores/detector_scores_{}.png'.format(batchi+1))) weights_img =", "get_transform2, get_T_ba, computeKittiMetrics, computeMedianError from utils.vis import plot_sequences, draw_radar, draw_mask, draw_masked_radar, draw_detector_scores, \\", "print(\" Keypoint extraction: {} s\".format(np.mean(model.time_used['keypoint_extraction']))) print(\" Keypoint matching: {} s\".format(np.mean(model.time_used['keypoint_matching']))) print(\" Optimization: {}", "model.solver.solver_cpp.resetTraj() for batchi, batch in enumerate(test_loader): ts = time() with torch.no_grad(): out =", "= list() T_pred = list() print('Evaluating sequence {} (len {}): {}'.format(seq_num, seq_len, seq_name))", "%'.format(t_err_mean)) print('KITTI r_err: {} deg/m'.format(r_err_mean)) with open(os.path.join(out_folder, 'metrics.txt'), 'w') as f: f.write('sequence name:", "out['exception'] is not None: fail_folder = os.path.join(out_folder, 'failed_{}'.format(batchi)) os.makedirs(fail_folder, exist_ok=True) makedirs_for_visualization(fail_folder) visualize(batchi, batch,", "out_vis_folder) if config['model'] == 'UnderTheRadar': if 'T_21' in batch: T_gt.append(batch['T_21'][0].numpy().squeeze()) R_pred = out['R'][0].detach().cpu().numpy().squeeze()", "T_gt.append(batch['T_21'][w].numpy().squeeze()) T_pred.append(get_T_ba(out, a=w, b=w+1)) time_used.append(time() - ts) if (batchi + 1) % config['print_rate']", "fail_folder = 
os.path.join(out_folder, 'failed_{}'.format(batchi)) os.makedirs(fail_folder, exist_ok=True) makedirs_for_visualization(fail_folder) visualize(batchi, batch, out, config, fail_folder) print_used_time(model)", "model = HERO(config).to(config['gpuid']) model.solver.sliding_flag = False checkpoint = torch.load(args.checkpoint, map_location=torch.device(config['gpuid'])) failed = False", "as np import torch from datasets.oxford import get_dataloaders from datasets.boreas import get_dataloaders_boreas from", "Exception as e: print(e) failed = True if failed: model.load_state_dict(checkpoint, strict=False) model.eval() model.no_throw", "model = UnderTheRadar(config).to(config['gpuid']) elif config['model'] == 'HERO': model = HERO(config).to(config['gpuid']) model.solver.sliding_flag = False", "T_pred, [len(T_pred)], returnTensor=False, savePDF=True, fnames=[fname]) print('time_used: {}'.format(sum(time_used_all) / len(time_used_all))) if len(T_gt_all) > 0:", "= draw_src_tgt_matches(batch, out, config, draw_uncertainty_scale=20) src_tgt_matches_img.save(os.path.join(out_folder, 'src_tgt_matches/src_tgt_matches_{}.png'.format(batchi))) src_tgt_matches_only_masked_img = draw_src_tgt_matches(batch, out, config, filtering='mask')", "'radiate': _, _, test_loader = get_dataloaders_radiate(config) seq_len = test_loader.dataset.seq_lens[0] seq_name = test_loader.dataset.sequences[0] time_used", "import time import os import shutil import numpy as np import torch from", "= draw_src_tgt_matches(batch, out, config, filtering='mask') src_tgt_matches_only_masked_img.save(os.path.join(out_folder, 'src_tgt_matches_only_masked/src_tgt_matches_only_masked_{}.png'.format(batchi))) src_tgt_matches_all_img = draw_src_tgt_matches(batch, out, config, filtering='none')", "'src_tgt_matches_on_detector_scores_only_masked'), exist_ok=True) os.makedirs(os.path.join(out_folder, 'src_tgt_matches_on_detector_scores_all'), exist_ok=True) def visualize(batchi, batch, out, config, out_folder): radar_img 
=", "== len(test_loader) - 1: for w in range(config['window_size'] - 1): if 'T_21' in", "with_visualization = not args.no_visualization os.makedirs(out_folder, exist_ok=True) with open(args.config) as f: config = json.load(f)", "= False try: model.load_state_dict(checkpoint['model_state_dict'], strict=False) except Exception as e: print(e) failed = True", "= argparse.ArgumentParser() parser.add_argument('--config', type=str, required=True) parser.add_argument('--checkpoint', type=str, required=True) parser.add_argument('-no-vis', '--no-visualization', action='store_true') parser.add_argument('-out-fld', '--out-folder',", "> 0: seq_name_all.append(seq_name) T_gt_all.extend(T_gt) T_pred_all.extend(T_pred) t_err, r_err = computeKittiMetrics(T_gt, T_pred, [len(T_gt)]) print('SEQ: {}" ]
[ "this measures not the tangential vector, but rotational vector. \"\"\" # Type check", "# The vector is tangent to a circle made by r, thus the", "angle. B_x = B_t * np.cos(B_angle) B_y = B_t * np.sin(B_angle) # Return", "is not phi. B_angle = phi_subaxis + np.pi/2 # Calculate the components of", "rotational vector. \"\"\" # Type check r = valid.validate_float_array(r, deep_validate=True, greater_than=0) phi =", "is tangent to a circle made by r, thus the angle is related", "import Robustness.validation as valid import gaussian_fitting as gaussfit import bessel_fitting as bessfit import", "and the angle. B_x = B_t * np.cos(B_angle) B_y = B_t * np.sin(B_angle)", "np.zeros_like(r) # Return return B_r, B_phi, B_z def circular_magnetic_field_cart(x, y, propagation_function, tangential_axis='z'): \"\"\"", "the rotational vector, but tangential vector. The tangential axis is the axis of", "divergence. However, this measures not the rotational vector, but tangential vector. The tangential", "r_subaxis = np.hypot(x,y) phi_subaxis = np.arctan2(y,x) # Calculate the magnitude of the tangential", "fields without any divergence. However, this measures not the rotational vector, but tangential", "not phi. B_angle = phi_subaxis + np.pi/2 # Calculate the components of the", "# magnitude and the angle. B_x = B_t * np.cos(B_angle) B_y = B_t", "without any divergence. However, this measures not the tangential vector, but rotational vector.", "vector, but rotational vector. \"\"\" # Type check r = valid.validate_float_array(r, deep_validate=True, greater_than=0)", "any divergence. However, this measures not the rotational vector, but tangential vector. The", "the angle is related to # phi, but is not phi. 
B_angle =", "as sp import scipy.special as sp_spcl import matplotlib.pyplot as plt from Robustness.exception import", "Robustness.exception import * import Robustness.validation as valid import gaussian_fitting as gaussfit import bessel_fitting", "fields, in a way, fields without any divergence. However, this measures not the", "scipy.special as sp_spcl import matplotlib.pyplot as plt from Robustness.exception import * import Robustness.validation", "which is the axis of rotation for the field. Assume that the positive", "plt from Robustness.exception import * import Robustness.validation as valid import gaussian_fitting as gaussfit", "import numpy as np import scipy as sp import scipy.special as sp_spcl import", "makes circular magnetic fields, in a way, fields without any divergence. However, this", "gaussfit import bessel_fitting as bessfit import misc_functions as misc def circular_magnetic_field_cyln(r, phi, z,", "of which is the axis of rotation for the field. Assume that the", "= propagation_function(r) B_z = np.zeros_like(r) # Return return B_r, B_phi, B_z def circular_magnetic_field_cart(x,", "propagation_function(r) B_z = np.zeros_like(r) # Return return B_r, B_phi, B_z def circular_magnetic_field_cart(x, y,", "However, this measures not the rotational vector, but tangential vector. The tangential axis", "# Convert to a polar system for tangential vector. r_subaxis = np.hypot(x,y) phi_subaxis", "valid.validate_float_array(r, deep_validate=True, greater_than=0) phi = valid.validate_float_array(phi, deep_validate=True, greater_than=0, less_than=2*np.pi) z = valid.validate_float_array(z) #", "z, also, only the r value # matters in this function. B_r =", "its invariantness in phi and z, also, only the r value # matters", "# matters in this function. 
B_r = np.zeros_like(r) B_phi = propagation_function(r) B_z =", "check x = valid.validate_float_array(x) y = valid.validate_float_array(y) # Convert to a polar system", "circular_magnetic_field_cart(x, y, propagation_function, tangential_axis='z'): \"\"\" This makes circular magnetic fields, in a way,", "y = valid.validate_float_array(y) # Convert to a polar system for tangential vector. r_subaxis", "the angle. B_x = B_t * np.cos(B_angle) B_y = B_t * np.sin(B_angle) #", "y, propagation_function, tangential_axis='z'): \"\"\" This makes circular magnetic fields, in a way, fields", "axis is the axis of which is the axis of rotation for the", "vector. r_subaxis = np.hypot(x,y) phi_subaxis = np.arctan2(y,x) # Calculate the magnitude of the", "propagation_function, tangential_axis='z'): \"\"\" This makes circular magnetic fields, in a way, fields without", "Type check r = valid.validate_float_array(r, deep_validate=True, greater_than=0) phi = valid.validate_float_array(phi, deep_validate=True, greater_than=0, less_than=2*np.pi)", "a way, fields without any divergence. However, this measures not the rotational vector,", "magnetic fields, in a way, fields without any divergence. However, this measures not", "magnitude of the tangential vector. B_t = propagation_function(r_subaxis) # The vector is tangent", "np.pi/2 # Calculate the components of the magnetic field vector based on the", "a way, fields without any divergence. However, this measures not the tangential vector,", "as misc def circular_magnetic_field_cyln(r, phi, z, propagation_function, ): \"\"\" This makes circular magnetic", "However, this measures not the tangential vector, but rotational vector. \"\"\" # Type", "= valid.validate_float_array(phi, deep_validate=True, greater_than=0, less_than=2*np.pi) z = valid.validate_float_array(z) # Because of its invariantness", "tangential vector, but rotational vector. 
\"\"\" # Type check r = valid.validate_float_array(r, deep_validate=True,", "np.hypot(x,y) phi_subaxis = np.arctan2(y,x) # Calculate the magnitude of the tangential vector. B_t", "fields without any divergence. However, this measures not the tangential vector, but rotational", "# Return return B_r, B_phi, B_z def circular_magnetic_field_cart(x, y, propagation_function, tangential_axis='z'): \"\"\" This", "the field. Assume that the positive direction is pointing to the user. \"\"\"", "Robustness.validation as valid import gaussian_fitting as gaussfit import bessel_fitting as bessfit import misc_functions", "* import Robustness.validation as valid import gaussian_fitting as gaussfit import bessel_fitting as bessfit", "deep_validate=True, greater_than=0) phi = valid.validate_float_array(phi, deep_validate=True, greater_than=0, less_than=2*np.pi) z = valid.validate_float_array(z) # Because", "\"\"\" # Type check x = valid.validate_float_array(x) y = valid.validate_float_array(y) # Convert to", "the positive direction is pointing to the user. \"\"\" # Type check x", "system for tangential vector. r_subaxis = np.hypot(x,y) phi_subaxis = np.arctan2(y,x) # Calculate the", "This makes circular magnetic fields, in a way, fields without any divergence. However,", "of the tangential vector. B_t = propagation_function(r_subaxis) # The vector is tangent to", "r = valid.validate_float_array(r, deep_validate=True, greater_than=0) phi = valid.validate_float_array(phi, deep_validate=True, greater_than=0, less_than=2*np.pi) z =", "by r, thus the angle is related to # phi, but is not", "axis of rotation for the field. Assume that the positive direction is pointing", "but is not phi. B_angle = phi_subaxis + np.pi/2 # Calculate the components", "function. B_r = np.zeros_like(r) B_phi = propagation_function(r) B_z = np.zeros_like(r) # Return return", "matters in this function. 
B_r = np.zeros_like(r) B_phi = propagation_function(r) B_z = np.zeros_like(r)", "the r value # matters in this function. B_r = np.zeros_like(r) B_phi =", "components of the magnetic field vector based on the # magnitude and the", "as bessfit import misc_functions as misc def circular_magnetic_field_cyln(r, phi, z, propagation_function, ): \"\"\"", "the tangential vector. B_t = propagation_function(r_subaxis) # The vector is tangent to a", "is the axis of rotation for the field. Assume that the positive direction", "in a way, fields without any divergence. However, this measures not the rotational", "inspect import numpy as np import scipy as sp import scipy.special as sp_spcl", "to the user. \"\"\" # Type check x = valid.validate_float_array(x) y = valid.validate_float_array(y)", "np import scipy as sp import scipy.special as sp_spcl import matplotlib.pyplot as plt", "phi_subaxis = np.arctan2(y,x) # Calculate the magnitude of the tangential vector. B_t =", "to a circle made by r, thus the angle is related to #", "+ np.pi/2 # Calculate the components of the magnetic field vector based on", "# Calculate the magnitude of the tangential vector. B_t = propagation_function(r_subaxis) # The", "B_x = B_t * np.cos(B_angle) B_y = B_t * np.sin(B_angle) # Return return", "phi, but is not phi. B_angle = phi_subaxis + np.pi/2 # Calculate the", "rotation for the field. Assume that the positive direction is pointing to the", "less_than=2*np.pi) z = valid.validate_float_array(z) # Because of its invariantness in phi and z,", "Return return B_r, B_phi, B_z def circular_magnetic_field_cart(x, y, propagation_function, tangential_axis='z'): \"\"\" This makes", "magnitude and the angle. 
B_x = B_t * np.cos(B_angle) B_y = B_t *", "check r = valid.validate_float_array(r, deep_validate=True, greater_than=0) phi = valid.validate_float_array(phi, deep_validate=True, greater_than=0, less_than=2*np.pi) z", "= valid.validate_float_array(z) # Because of its invariantness in phi and z, also, only", "= propagation_function(r_subaxis) # The vector is tangent to a circle made by r,", "the axis of which is the axis of rotation for the field. Assume", "B_r = np.zeros_like(r) B_phi = propagation_function(r) B_z = np.zeros_like(r) # Return return B_r,", "= valid.validate_float_array(x) y = valid.validate_float_array(y) # Convert to a polar system for tangential", "in a way, fields without any divergence. However, this measures not the tangential", "import scipy.special as sp_spcl import matplotlib.pyplot as plt from Robustness.exception import * import", "user. \"\"\" # Type check x = valid.validate_float_array(x) y = valid.validate_float_array(y) # Convert", "np.arctan2(y,x) # Calculate the magnitude of the tangential vector. B_t = propagation_function(r_subaxis) #", "B_t * np.cos(B_angle) B_y = B_t * np.sin(B_angle) # Return return B_x, B_y", "z = valid.validate_float_array(z) # Because of its invariantness in phi and z, also,", "phi. B_angle = phi_subaxis + np.pi/2 # Calculate the components of the magnetic", "= np.zeros_like(r) # Return return B_r, B_phi, B_z def circular_magnetic_field_cart(x, y, propagation_function, tangential_axis='z'):", "circular_magnetic_field_cyln(r, phi, z, propagation_function, ): \"\"\" This makes circular magnetic fields, in a", "rotational vector, but tangential vector. The tangential axis is the axis of which", "from Robustness.exception import * import Robustness.validation as valid import gaussian_fitting as gaussfit import", "vector, but tangential vector. 
The tangential axis is the axis of which is", "B_r, B_phi, B_z def circular_magnetic_field_cart(x, y, propagation_function, tangential_axis='z'): \"\"\" This makes circular magnetic", "as plt from Robustness.exception import * import Robustness.validation as valid import gaussian_fitting as", "import scipy as sp import scipy.special as sp_spcl import matplotlib.pyplot as plt from", "pointing to the user. \"\"\" # Type check x = valid.validate_float_array(x) y =", "the magnetic field vector based on the # magnitude and the angle. B_x", "<reponame>psmd-iberutaru/Akamai_Internship import inspect import numpy as np import scipy as sp import scipy.special", "measures not the rotational vector, but tangential vector. The tangential axis is the", "magnetic field vector based on the # magnitude and the angle. B_x =", "= np.hypot(x,y) phi_subaxis = np.arctan2(y,x) # Calculate the magnitude of the tangential vector.", "numpy as np import scipy as sp import scipy.special as sp_spcl import matplotlib.pyplot", "way, fields without any divergence. However, this measures not the rotational vector, but", "= phi_subaxis + np.pi/2 # Calculate the components of the magnetic field vector", "scipy as sp import scipy.special as sp_spcl import matplotlib.pyplot as plt from Robustness.exception", "invariantness in phi and z, also, only the r value # matters in", "import matplotlib.pyplot as plt from Robustness.exception import * import Robustness.validation as valid import", "): \"\"\" This makes circular magnetic fields, in a way, fields without any", "as np import scipy as sp import scipy.special as sp_spcl import matplotlib.pyplot as", "circular magnetic fields, in a way, fields without any divergence. 
However, this measures", "\"\"\" This makes circular magnetic fields, in a way, fields without any divergence.", "valid.validate_float_array(phi, deep_validate=True, greater_than=0, less_than=2*np.pi) z = valid.validate_float_array(z) # Because of its invariantness in", "np.zeros_like(r) B_phi = propagation_function(r) B_z = np.zeros_like(r) # Return return B_r, B_phi, B_z", "this measures not the rotational vector, but tangential vector. The tangential axis is", "r value # matters in this function. B_r = np.zeros_like(r) B_phi = propagation_function(r)", "B_t = propagation_function(r_subaxis) # The vector is tangent to a circle made by", "field. Assume that the positive direction is pointing to the user. \"\"\" #", "Type check x = valid.validate_float_array(x) y = valid.validate_float_array(y) # Convert to a polar", "circle made by r, thus the angle is related to # phi, but", "x = valid.validate_float_array(x) y = valid.validate_float_array(y) # Convert to a polar system for", "# Type check r = valid.validate_float_array(r, deep_validate=True, greater_than=0) phi = valid.validate_float_array(phi, deep_validate=True, greater_than=0,", "valid import gaussian_fitting as gaussfit import bessel_fitting as bessfit import misc_functions as misc", "= valid.validate_float_array(r, deep_validate=True, greater_than=0) phi = valid.validate_float_array(phi, deep_validate=True, greater_than=0, less_than=2*np.pi) z = valid.validate_float_array(z)", "def circular_magnetic_field_cart(x, y, propagation_function, tangential_axis='z'): \"\"\" This makes circular magnetic fields, in a", "vector. The tangential axis is the axis of which is the axis of", "B_z = np.zeros_like(r) # Return return B_r, B_phi, B_z def circular_magnetic_field_cart(x, y, propagation_function,", "of rotation for the field. Assume that the positive direction is pointing to", "to # phi, but is not phi. 
B_angle = phi_subaxis + np.pi/2 #", "= np.zeros_like(r) B_phi = propagation_function(r) B_z = np.zeros_like(r) # Return return B_r, B_phi,", "r, thus the angle is related to # phi, but is not phi.", "field vector based on the # magnitude and the angle. B_x = B_t", "to a polar system for tangential vector. r_subaxis = np.hypot(x,y) phi_subaxis = np.arctan2(y,x)", "phi_subaxis + np.pi/2 # Calculate the components of the magnetic field vector based", "in phi and z, also, only the r value # matters in this", "tangential vector. B_t = propagation_function(r_subaxis) # The vector is tangent to a circle", "Calculate the magnitude of the tangential vector. B_t = propagation_function(r_subaxis) # The vector", "not the rotational vector, but tangential vector. The tangential axis is the axis", "= np.arctan2(y,x) # Calculate the magnitude of the tangential vector. B_t = propagation_function(r_subaxis)", "bessfit import misc_functions as misc def circular_magnetic_field_cyln(r, phi, z, propagation_function, ): \"\"\" This", "and z, also, only the r value # matters in this function. B_r", "made by r, thus the angle is related to # phi, but is", "greater_than=0, less_than=2*np.pi) z = valid.validate_float_array(z) # Because of its invariantness in phi and", "# Because of its invariantness in phi and z, also, only the r", "import * import Robustness.validation as valid import gaussian_fitting as gaussfit import bessel_fitting as", "only the r value # matters in this function. B_r = np.zeros_like(r) B_phi", "on the # magnitude and the angle. B_x = B_t * np.cos(B_angle) B_y", "= B_t * np.cos(B_angle) B_y = B_t * np.sin(B_angle) # Return return B_x,", "B_phi = propagation_function(r) B_z = np.zeros_like(r) # Return return B_r, B_phi, B_z def", "vector based on the # magnitude and the angle. B_x = B_t *", "is related to # phi, but is not phi. B_angle = phi_subaxis +", "vector. 
\"\"\" # Type check r = valid.validate_float_array(r, deep_validate=True, greater_than=0) phi = valid.validate_float_array(phi,", "valid.validate_float_array(x) y = valid.validate_float_array(y) # Convert to a polar system for tangential vector.", "in this function. B_r = np.zeros_like(r) B_phi = propagation_function(r) B_z = np.zeros_like(r) #", "axis of which is the axis of rotation for the field. Assume that", "the user. \"\"\" # Type check x = valid.validate_float_array(x) y = valid.validate_float_array(y) #", "not the tangential vector, but rotational vector. \"\"\" # Type check r =", "deep_validate=True, greater_than=0, less_than=2*np.pi) z = valid.validate_float_array(z) # Because of its invariantness in phi", "= valid.validate_float_array(y) # Convert to a polar system for tangential vector. r_subaxis =", "way, fields without any divergence. However, this measures not the tangential vector, but", "tangential_axis='z'): \"\"\" This makes circular magnetic fields, in a way, fields without any", "phi, z, propagation_function, ): \"\"\" This makes circular magnetic fields, in a way,", "Convert to a polar system for tangential vector. r_subaxis = np.hypot(x,y) phi_subaxis =", "The vector is tangent to a circle made by r, thus the angle", "propagation_function, ): \"\"\" This makes circular magnetic fields, in a way, fields without", "as gaussfit import bessel_fitting as bessfit import misc_functions as misc def circular_magnetic_field_cyln(r, phi,", "propagation_function(r_subaxis) # The vector is tangent to a circle made by r, thus", "a circle made by r, thus the angle is related to # phi,", "import bessel_fitting as bessfit import misc_functions as misc def circular_magnetic_field_cyln(r, phi, z, propagation_function,", "the tangential vector, but rotational vector. \"\"\" # Type check r = valid.validate_float_array(r,", "that the positive direction is pointing to the user. 
\"\"\" # Type check", "of its invariantness in phi and z, also, only the r value #", "the # magnitude and the angle. B_x = B_t * np.cos(B_angle) B_y =", "valid.validate_float_array(z) # Because of its invariantness in phi and z, also, only the", "vector. B_t = propagation_function(r_subaxis) # The vector is tangent to a circle made", "matplotlib.pyplot as plt from Robustness.exception import * import Robustness.validation as valid import gaussian_fitting", "B_phi, B_z def circular_magnetic_field_cart(x, y, propagation_function, tangential_axis='z'): \"\"\" This makes circular magnetic fields,", "gaussian_fitting as gaussfit import bessel_fitting as bessfit import misc_functions as misc def circular_magnetic_field_cyln(r,", "B_z def circular_magnetic_field_cart(x, y, propagation_function, tangential_axis='z'): \"\"\" This makes circular magnetic fields, in", "The tangential axis is the axis of which is the axis of rotation", "import misc_functions as misc def circular_magnetic_field_cyln(r, phi, z, propagation_function, ): \"\"\" This makes", "the axis of rotation for the field. Assume that the positive direction is", "for tangential vector. r_subaxis = np.hypot(x,y) phi_subaxis = np.arctan2(y,x) # Calculate the magnitude", "angle is related to # phi, but is not phi. B_angle = phi_subaxis", "but rotational vector. \"\"\" # Type check r = valid.validate_float_array(r, deep_validate=True, greater_than=0) phi", "# Type check x = valid.validate_float_array(x) y = valid.validate_float_array(y) # Convert to a", "Assume that the positive direction is pointing to the user. \"\"\" # Type", "tangent to a circle made by r, thus the angle is related to", "of the magnetic field vector based on the # magnitude and the angle.", "as sp_spcl import matplotlib.pyplot as plt from Robustness.exception import * import Robustness.validation as", "is the axis of which is the axis of rotation for the field.", "positive direction is pointing to the user. 
\"\"\" # Type check x =", "# phi, but is not phi. B_angle = phi_subaxis + np.pi/2 # Calculate", "related to # phi, but is not phi. B_angle = phi_subaxis + np.pi/2", "any divergence. However, this measures not the tangential vector, but rotational vector. \"\"\"", "phi = valid.validate_float_array(phi, deep_validate=True, greater_than=0, less_than=2*np.pi) z = valid.validate_float_array(z) # Because of its", "the magnitude of the tangential vector. B_t = propagation_function(r_subaxis) # The vector is", "thus the angle is related to # phi, but is not phi. B_angle", "the components of the magnetic field vector based on the # magnitude and", "measures not the tangential vector, but rotational vector. \"\"\" # Type check r", "greater_than=0) phi = valid.validate_float_array(phi, deep_validate=True, greater_than=0, less_than=2*np.pi) z = valid.validate_float_array(z) # Because of", "divergence. However, this measures not the tangential vector, but rotational vector. \"\"\" #", "Calculate the components of the magnetic field vector based on the # magnitude", "sp_spcl import matplotlib.pyplot as plt from Robustness.exception import * import Robustness.validation as valid", "valid.validate_float_array(y) # Convert to a polar system for tangential vector. r_subaxis = np.hypot(x,y)", "a polar system for tangential vector. r_subaxis = np.hypot(x,y) phi_subaxis = np.arctan2(y,x) #", "polar system for tangential vector. r_subaxis = np.hypot(x,y) phi_subaxis = np.arctan2(y,x) # Calculate", "# Calculate the components of the magnetic field vector based on the #", "tangential vector. 
The tangential axis is the axis of which is the axis", "return B_r, B_phi, B_z def circular_magnetic_field_cart(x, y, propagation_function, tangential_axis='z'): \"\"\" This makes circular", "vector is tangent to a circle made by r, thus the angle is", "phi and z, also, only the r value # matters in this function.", "z, propagation_function, ): \"\"\" This makes circular magnetic fields, in a way, fields", "value # matters in this function. B_r = np.zeros_like(r) B_phi = propagation_function(r) B_z", "as valid import gaussian_fitting as gaussfit import bessel_fitting as bessfit import misc_functions as", "Because of its invariantness in phi and z, also, only the r value", "based on the # magnitude and the angle. B_x = B_t * np.cos(B_angle)", "this function. B_r = np.zeros_like(r) B_phi = propagation_function(r) B_z = np.zeros_like(r) # Return", "misc def circular_magnetic_field_cyln(r, phi, z, propagation_function, ): \"\"\" This makes circular magnetic fields,", "is pointing to the user. \"\"\" # Type check x = valid.validate_float_array(x) y", "import inspect import numpy as np import scipy as sp import scipy.special as", "but tangential vector. The tangential axis is the axis of which is the", "also, only the r value # matters in this function. B_r = np.zeros_like(r)", "for the field. Assume that the positive direction is pointing to the user.", "\"\"\" # Type check r = valid.validate_float_array(r, deep_validate=True, greater_than=0) phi = valid.validate_float_array(phi, deep_validate=True,", "tangential axis is the axis of which is the axis of rotation for", "bessel_fitting as bessfit import misc_functions as misc def circular_magnetic_field_cyln(r, phi, z, propagation_function, ):", "misc_functions as misc def circular_magnetic_field_cyln(r, phi, z, propagation_function, ): \"\"\" This makes circular", "def circular_magnetic_field_cyln(r, phi, z, propagation_function, ): \"\"\" This makes circular magnetic fields, in", "without any divergence. 
However, this measures not the rotational vector, but tangential vector.", "tangential vector. r_subaxis = np.hypot(x,y) phi_subaxis = np.arctan2(y,x) # Calculate the magnitude of", "sp import scipy.special as sp_spcl import matplotlib.pyplot as plt from Robustness.exception import *", "direction is pointing to the user. \"\"\" # Type check x = valid.validate_float_array(x)", "import gaussian_fitting as gaussfit import bessel_fitting as bessfit import misc_functions as misc def", "B_angle = phi_subaxis + np.pi/2 # Calculate the components of the magnetic field" ]
[ "django.contrib import admin from .models import * # Register your models here. admin.site.register(Vehicle)", "admin from .models import * # Register your models here. admin.site.register(Vehicle) admin.site.register(VehicleLogging) admin.site.register(RegisteredUserLogging)", "import admin from .models import * # Register your models here. admin.site.register(Vehicle) admin.site.register(VehicleLogging)", "from .models import * # Register your models here. admin.site.register(Vehicle) admin.site.register(VehicleLogging) admin.site.register(RegisteredUserLogging) admin.site.register(VisitorUserLogging)", "from django.contrib import admin from .models import * # Register your models here." ]
[ "] operations = [ migrations.AddField( model_name='battery', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template', max_length=100, no_check_for_status=True), ),", "model_name='assignment', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='result', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')],", "3.1.7 on 2021-11-19 23:36 from django.db import migrations import model_utils.fields class Migration(migrations.Migration): dependencies", "operations = [ migrations.AddField( model_name='battery', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template', max_length=100, no_check_for_status=True), ), migrations.AlterField(", "23:36 from django.db import migrations import model_utils.fields class Migration(migrations.Migration): dependencies = [ ('experiments',", "on 2021-11-19 23:36 from django.db import migrations import model_utils.fields class Migration(migrations.Migration): dependencies =", "import model_utils.fields class Migration(migrations.Migration): dependencies = [ ('experiments', '0012_repoorigin_name'), ] operations = [", "[ migrations.AddField( model_name='battery', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='assignment', name='status',", "migrations import model_utils.fields class Migration(migrations.Migration): dependencies = [ ('experiments', '0012_repoorigin_name'), ] operations =", "migrations.AddField( model_name='battery', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='assignment', name='status', field=model_utils.fields.StatusField(choices=[(0,", 
"default='not-started', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='result', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True), ),", "by Django 3.1.7 on 2021-11-19 23:36 from django.db import migrations import model_utils.fields class", "Generated by Django 3.1.7 on 2021-11-19 23:36 from django.db import migrations import model_utils.fields", "name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='result', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started',", "max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='assignment', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True), ), migrations.AlterField(", "('experiments', '0012_repoorigin_name'), ] operations = [ migrations.AddField( model_name='battery', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template', max_length=100,", "no_check_for_status=True), ), migrations.AlterField( model_name='assignment', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='result',", "= [ ('experiments', '0012_repoorigin_name'), ] operations = [ migrations.AddField( model_name='battery', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')],", "'dummy')], default='template', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='assignment', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True),", "class 
Migration(migrations.Migration): dependencies = [ ('experiments', '0012_repoorigin_name'), ] operations = [ migrations.AddField( model_name='battery',", "[ ('experiments', '0012_repoorigin_name'), ] operations = [ migrations.AddField( model_name='battery', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template',", "model_utils.fields class Migration(migrations.Migration): dependencies = [ ('experiments', '0012_repoorigin_name'), ] operations = [ migrations.AddField(", "= [ migrations.AddField( model_name='battery', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='assignment',", "'0012_repoorigin_name'), ] operations = [ migrations.AddField( model_name='battery', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template', max_length=100, no_check_for_status=True),", "# Generated by Django 3.1.7 on 2021-11-19 23:36 from django.db import migrations import", "), migrations.AlterField( model_name='assignment', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='result', name='status',", "django.db import migrations import model_utils.fields class Migration(migrations.Migration): dependencies = [ ('experiments', '0012_repoorigin_name'), ]", "migrations.AlterField( model_name='assignment', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='result', name='status', field=model_utils.fields.StatusField(choices=[(0,", "model_name='battery', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template', max_length=100, no_check_for_status=True), ), migrations.AlterField( 
model_name='assignment', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')],", "import migrations import model_utils.fields class Migration(migrations.Migration): dependencies = [ ('experiments', '0012_repoorigin_name'), ] operations", "dependencies = [ ('experiments', '0012_repoorigin_name'), ] operations = [ migrations.AddField( model_name='battery', name='status', field=model_utils.fields.StatusField(choices=[(0,", "default='template', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='assignment', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True), ),", "field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='result', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100,", "'dummy')], default='not-started', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='result', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True),", "field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='assignment', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100,", "from django.db import migrations import model_utils.fields class Migration(migrations.Migration): dependencies = [ ('experiments', '0012_repoorigin_name'),", "2021-11-19 23:36 from django.db import migrations import model_utils.fields class Migration(migrations.Migration): dependencies = [", "Migration(migrations.Migration): dependencies = [ ('experiments', '0012_repoorigin_name'), ] operations = [ migrations.AddField( 
model_name='battery', name='status',", "name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='template', max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='assignment', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started',", "max_length=100, no_check_for_status=True), ), migrations.AlterField( model_name='result', name='status', field=model_utils.fields.StatusField(choices=[(0, 'dummy')], default='not-started', max_length=100, no_check_for_status=True), ), ]", "Django 3.1.7 on 2021-11-19 23:36 from django.db import migrations import model_utils.fields class Migration(migrations.Migration):" ]
[ "python from distutils.core import setup setup(name='aifin', version='1.0.1', description='Python Distribution Utilities', author='<NAME>', author_email='<EMAIL>', url='aitroopers.com',", "from distutils.core import setup setup(name='aifin', version='1.0.1', description='Python Distribution Utilities', author='<NAME>', author_email='<EMAIL>', url='aitroopers.com', packages=['aifin'],", "import setup setup(name='aifin', version='1.0.1', description='Python Distribution Utilities', author='<NAME>', author_email='<EMAIL>', url='aitroopers.com', packages=['aifin'], install_requires=[ 'pandas','scipy'", "distutils.core import setup setup(name='aifin', version='1.0.1', description='Python Distribution Utilities', author='<NAME>', author_email='<EMAIL>', url='aitroopers.com', packages=['aifin'], install_requires=[", "setup setup(name='aifin', version='1.0.1', description='Python Distribution Utilities', author='<NAME>', author_email='<EMAIL>', url='aitroopers.com', packages=['aifin'], install_requires=[ 'pandas','scipy' ]", "#!/usr/bin/env python from distutils.core import setup setup(name='aifin', version='1.0.1', description='Python Distribution Utilities', author='<NAME>', author_email='<EMAIL>',", "setup(name='aifin', version='1.0.1', description='Python Distribution Utilities', author='<NAME>', author_email='<EMAIL>', url='aitroopers.com', packages=['aifin'], install_requires=[ 'pandas','scipy' ] )" ]
[ "if in_list[s_idx] <= base_val: final_idx = s_idx else: final_idx = s_idx - 1", "NoReturn def quick_sort(in_list: List[int], s_idx: int, e_idx: int) -> NoReturn: if e_idx >", "= e_idx base_idx = int((s_idx + e_idx)/2) base_val = in_list[base_idx] in_list[first_idx], in_list[base_idx] =", "s_idx += 1 while e_idx > s_idx: if in_list[s_idx] <= base_val: s_idx +=", "[15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list, 0, len(in_list)-1) assert in_list == [2, 3, 12, 12, 15, 23,", "e_idx -= 1 if in_list[s_idx] <= base_val: final_idx = s_idx else: final_idx =", "if in_list[s_idx] <= base_val: s_idx += 1 continue if in_list[e_idx] > base_val: e_idx", "in_list[base_idx] = in_list[base_idx], in_list[first_idx] s_idx += 1 while e_idx > s_idx: if in_list[s_idx]", "base_val: e_idx -= 1 continue in_list[s_idx], in_list[e_idx] = in_list[e_idx], in_list[s_idx] s_idx += 1", "else: final_idx = s_idx - 1 in_list[first_idx], in_list[final_idx] = in_list[final_idx], in_list[first_idx] quick_sort(in_list, first_idx,", "-> NoReturn: if e_idx > s_idx: first_idx = s_idx last_idx = e_idx base_idx", "+ e_idx)/2) base_val = in_list[base_idx] in_list[first_idx], in_list[base_idx] = in_list[base_idx], in_list[first_idx] s_idx += 1", "in_list[e_idx], in_list[s_idx] s_idx += 1 e_idx -= 1 if in_list[s_idx] <= base_val: final_idx", "3, 12, 12, 15, 23, 33, 54, 73, 87, 89, 94, 162] print(\"Test", "1 while e_idx > s_idx: if in_list[s_idx] <= base_val: s_idx += 1 continue", "= in_list[e_idx], in_list[s_idx] s_idx += 1 e_idx -= 1 if in_list[s_idx] <= base_val:", "> base_val: e_idx -= 1 continue in_list[s_idx], in_list[e_idx] = in_list[e_idx], in_list[s_idx] s_idx +=", "in_list[final_idx], in_list[first_idx] quick_sort(in_list, first_idx, final_idx-1) quick_sort(in_list, final_idx+1, last_idx) if __name__ == \"__main__\": in_list", "e_idx base_idx = int((s_idx + e_idx)/2) base_val = in_list[base_idx] in_list[first_idx], in_list[base_idx] = in_list[base_idx],", "== \"__main__\": in_list = 
[15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list, 0, len(in_list)-1) assert in_list == [2, 3,", "+= 1 while e_idx > s_idx: if in_list[s_idx] <= base_val: s_idx += 1", "last_idx = e_idx base_idx = int((s_idx + e_idx)/2) base_val = in_list[base_idx] in_list[first_idx], in_list[base_idx]", "s_idx: int, e_idx: int) -> NoReturn: if e_idx > s_idx: first_idx = s_idx", "e_idx > s_idx: first_idx = s_idx last_idx = e_idx base_idx = int((s_idx +", "s_idx += 1 e_idx -= 1 if in_list[s_idx] <= base_val: final_idx = s_idx", "if __name__ == \"__main__\": in_list = [15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list, 0, len(in_list)-1) assert in_list ==", "base_val = in_list[base_idx] in_list[first_idx], in_list[base_idx] = in_list[base_idx], in_list[first_idx] s_idx += 1 while e_idx", "def quick_sort(in_list: List[int], s_idx: int, e_idx: int) -> NoReturn: if e_idx > s_idx:", "in_list[e_idx] = in_list[e_idx], in_list[s_idx] s_idx += 1 e_idx -= 1 if in_list[s_idx] <=", "List[int], s_idx: int, e_idx: int) -> NoReturn: if e_idx > s_idx: first_idx =", "List, NoReturn def quick_sort(in_list: List[int], s_idx: int, e_idx: int) -> NoReturn: if e_idx", "quick_sort(in_list: List[int], s_idx: int, e_idx: int) -> NoReturn: if e_idx > s_idx: first_idx", "typing import List, NoReturn def quick_sort(in_list: List[int], s_idx: int, e_idx: int) -> NoReturn:", "quick_sort(in_list, 0, len(in_list)-1) assert in_list == [2, 3, 12, 12, 15, 23, 33,", "in_list[base_idx] in_list[first_idx], in_list[base_idx] = in_list[base_idx], in_list[first_idx] s_idx += 1 while e_idx > s_idx:", "+= 1 e_idx -= 1 if in_list[s_idx] <= base_val: final_idx = s_idx else:", "assert in_list == [2, 3, 12, 12, 15, 23, 33, 54, 73, 87,", "\"__main__\": in_list = [15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list, 0, len(in_list)-1) assert in_list == [2, 3, 12,", "= s_idx else: final_idx = s_idx - 1 in_list[first_idx], in_list[final_idx] = in_list[final_idx], in_list[first_idx]", "in_list[first_idx] s_idx 
+= 1 while e_idx > s_idx: if in_list[s_idx] <= base_val: s_idx", "- 1 in_list[first_idx], in_list[final_idx] = in_list[final_idx], in_list[first_idx] quick_sort(in_list, first_idx, final_idx-1) quick_sort(in_list, final_idx+1, last_idx)", "s_idx - 1 in_list[first_idx], in_list[final_idx] = in_list[final_idx], in_list[first_idx] quick_sort(in_list, first_idx, final_idx-1) quick_sort(in_list, final_idx+1,", "= in_list[base_idx], in_list[first_idx] s_idx += 1 while e_idx > s_idx: if in_list[s_idx] <=", "-= 1 if in_list[s_idx] <= base_val: final_idx = s_idx else: final_idx = s_idx", "in_list = [15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list, 0, len(in_list)-1) assert in_list == [2, 3, 12, 12,", "if e_idx > s_idx: first_idx = s_idx last_idx = e_idx base_idx = int((s_idx", "final_idx = s_idx else: final_idx = s_idx - 1 in_list[first_idx], in_list[final_idx] = in_list[final_idx],", "[2, 3, 12, 12, 15, 23, 33, 54, 73, 87, 89, 94, 162]", "continue in_list[s_idx], in_list[e_idx] = in_list[e_idx], in_list[s_idx] s_idx += 1 e_idx -= 1 if", "in_list[final_idx] = in_list[final_idx], in_list[first_idx] quick_sort(in_list, first_idx, final_idx-1) quick_sort(in_list, final_idx+1, last_idx) if __name__ ==", "s_idx: first_idx = s_idx last_idx = e_idx base_idx = int((s_idx + e_idx)/2) base_val", "<= base_val: final_idx = s_idx else: final_idx = s_idx - 1 in_list[first_idx], in_list[final_idx]", "s_idx last_idx = e_idx base_idx = int((s_idx + e_idx)/2) base_val = in_list[base_idx] in_list[first_idx],", "-= 1 continue in_list[s_idx], in_list[e_idx] = in_list[e_idx], in_list[s_idx] s_idx += 1 e_idx -=", "s_idx += 1 continue if in_list[e_idx] > base_val: e_idx -= 1 continue in_list[s_idx],", "if in_list[e_idx] > base_val: e_idx -= 1 continue in_list[s_idx], in_list[e_idx] = in_list[e_idx], in_list[s_idx]", "= [15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list, 0, len(in_list)-1) assert in_list == [2, 3, 12, 12, 15,", "s_idx else: final_idx = s_idx - 1 
in_list[first_idx], in_list[final_idx] = in_list[final_idx], in_list[first_idx] quick_sort(in_list,", "last_idx) if __name__ == \"__main__\": in_list = [15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list, 0, len(in_list)-1) assert in_list", "in_list[first_idx], in_list[final_idx] = in_list[final_idx], in_list[first_idx] quick_sort(in_list, first_idx, final_idx-1) quick_sort(in_list, final_idx+1, last_idx) if __name__", "in_list[s_idx] s_idx += 1 e_idx -= 1 if in_list[s_idx] <= base_val: final_idx =", "int((s_idx + e_idx)/2) base_val = in_list[base_idx] in_list[first_idx], in_list[base_idx] = in_list[base_idx], in_list[first_idx] s_idx +=", "12, 12, 15, 23, 33, 54, 73, 87, 89, 94, 162] print(\"Test succeeded.\")", "quick_sort(in_list, first_idx, final_idx-1) quick_sort(in_list, final_idx+1, last_idx) if __name__ == \"__main__\": in_list = [15,12,73,23,89,2,87,94,54,3,162,12,33]", "in_list == [2, 3, 12, 12, 15, 23, 33, 54, 73, 87, 89,", "in_list[s_idx], in_list[e_idx] = in_list[e_idx], in_list[s_idx] s_idx += 1 e_idx -= 1 if in_list[s_idx]", "in_list[s_idx] <= base_val: s_idx += 1 continue if in_list[e_idx] > base_val: e_idx -=", "1 if in_list[s_idx] <= base_val: final_idx = s_idx else: final_idx = s_idx -", "continue if in_list[e_idx] > base_val: e_idx -= 1 continue in_list[s_idx], in_list[e_idx] = in_list[e_idx],", "import List, NoReturn def quick_sort(in_list: List[int], s_idx: int, e_idx: int) -> NoReturn: if", "> s_idx: if in_list[s_idx] <= base_val: s_idx += 1 continue if in_list[e_idx] >", "1 e_idx -= 1 if in_list[s_idx] <= base_val: final_idx = s_idx else: final_idx", "= in_list[base_idx] in_list[first_idx], in_list[base_idx] = in_list[base_idx], in_list[first_idx] s_idx += 1 while e_idx >", "e_idx: int) -> NoReturn: if e_idx > s_idx: first_idx = s_idx last_idx =", "1 continue if in_list[e_idx] > base_val: e_idx -= 1 continue in_list[s_idx], in_list[e_idx] =", "in_list[first_idx], in_list[base_idx] = in_list[base_idx], in_list[first_idx] s_idx += 1 
while e_idx > s_idx: if", "base_idx = int((s_idx + e_idx)/2) base_val = in_list[base_idx] in_list[first_idx], in_list[base_idx] = in_list[base_idx], in_list[first_idx]", "first_idx, final_idx-1) quick_sort(in_list, final_idx+1, last_idx) if __name__ == \"__main__\": in_list = [15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list,", "final_idx-1) quick_sort(in_list, final_idx+1, last_idx) if __name__ == \"__main__\": in_list = [15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list, 0,", "final_idx = s_idx - 1 in_list[first_idx], in_list[final_idx] = in_list[final_idx], in_list[first_idx] quick_sort(in_list, first_idx, final_idx-1)", "NoReturn: if e_idx > s_idx: first_idx = s_idx last_idx = e_idx base_idx =", "len(in_list)-1) assert in_list == [2, 3, 12, 12, 15, 23, 33, 54, 73,", "s_idx: if in_list[s_idx] <= base_val: s_idx += 1 continue if in_list[e_idx] > base_val:", "base_val: s_idx += 1 continue if in_list[e_idx] > base_val: e_idx -= 1 continue", "in_list[e_idx] > base_val: e_idx -= 1 continue in_list[s_idx], in_list[e_idx] = in_list[e_idx], in_list[s_idx] s_idx", "final_idx+1, last_idx) if __name__ == \"__main__\": in_list = [15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list, 0, len(in_list)-1) assert", "in_list[base_idx], in_list[first_idx] s_idx += 1 while e_idx > s_idx: if in_list[s_idx] <= base_val:", "e_idx > s_idx: if in_list[s_idx] <= base_val: s_idx += 1 continue if in_list[e_idx]", "first_idx = s_idx last_idx = e_idx base_idx = int((s_idx + e_idx)/2) base_val =", "= int((s_idx + e_idx)/2) base_val = in_list[base_idx] in_list[first_idx], in_list[base_idx] = in_list[base_idx], in_list[first_idx] s_idx", "1 continue in_list[s_idx], in_list[e_idx] = in_list[e_idx], in_list[s_idx] s_idx += 1 e_idx -= 1", "base_val: final_idx = s_idx else: final_idx = s_idx - 1 in_list[first_idx], in_list[final_idx] =", "from typing import List, NoReturn def quick_sort(in_list: List[int], s_idx: int, e_idx: int) ->", "e_idx)/2) base_val = in_list[base_idx] 
in_list[first_idx], in_list[base_idx] = in_list[base_idx], in_list[first_idx] s_idx += 1 while", "quick_sort(in_list, final_idx+1, last_idx) if __name__ == \"__main__\": in_list = [15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list, 0, len(in_list)-1)", "= in_list[final_idx], in_list[first_idx] quick_sort(in_list, first_idx, final_idx-1) quick_sort(in_list, final_idx+1, last_idx) if __name__ == \"__main__\":", "1 in_list[first_idx], in_list[final_idx] = in_list[final_idx], in_list[first_idx] quick_sort(in_list, first_idx, final_idx-1) quick_sort(in_list, final_idx+1, last_idx) if", "== [2, 3, 12, 12, 15, 23, 33, 54, 73, 87, 89, 94,", "<= base_val: s_idx += 1 continue if in_list[e_idx] > base_val: e_idx -= 1", "= s_idx - 1 in_list[first_idx], in_list[final_idx] = in_list[final_idx], in_list[first_idx] quick_sort(in_list, first_idx, final_idx-1) quick_sort(in_list,", "in_list[s_idx] <= base_val: final_idx = s_idx else: final_idx = s_idx - 1 in_list[first_idx],", "int, e_idx: int) -> NoReturn: if e_idx > s_idx: first_idx = s_idx last_idx", "+= 1 continue if in_list[e_idx] > base_val: e_idx -= 1 continue in_list[s_idx], in_list[e_idx]", "__name__ == \"__main__\": in_list = [15,12,73,23,89,2,87,94,54,3,162,12,33] quick_sort(in_list, 0, len(in_list)-1) assert in_list == [2,", "= s_idx last_idx = e_idx base_idx = int((s_idx + e_idx)/2) base_val = in_list[base_idx]", "> s_idx: first_idx = s_idx last_idx = e_idx base_idx = int((s_idx + e_idx)/2)", "int) -> NoReturn: if e_idx > s_idx: first_idx = s_idx last_idx = e_idx", "in_list[first_idx] quick_sort(in_list, first_idx, final_idx-1) quick_sort(in_list, final_idx+1, last_idx) if __name__ == \"__main__\": in_list =", "0, len(in_list)-1) assert in_list == [2, 3, 12, 12, 15, 23, 33, 54,", "while e_idx > s_idx: if in_list[s_idx] <= base_val: s_idx += 1 continue if", "e_idx -= 1 continue in_list[s_idx], in_list[e_idx] = in_list[e_idx], in_list[s_idx] s_idx += 1 e_idx" ]
[ "+ p.query else: request_uri = p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) elif", "query_string): cached_files = get_cached_files(path_and_query, server) for i in cached_files: try: os.remove(i) except OSError", "environ.get('RAW_URI', '')) if request_uri == '': uri = util.request_uri(environ) host = environ.get('HTTP_HOST', '')", "'status_message': 'Not Found', } start_response(\"404 Not Found\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] ERROR =", "= self.app(environ, sr) if response['status'] == '404 Not Found': request_uri = wsgiref.util.request_uri(environ) p", "\"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\"> <head> <title>{status_code} - {status_message}</title>", "'404 Not Found': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query: request_uri =", "Found': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query: request_uri = p.path +", "'message': \"Method not allowed\", 'status_code': 405, 'status_message': 'Method Not Allowed', } start_response(\"405 Method", "Cling(BASE_PATH) self.fallback = fallback def __call__(self, environ, start_response): import wsgiref response = {}", "get_cached_files from settings import DEBUG server = environ['SERVER_NAME'] try: request_uri = get_path(environ) path_and_query", "= request_uri return self.fallback(environ, start_response) elif response['status'] == '405 Method Not Allowed': request_uri", "Loop Detected') start_response(\"302 Found\", [(\"Location\", path)]) return [] def do_500(environ, start_response, message): resp", "as e: return do_500(environ, start_response, e.message) start_response(\"204 No Content\", []) return [] else:", "- {status_message}</h1>{message} </body> </html> \"\"\" class DemoApp(object): def __init__(self, 
fallback): from static import", "import is_valid_security, get_cached_files from settings import DEBUG server = environ['SERVER_NAME'] try: request_uri =", "response['status'] = status response['headers'] = headers result = self.app(environ, sr) if response['status'] ==", "No Content\", []) return [] else: return do_405(environ, start_response) except Http404 as e:", "in cached_files: try: os.remove(i) except OSError as e: return do_500(environ, start_response, e.message) start_response(\"204", "== path: # return do_500(environ, start_response, 'Redirect Loop Detected') start_response(\"302 Found\", [(\"Location\", path)])", "Not Allowed': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query: request_uri = p.path", "import util request_uri = environ.get('REQUEST_URI', environ.get('RAW_URI', '')) if request_uri == '': uri =", "Allowed\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_404(environ, start_response, why, debug): if debug: message", "scheme = util.guess_scheme(environ) prefix = \"{scheme}://{host}\".format(scheme=scheme, host=host) request_uri = uri.replace(prefix, '') return request_uri", "= uri.replace(prefix, '') return request_uri def handle_purge(environ, start_response): \"\"\" Handle a PURGE request.", "BASE_PATH self.app = Cling(BASE_PATH) self.fallback = fallback def __call__(self, environ, start_response): import wsgiref", "from settings import BASE_PATH self.app = Cling(BASE_PATH) self.fallback = fallback def __call__(self, environ,", "e.message, DEBUG) def do_redirect(environ, start_response, path): # if get_path(environ) == path: # return", "p.query else: request_uri = p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) elif response['status']", "util request_uri = environ.get('REQUEST_URI', environ.get('RAW_URI', '')) if request_uri == '': uri = util.request_uri(environ)", "'message': message, 'status_code': 404, 'status_message': 'Not 
Found', } start_response(\"404 Not Found\", [(\"Content-Type\", \"text/html\")])", "uri = util.request_uri(environ) host = environ.get('HTTP_HOST', '') scheme = util.guess_scheme(environ) prefix = \"{scheme}://{host}\".format(scheme=scheme,", "<head> <title>{status_code} - {status_message}</title> </head> <body> <h1>{status_code} - {status_message}</h1>{message} </body> </html> \"\"\" class", "XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\"> <head> <title>{status_code} - {status_message}</title> </head>", "is_valid_security('PURGE', query_string): cached_files = get_cached_files(path_and_query, server) for i in cached_files: try: os.remove(i) except", "<html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\"> <head> <title>{status_code} - {status_message}</title> </head> <body> <h1>{status_code} - {status_message}</h1>{message}", "} start_response(\"405 Method Not Allowed\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_404(environ, start_response, why,", "if p.query: request_uri = p.path + \"?\" + p.query else: request_uri = p.path", "start_response(\"500 Internal Server Error\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_405(environ, start_response): resp =", "start_response): import wsgiref response = {} def sr(status, headers): response['status'] = status response['headers']", "e.message) start_response(\"204 No Content\", []) return [] else: return do_405(environ, start_response) except Http404", "request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query: request_uri = p.path + \"?\"", "start_response) except Http404 as e: return do_404(environ, start_response, e.message, DEBUG) def do_redirect(environ, start_response,", "self.app(environ, sr) if response['status'] == '404 Not Found': request_uri = wsgiref.util.request_uri(environ) p =", 
"= p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) elif response['status'] == '405 Method", "response = {} def sr(status, headers): response['status'] = status response['headers'] = headers result", "<body> <h1>{status_code} - {status_message}</h1>{message} </body> </html> \"\"\" class DemoApp(object): def __init__(self, fallback): from", "if debug: message = \"<h2>%s</h2>\" % why else: message = \"File not found\"", "PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\"> <head> <title>{status_code} -", "request. \"\"\" from utils import is_valid_security, get_cached_files from settings import DEBUG server =", "return self.fallback(environ, start_response) elif response['status'] == '405 Method Not Allowed': request_uri = wsgiref.util.request_uri(environ)", "os import urlparse class Http404(Exception): pass def get_path(environ): \"\"\" Get the path \"\"\"", "request_uri def handle_purge(environ, start_response): \"\"\" Handle a PURGE request. \"\"\" from utils import", "cached_files: try: os.remove(i) except OSError as e: return do_500(environ, start_response, e.message) start_response(\"204 No", "path \"\"\" from wsgiref import util request_uri = environ.get('REQUEST_URI', environ.get('RAW_URI', '')) if request_uri", "Method Not Allowed': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query: request_uri =", "\"\"\" Get the path \"\"\" from wsgiref import util request_uri = environ.get('REQUEST_URI', environ.get('RAW_URI',", "= util.guess_scheme(environ) prefix = \"{scheme}://{host}\".format(scheme=scheme, host=host) request_uri = uri.replace(prefix, '') return request_uri def", "def handle_purge(environ, start_response): \"\"\" Handle a PURGE request. 
\"\"\" from utils import is_valid_security,", "'status_code': 500, 'status_message': 'Internal Server Error', } start_response(\"500 Internal Server Error\", [(\"Content-Type\", \"text/html\")])", "<?xml version=\"1.0\" encoding=\"iso-8859-1\"?> <!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html xmlns=\"http://www.w3.org/1999/xhtml\"", "'message': message, 'status_code': 500, 'status_message': 'Internal Server Error', } start_response(\"500 Internal Server Error\",", "= Cling(BASE_PATH) self.fallback = fallback def __call__(self, environ, start_response): import wsgiref response =", "return [] def do_500(environ, start_response, message): resp = { 'message': message, 'status_code': 500,", "Http404(Exception): pass def get_path(environ): \"\"\" Get the path \"\"\" from wsgiref import util", "== '405 Method Not Allowed': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query:", "% why else: message = \"File not found\" resp = { 'message': message,", "debug): if debug: message = \"<h2>%s</h2>\" % why else: message = \"File not", "= environ.get('HTTP_HOST', '') scheme = util.guess_scheme(environ) prefix = \"{scheme}://{host}\".format(scheme=scheme, host=host) request_uri = uri.replace(prefix,", "# return do_500(environ, start_response, 'Redirect Loop Detected') start_response(\"302 Found\", [(\"Location\", path)]) return []", "get_path(environ): \"\"\" Get the path \"\"\" from wsgiref import util request_uri = environ.get('REQUEST_URI',", "<title>{status_code} - {status_message}</title> </head> <body> <h1>{status_code} - {status_message}</h1>{message} </body> </html> \"\"\" class DemoApp(object):", "__init__(self, fallback): from static import Cling from settings import BASE_PATH self.app = Cling(BASE_PATH)", "do_500(environ, start_response, message): resp = { 'message': message, 'status_code': 500, 'status_message': 'Internal Server", "if 
response['status'] == '404 Not Found': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if", "Handle a PURGE request. \"\"\" from utils import is_valid_security, get_cached_files from settings import", "from settings import DEBUG server = environ['SERVER_NAME'] try: request_uri = get_path(environ) path_and_query =", "= p.path + \"?\" + p.query else: request_uri = p.path environ['REQUEST_URI'] = request_uri", "Method Not Allowed\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_404(environ, start_response, why, debug): if", "why else: message = \"File not found\" resp = { 'message': message, 'status_code':", "Server Error\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_405(environ, start_response): resp = { 'message':", "Detected') start_response(\"302 Found\", [(\"Location\", path)]) return [] def do_500(environ, start_response, message): resp =", "p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) elif response['status'] == '405 Method Not", "start_response(\"405 Method Not Allowed\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_404(environ, start_response, why, debug):", "wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query: request_uri = p.path + \"?\" + p.query", "request_uri return self.fallback(environ, start_response) elif response['status'] == '405 Method Not Allowed': request_uri =", "[(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] ERROR = \"\"\" <?xml version=\"1.0\" encoding=\"iso-8859-1\"?> <!DOCTYPE html PUBLIC", "except OSError as e: return do_500(environ, start_response, e.message) start_response(\"204 No Content\", []) return", "- {status_message}</title> </head> <body> <h1>{status_code} - {status_message}</h1>{message} </body> </html> \"\"\" class DemoApp(object): def", "[] def do_500(environ, start_response, message): resp = { 'message': message, 
'status_code': 500, 'status_message':", "request_uri.lstrip(\"/\") query_string = environ.get('QUERY_STRING', '') if is_valid_security('PURGE', query_string): cached_files = get_cached_files(path_and_query, server) for", "wsgiref import util request_uri = environ.get('REQUEST_URI', environ.get('RAW_URI', '')) if request_uri == '': uri", "'') if is_valid_security('PURGE', query_string): cached_files = get_cached_files(path_and_query, server) for i in cached_files: try:", "from static import Cling from settings import BASE_PATH self.app = Cling(BASE_PATH) self.fallback =", "OSError as e: return do_500(environ, start_response, e.message) start_response(\"204 No Content\", []) return []", "'Method Not Allowed', } start_response(\"405 Method Not Allowed\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def", "== '404 Not Found': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query: request_uri", "= get_path(environ) path_and_query = request_uri.lstrip(\"/\") query_string = environ.get('QUERY_STRING', '') if is_valid_security('PURGE', query_string): cached_files", "p.query else: request_uri = p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) else: start_response(response['status'],", "DEBUG server = environ['SERVER_NAME'] try: request_uri = get_path(environ) path_and_query = request_uri.lstrip(\"/\") query_string =", "= environ.get('REQUEST_URI', environ.get('RAW_URI', '')) if request_uri == '': uri = util.request_uri(environ) host =", "i in cached_files: try: os.remove(i) except OSError as e: return do_500(environ, start_response, e.message)", "self.app = Cling(BASE_PATH) self.fallback = fallback def __call__(self, environ, start_response): import wsgiref response", "{status_message}</title> </head> <body> <h1>{status_code} - {status_message}</h1>{message} </body> </html> \"\"\" class DemoApp(object): def __init__(self,", "def do_404(environ, start_response, why, debug): 
if debug: message = \"<h2>%s</h2>\" % why else:", "try: request_uri = get_path(environ) path_and_query = request_uri.lstrip(\"/\") query_string = environ.get('QUERY_STRING', '') if is_valid_security('PURGE',", "return [] else: return do_405(environ, start_response) except Http404 as e: return do_404(environ, start_response,", "Server Error', } start_response(\"500 Internal Server Error\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_405(environ,", "DEBUG) def do_redirect(environ, start_response, path): # if get_path(environ) == path: # return do_500(environ,", "p = urlparse.urlparse(request_uri) if p.query: request_uri = p.path + \"?\" + p.query else:", "allowed\", 'status_code': 405, 'status_message': 'Method Not Allowed', } start_response(\"405 Method Not Allowed\", [(\"Content-Type\",", "wsgiref response = {} def sr(status, headers): response['status'] = status response['headers'] = headers", "resp = { 'message': message, 'status_code': 404, 'status_message': 'Not Found', } start_response(\"404 Not", "__call__(self, environ, start_response): import wsgiref response = {} def sr(status, headers): response['status'] =", "[] else: return do_405(environ, start_response) except Http404 as e: return do_404(environ, start_response, e.message,", "= fallback def __call__(self, environ, start_response): import wsgiref response = {} def sr(status,", "start_response, e.message) start_response(\"204 No Content\", []) return [] else: return do_405(environ, start_response) except", "return request_uri def handle_purge(environ, start_response): \"\"\" Handle a PURGE request. 
\"\"\" from utils", "html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\"> <head> <title>{status_code}", "from wsgiref import util request_uri = environ.get('REQUEST_URI', environ.get('RAW_URI', '')) if request_uri == '':", "'405 Method Not Allowed': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query: request_uri", "util.guess_scheme(environ) prefix = \"{scheme}://{host}\".format(scheme=scheme, host=host) request_uri = uri.replace(prefix, '') return request_uri def handle_purge(environ,", "Not Found': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query: request_uri = p.path", "why, debug): if debug: message = \"<h2>%s</h2>\" % why else: message = \"File", "'') scheme = util.guess_scheme(environ) prefix = \"{scheme}://{host}\".format(scheme=scheme, host=host) request_uri = uri.replace(prefix, '') return", "the path \"\"\" from wsgiref import util request_uri = environ.get('REQUEST_URI', environ.get('RAW_URI', '')) if", "handle_purge(environ, start_response): \"\"\" Handle a PURGE request. 
\"\"\" from utils import is_valid_security, get_cached_files", "+ p.query else: request_uri = p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) else:", "start_response): resp = { 'message': \"Method not allowed\", 'status_code': 405, 'status_message': 'Method Not", "urlparse class Http404(Exception): pass def get_path(environ): \"\"\" Get the path \"\"\" from wsgiref", "Not Allowed', } start_response(\"405 Method Not Allowed\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_404(environ,", "\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\"> <head> <title>{status_code} - {status_message}</title> </head> <body> <h1>{status_code} -", "start_response, 'Redirect Loop Detected') start_response(\"302 Found\", [(\"Location\", path)]) return [] def do_500(environ, start_response,", "not found\" resp = { 'message': message, 'status_code': 404, 'status_message': 'Not Found', }", "uri.replace(prefix, '') return request_uri def handle_purge(environ, start_response): \"\"\" Handle a PURGE request. 
\"\"\"", "server) for i in cached_files: try: os.remove(i) except OSError as e: return do_500(environ,", "Allowed', } start_response(\"405 Method Not Allowed\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_404(environ, start_response,", "Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\"> <head> <title>{status_code} - {status_message}</title> </head> <body> <h1>{status_code}", "get_path(environ) path_and_query = request_uri.lstrip(\"/\") query_string = environ.get('QUERY_STRING', '') if is_valid_security('PURGE', query_string): cached_files =", "'': uri = util.request_uri(environ) host = environ.get('HTTP_HOST', '') scheme = util.guess_scheme(environ) prefix =", "start_response, path): # if get_path(environ) == path: # return do_500(environ, start_response, 'Redirect Loop", "import BASE_PATH self.app = Cling(BASE_PATH) self.fallback = fallback def __call__(self, environ, start_response): import", "Cling from settings import BASE_PATH self.app = Cling(BASE_PATH) self.fallback = fallback def __call__(self,", "PURGE request. 
\"\"\" from utils import is_valid_security, get_cached_files from settings import DEBUG server", "path: # return do_500(environ, start_response, 'Redirect Loop Detected') start_response(\"302 Found\", [(\"Location\", path)]) return", "do_redirect(environ, start_response, path): # if get_path(environ) == path: # return do_500(environ, start_response, 'Redirect", "xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\"> <head> <title>{status_code} - {status_message}</title> </head> <body> <h1>{status_code} - {status_message}</h1>{message} </body>", "host = environ.get('HTTP_HOST', '') scheme = util.guess_scheme(environ) prefix = \"{scheme}://{host}\".format(scheme=scheme, host=host) request_uri =", "environ.get('QUERY_STRING', '') if is_valid_security('PURGE', query_string): cached_files = get_cached_files(path_and_query, server) for i in cached_files:", "is_valid_security, get_cached_files from settings import DEBUG server = environ['SERVER_NAME'] try: request_uri = get_path(environ)", "class DemoApp(object): def __init__(self, fallback): from static import Cling from settings import BASE_PATH", "= { 'message': \"Method not allowed\", 'status_code': 405, 'status_message': 'Method Not Allowed', }", "= { 'message': message, 'status_code': 404, 'status_message': 'Not Found', } start_response(\"404 Not Found\",", "} start_response(\"500 Internal Server Error\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_405(environ, start_response): resp", "\"\"\" class DemoApp(object): def __init__(self, fallback): from static import Cling from settings import", "request_uri = get_path(environ) path_and_query = request_uri.lstrip(\"/\") query_string = environ.get('QUERY_STRING', '') if is_valid_security('PURGE', query_string):", "</body> </html> \"\"\" class DemoApp(object): def __init__(self, fallback): from static import Cling from", "= request_uri.lstrip(\"/\") query_string = environ.get('QUERY_STRING', '') if is_valid_security('PURGE', 
query_string): cached_files = get_cached_files(path_and_query, server)", "a PURGE request. \"\"\" from utils import is_valid_security, get_cached_files from settings import DEBUG", "path_and_query = request_uri.lstrip(\"/\") query_string = environ.get('QUERY_STRING', '') if is_valid_security('PURGE', query_string): cached_files = get_cached_files(path_and_query,", "= get_cached_files(path_and_query, server) for i in cached_files: try: os.remove(i) except OSError as e:", "if is_valid_security('PURGE', query_string): cached_files = get_cached_files(path_and_query, server) for i in cached_files: try: os.remove(i)", "\"text/html\")]) return [ERROR.format(**resp)] ERROR = \"\"\" <?xml version=\"1.0\" encoding=\"iso-8859-1\"?> <!DOCTYPE html PUBLIC \"-//W3C//DTD", "import Cling from settings import BASE_PATH self.app = Cling(BASE_PATH) self.fallback = fallback def", "= { 'message': message, 'status_code': 500, 'status_message': 'Internal Server Error', } start_response(\"500 Internal", "sr(status, headers): response['status'] = status response['headers'] = headers result = self.app(environ, sr) if", "import os import urlparse class Http404(Exception): pass def get_path(environ): \"\"\" Get the path", "elif response['status'] == '405 Method Not Allowed': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri)", "found\" resp = { 'message': message, 'status_code': 404, 'status_message': 'Not Found', } start_response(\"404", "start_response, why, debug): if debug: message = \"<h2>%s</h2>\" % why else: message =", "fallback def __call__(self, environ, start_response): import wsgiref response = {} def sr(status, headers):", "ERROR = \"\"\" <?xml version=\"1.0\" encoding=\"iso-8859-1\"?> <!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\"", "prefix = \"{scheme}://{host}\".format(scheme=scheme, host=host) request_uri = uri.replace(prefix, '') return request_uri def handle_purge(environ, start_response):", "'')) if request_uri == '': uri = 
util.request_uri(environ) host = environ.get('HTTP_HOST', '') scheme", "= util.request_uri(environ) host = environ.get('HTTP_HOST', '') scheme = util.guess_scheme(environ) prefix = \"{scheme}://{host}\".format(scheme=scheme, host=host)", "query_string = environ.get('QUERY_STRING', '') if is_valid_security('PURGE', query_string): cached_files = get_cached_files(path_and_query, server) for i", "start_response, e.message, DEBUG) def do_redirect(environ, start_response, path): # if get_path(environ) == path: #", "{ 'message': message, 'status_code': 404, 'status_message': 'Not Found', } start_response(\"404 Not Found\", [(\"Content-Type\",", "</html> \"\"\" class DemoApp(object): def __init__(self, fallback): from static import Cling from settings", "status response['headers'] = headers result = self.app(environ, sr) if response['status'] == '404 Not", "return [ERROR.format(**resp)] def do_404(environ, start_response, why, debug): if debug: message = \"<h2>%s</h2>\" %", "def __call__(self, environ, start_response): import wsgiref response = {} def sr(status, headers): response['status']", "message = \"<h2>%s</h2>\" % why else: message = \"File not found\" resp =", "environ['SERVER_NAME'] try: request_uri = get_path(environ) path_and_query = request_uri.lstrip(\"/\") query_string = environ.get('QUERY_STRING', '') if", "404, 'status_message': 'Not Found', } start_response(\"404 Not Found\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] ERROR", "DemoApp(object): def __init__(self, fallback): from static import Cling from settings import BASE_PATH self.app", "encoding=\"iso-8859-1\"?> <!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\">", "== '': uri = util.request_uri(environ) host = environ.get('HTTP_HOST', '') scheme = util.guess_scheme(environ) prefix", "request_uri == '': uri = util.request_uri(environ) 
host = environ.get('HTTP_HOST', '') scheme = util.guess_scheme(environ)", "[(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_405(environ, start_response): resp = { 'message': \"Method not", "debug: message = \"<h2>%s</h2>\" % why else: message = \"File not found\" resp", "environ, start_response): import wsgiref response = {} def sr(status, headers): response['status'] = status", "response['headers'] = headers result = self.app(environ, sr) if response['status'] == '404 Not Found':", "Internal Server Error\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_405(environ, start_response): resp = {", "'status_code': 405, 'status_message': 'Method Not Allowed', } start_response(\"405 Method Not Allowed\", [(\"Content-Type\", \"text/html\")])", "os.remove(i) except OSError as e: return do_500(environ, start_response, e.message) start_response(\"204 No Content\", [])", "return do_500(environ, start_response, e.message) start_response(\"204 No Content\", []) return [] else: return do_405(environ,", "{ 'message': message, 'status_code': 500, 'status_message': 'Internal Server Error', } start_response(\"500 Internal Server", "do_404(environ, start_response, e.message, DEBUG) def do_redirect(environ, start_response, path): # if get_path(environ) == path:", "request_uri = p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) else: start_response(response['status'], response['headers']) return", "= headers result = self.app(environ, sr) if response['status'] == '404 Not Found': request_uri", "= environ['SERVER_NAME'] try: request_uri = get_path(environ) path_and_query = request_uri.lstrip(\"/\") query_string = environ.get('QUERY_STRING', '')", "server = environ['SERVER_NAME'] try: request_uri = get_path(environ) path_and_query = request_uri.lstrip(\"/\") query_string = environ.get('QUERY_STRING',", "as e: return do_404(environ, start_response, e.message, DEBUG) def do_redirect(environ, 
start_response, path): # if", "self.fallback(environ, start_response) elif response['status'] == '405 Method Not Allowed': request_uri = wsgiref.util.request_uri(environ) p", "urlparse.urlparse(request_uri) if p.query: request_uri = p.path + \"?\" + p.query else: request_uri =", "do_405(environ, start_response): resp = { 'message': \"Method not allowed\", 'status_code': 405, 'status_message': 'Method", "Http404 as e: return do_404(environ, start_response, e.message, DEBUG) def do_redirect(environ, start_response, path): #", "start_response(\"204 No Content\", []) return [] else: return do_405(environ, start_response) except Http404 as", "try: os.remove(i) except OSError as e: return do_500(environ, start_response, e.message) start_response(\"204 No Content\",", "return [ERROR.format(**resp)] def do_405(environ, start_response): resp = { 'message': \"Method not allowed\", 'status_code':", "Content\", []) return [] else: return do_405(environ, start_response) except Http404 as e: return", "\"\"\" <?xml version=\"1.0\" encoding=\"iso-8859-1\"?> <!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html", "} start_response(\"404 Not Found\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] ERROR = \"\"\" <?xml version=\"1.0\"", "[ERROR.format(**resp)] ERROR = \"\"\" <?xml version=\"1.0\" encoding=\"iso-8859-1\"?> <!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0", "<reponame>natgeosociety/Transmogrify import os import urlparse class Http404(Exception): pass def get_path(environ): \"\"\" Get the", "{status_message}</h1>{message} </body> </html> \"\"\" class DemoApp(object): def __init__(self, fallback): from static import Cling", "= status response['headers'] = headers result = self.app(environ, sr) if response['status'] == '404", "Found\", [(\"Location\", path)]) return [] def do_500(environ, start_response, message): resp = { 'message':", "'status_code': 404, 'status_message': 'Not 
Found', } start_response(\"404 Not Found\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)]", "request_uri = environ.get('REQUEST_URI', environ.get('RAW_URI', '')) if request_uri == '': uri = util.request_uri(environ) host", "'') return request_uri def handle_purge(environ, start_response): \"\"\" Handle a PURGE request. \"\"\" from", "pass def get_path(environ): \"\"\" Get the path \"\"\" from wsgiref import util request_uri", "response['status'] == '404 Not Found': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query:", "resp = { 'message': \"Method not allowed\", 'status_code': 405, 'status_message': 'Method Not Allowed',", "headers): response['status'] = status response['headers'] = headers result = self.app(environ, sr) if response['status']", "[(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_404(environ, start_response, why, debug): if debug: message =", "fallback): from static import Cling from settings import BASE_PATH self.app = Cling(BASE_PATH) self.fallback", "p.path + \"?\" + p.query else: request_uri = p.path environ['REQUEST_URI'] = request_uri return", "xml:lang=\"en\" lang=\"en\"> <head> <title>{status_code} - {status_message}</title> </head> <body> <h1>{status_code} - {status_message}</h1>{message} </body> </html>", "import DEBUG server = environ['SERVER_NAME'] try: request_uri = get_path(environ) path_and_query = request_uri.lstrip(\"/\") query_string", "Error', } start_response(\"500 Internal Server Error\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_405(environ, start_response):", "settings import BASE_PATH self.app = Cling(BASE_PATH) self.fallback = fallback def __call__(self, environ, start_response):", "def sr(status, headers): response['status'] = status response['headers'] = headers result = self.app(environ, sr)", "p.query: request_uri = p.path + \"?\" + p.query else: request_uri = p.path environ['REQUEST_URI']", "else: 
request_uri = p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) elif response['status'] ==", "start_response) elif response['status'] == '405 Method Not Allowed': request_uri = wsgiref.util.request_uri(environ) p =", "def do_500(environ, start_response, message): resp = { 'message': message, 'status_code': 500, 'status_message': 'Internal", "util.request_uri(environ) host = environ.get('HTTP_HOST', '') scheme = util.guess_scheme(environ) prefix = \"{scheme}://{host}\".format(scheme=scheme, host=host) request_uri", "Error\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_405(environ, start_response): resp = { 'message': \"Method", "return do_405(environ, start_response) except Http404 as e: return do_404(environ, start_response, e.message, DEBUG) def", "\"\"\" from utils import is_valid_security, get_cached_files from settings import DEBUG server = environ['SERVER_NAME']", "'Not Found', } start_response(\"404 Not Found\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] ERROR = \"\"\"", "class Http404(Exception): pass def get_path(environ): \"\"\" Get the path \"\"\" from wsgiref import", "start_response): \"\"\" Handle a PURGE request. 
\"\"\" from utils import is_valid_security, get_cached_files from", "result = self.app(environ, sr) if response['status'] == '404 Not Found': request_uri = wsgiref.util.request_uri(environ)", "<h1>{status_code} - {status_message}</h1>{message} </body> </html> \"\"\" class DemoApp(object): def __init__(self, fallback): from static", "= environ.get('QUERY_STRING', '') if is_valid_security('PURGE', query_string): cached_files = get_cached_files(path_and_query, server) for i in", "self.fallback = fallback def __call__(self, environ, start_response): import wsgiref response = {} def", "Not Found\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] ERROR = \"\"\" <?xml version=\"1.0\" encoding=\"iso-8859-1\"?> <!DOCTYPE", "\"<h2>%s</h2>\" % why else: message = \"File not found\" resp = { 'message':", "\"\"\" Handle a PURGE request. \"\"\" from utils import is_valid_security, get_cached_files from settings", "do_500(environ, start_response, e.message) start_response(\"204 No Content\", []) return [] else: return do_405(environ, start_response)", "from utils import is_valid_security, get_cached_files from settings import DEBUG server = environ['SERVER_NAME'] try:", "e: return do_404(environ, start_response, e.message, DEBUG) def do_redirect(environ, start_response, path): # if get_path(environ)", "environ.get('HTTP_HOST', '') scheme = util.guess_scheme(environ) prefix = \"{scheme}://{host}\".format(scheme=scheme, host=host) request_uri = uri.replace(prefix, '')", "def get_path(environ): \"\"\" Get the path \"\"\" from wsgiref import util request_uri =", "settings import DEBUG server = environ['SERVER_NAME'] try: request_uri = get_path(environ) path_and_query = request_uri.lstrip(\"/\")", "e: return do_500(environ, start_response, e.message) start_response(\"204 No Content\", []) return [] else: return", "= \"File not found\" resp = { 'message': message, 'status_code': 404, 'status_message': 'Not", "else: message = \"File not found\" resp = { 'message': 
message, 'status_code': 404,", "[]) return [] else: return do_405(environ, start_response) except Http404 as e: return do_404(environ,", "return do_404(environ, start_response, e.message, DEBUG) def do_redirect(environ, start_response, path): # if get_path(environ) ==", "'Redirect Loop Detected') start_response(\"302 Found\", [(\"Location\", path)]) return [] def do_500(environ, start_response, message):", "= p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) else: start_response(response['status'], response['headers']) return result", "cached_files = get_cached_files(path_and_query, server) for i in cached_files: try: os.remove(i) except OSError as", "message, 'status_code': 404, 'status_message': 'Not Found', } start_response(\"404 Not Found\", [(\"Content-Type\", \"text/html\")]) return", "Found', } start_response(\"404 Not Found\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] ERROR = \"\"\" <?xml", "\"File not found\" resp = { 'message': message, 'status_code': 404, 'status_message': 'Not Found',", "\"?\" + p.query else: request_uri = p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response)", "if request_uri == '': uri = util.request_uri(environ) host = environ.get('HTTP_HOST', '') scheme =", "return [ERROR.format(**resp)] ERROR = \"\"\" <?xml version=\"1.0\" encoding=\"iso-8859-1\"?> <!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML", "not allowed\", 'status_code': 405, 'status_message': 'Method Not Allowed', } start_response(\"405 Method Not Allowed\",", "import wsgiref response = {} def sr(status, headers): response['status'] = status response['headers'] =", "get_path(environ) == path: # return do_500(environ, start_response, 'Redirect Loop Detected') start_response(\"302 Found\", [(\"Location\",", "except Http404 as e: return do_404(environ, start_response, e.message, DEBUG) def do_redirect(environ, start_response, path):", "message = \"File not found\" resp = { 'message': 
message, 'status_code': 404, 'status_message':", "request_uri = p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) elif response['status'] == '405", "else: request_uri = p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) else: start_response(response['status'], response['headers'])", "# if get_path(environ) == path: # return do_500(environ, start_response, 'Redirect Loop Detected') start_response(\"302", "\"{scheme}://{host}\".format(scheme=scheme, host=host) request_uri = uri.replace(prefix, '') return request_uri def handle_purge(environ, start_response): \"\"\" Handle", "message, 'status_code': 500, 'status_message': 'Internal Server Error', } start_response(\"500 Internal Server Error\", [(\"Content-Type\",", "def do_redirect(environ, start_response, path): # if get_path(environ) == path: # return do_500(environ, start_response,", "host=host) request_uri = uri.replace(prefix, '') return request_uri def handle_purge(environ, start_response): \"\"\" Handle a", "1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\"> <head> <title>{status_code} - {status_message}</title> </head> <body>", "Get the path \"\"\" from wsgiref import util request_uri = environ.get('REQUEST_URI', environ.get('RAW_URI', ''))", "'status_message': 'Method Not Allowed', } start_response(\"405 Method Not Allowed\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)]", "lang=\"en\"> <head> <title>{status_code} - {status_message}</title> </head> <body> <h1>{status_code} - {status_message}</h1>{message} </body> </html> \"\"\"", "{} def sr(status, headers): response['status'] = status response['headers'] = headers result = self.app(environ,", "environ.get('REQUEST_URI', environ.get('RAW_URI', '')) if request_uri == '': uri = util.request_uri(environ) host = environ.get('HTTP_HOST',", "request_uri = 
uri.replace(prefix, '') return request_uri def handle_purge(environ, start_response): \"\"\" Handle a PURGE", "= urlparse.urlparse(request_uri) if p.query: request_uri = p.path + \"?\" + p.query else: request_uri", "Not Allowed\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def do_404(environ, start_response, why, debug): if debug:", "\"Method not allowed\", 'status_code': 405, 'status_message': 'Method Not Allowed', } start_response(\"405 Method Not", "405, 'status_message': 'Method Not Allowed', } start_response(\"405 Method Not Allowed\", [(\"Content-Type\", \"text/html\")]) return", "do_404(environ, start_response, why, debug): if debug: message = \"<h2>%s</h2>\" % why else: message", "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\" lang=\"en\"> <head>", "return do_500(environ, start_response, 'Redirect Loop Detected') start_response(\"302 Found\", [(\"Location\", path)]) return [] def", "start_response, message): resp = { 'message': message, 'status_code': 500, 'status_message': 'Internal Server Error',", "[(\"Location\", path)]) return [] def do_500(environ, start_response, message): resp = { 'message': message,", "[ERROR.format(**resp)] def do_405(environ, start_response): resp = { 'message': \"Method not allowed\", 'status_code': 405,", "def do_405(environ, start_response): resp = { 'message': \"Method not allowed\", 'status_code': 405, 'status_message':", "{ 'message': \"Method not allowed\", 'status_code': 405, 'status_message': 'Method Not Allowed', } start_response(\"405", "import urlparse class Http404(Exception): pass def get_path(environ): \"\"\" Get the path \"\"\" from", "path): # if get_path(environ) == path: # return do_500(environ, start_response, 'Redirect Loop Detected')", "500, 'status_message': 'Internal Server Error', } start_response(\"500 Internal Server Error\", 
[(\"Content-Type\", \"text/html\")]) return", "Allowed': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query: request_uri = p.path +", "= \"\"\" <?xml version=\"1.0\" encoding=\"iso-8859-1\"?> <!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">", "[ERROR.format(**resp)] def do_404(environ, start_response, why, debug): if debug: message = \"<h2>%s</h2>\" % why", "static import Cling from settings import BASE_PATH self.app = Cling(BASE_PATH) self.fallback = fallback", "def __init__(self, fallback): from static import Cling from settings import BASE_PATH self.app =", "\"\"\" from wsgiref import util request_uri = environ.get('REQUEST_URI', environ.get('RAW_URI', '')) if request_uri ==", "+ \"?\" + p.query else: request_uri = p.path environ['REQUEST_URI'] = request_uri return self.fallback(environ,", "for i in cached_files: try: os.remove(i) except OSError as e: return do_500(environ, start_response,", "if get_path(environ) == path: # return do_500(environ, start_response, 'Redirect Loop Detected') start_response(\"302 Found\",", "Found\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] ERROR = \"\"\" <?xml version=\"1.0\" encoding=\"iso-8859-1\"?> <!DOCTYPE html", "path)]) return [] def do_500(environ, start_response, message): resp = { 'message': message, 'status_code':", "= \"{scheme}://{host}\".format(scheme=scheme, host=host) request_uri = uri.replace(prefix, '') return request_uri def handle_purge(environ, start_response): \"\"\"", "get_cached_files(path_and_query, server) for i in cached_files: try: os.remove(i) except OSError as e: return", "environ['REQUEST_URI'] = request_uri return self.fallback(environ, start_response) elif response['status'] == '405 Method Not Allowed':", "response['status'] == '405 Method Not Allowed': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if", 
"'status_message': 'Internal Server Error', } start_response(\"500 Internal Server Error\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)]", "do_405(environ, start_response) except Http404 as e: return do_404(environ, start_response, e.message, DEBUG) def do_redirect(environ,", "version=\"1.0\" encoding=\"iso-8859-1\"?> <!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\"> <html xmlns=\"http://www.w3.org/1999/xhtml\" xml:lang=\"en\"", "resp = { 'message': message, 'status_code': 500, 'status_message': 'Internal Server Error', } start_response(\"500", "do_500(environ, start_response, 'Redirect Loop Detected') start_response(\"302 Found\", [(\"Location\", path)]) return [] def do_500(environ,", "= \"<h2>%s</h2>\" % why else: message = \"File not found\" resp = {", "start_response(\"302 Found\", [(\"Location\", path)]) return [] def do_500(environ, start_response, message): resp = {", "utils import is_valid_security, get_cached_files from settings import DEBUG server = environ['SERVER_NAME'] try: request_uri", "\"text/html\")]) return [ERROR.format(**resp)] def do_405(environ, start_response): resp = { 'message': \"Method not allowed\",", "headers result = self.app(environ, sr) if response['status'] == '404 Not Found': request_uri =", "</head> <body> <h1>{status_code} - {status_message}</h1>{message} </body> </html> \"\"\" class DemoApp(object): def __init__(self, fallback):", "\"text/html\")]) return [ERROR.format(**resp)] def do_404(environ, start_response, why, debug): if debug: message = \"<h2>%s</h2>\"", "start_response(\"404 Not Found\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] ERROR = \"\"\" <?xml version=\"1.0\" encoding=\"iso-8859-1\"?>", "= {} def sr(status, headers): response['status'] = status response['headers'] = headers result =", "= wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri) if p.query: request_uri = p.path + 
\"?\" +", "else: return do_405(environ, start_response) except Http404 as e: return do_404(environ, start_response, e.message, DEBUG)", "sr) if response['status'] == '404 Not Found': request_uri = wsgiref.util.request_uri(environ) p = urlparse.urlparse(request_uri)", "request_uri = p.path + \"?\" + p.query else: request_uri = p.path environ['REQUEST_URI'] =", "'Internal Server Error', } start_response(\"500 Internal Server Error\", [(\"Content-Type\", \"text/html\")]) return [ERROR.format(**resp)] def", "message): resp = { 'message': message, 'status_code': 500, 'status_message': 'Internal Server Error', }" ]
[ "= int(partsB[ind]) listCoordinates.append(partsB) listCentroid = centroid(listCoordinates) listSmaller, listBigger = coordinatesDistance(listCoordinates, listCentroid) print(\"Centroid: \",", "yCentroid = 0 listCentroid=[] for i in range(0, len(listCoordinates)): xCentroid += listCoordinates[i][0] yCentroid", "xCentroid = round(xCentroid / len(listCoordinates), 1) yCentroid = round(yCentroid / len(listCoordinates), 1) listCentroid.append(xCentroid)", "round(xCentroid / len(listCoordinates), 1) yCentroid = round(yCentroid / len(listCoordinates), 1) listCentroid.append(xCentroid) listCentroid.append(yCentroid) return", "= name.split() if name == \"\": print(\"No points read. So there is no", "listCentroid.append(yCentroid) return listCentroid def coordinatesDistance(listCoordinates, centroid): listSmaller=[] listBigger=[] listDistance=[] for i in range(0,", "listCentroid) print (\"Closest point to the Centroid: \", listSmaller) print (\"Farthest point from", "listCoordinates[i][0] yCentroid += listCoordinates[i][1] xCentroid = round(xCentroid / len(listCoordinates), 1) yCentroid = round(yCentroid", "\"\": break else: partsB = name.split() for ind in range(0,2): partsB[ind] = int(partsB[ind])", "math def centroid(listCoordinates): xCentroid = 0 yCentroid = 0 listCentroid=[] for i in", "in range(0,2): partsA[ind] = int(partsA[ind]) listCoordinates.append(partsA) i = 1 while i >= 1:", "range(0, len(listCoordinates)): distance = math.sqrt(((listCoordinates[i][0] - centroid[0]) ** 2) + ((listCoordinates[i][1] - centroid[1])", "\") partsA = name.split() if name == \"\": print(\"No points read. 
So there", "0 listCentroid=[] for i in range(0, len(listCoordinates)): xCentroid += listCoordinates[i][0] yCentroid += listCoordinates[i][1]", "in range(0, len(listCoordinates)): xCentroid += listCoordinates[i][0] yCentroid += listCoordinates[i][1] xCentroid = round(xCentroid /", "listBigger = listCoordinates[i] return listSmaller, listBigger #--------------------------Program Body-------------------------------------------------------------- listCoordinates = [] name =", "import math def centroid(listCoordinates): xCentroid = 0 yCentroid = 0 listCentroid=[] for i", "= round(yCentroid / len(listCoordinates), 1) listCentroid.append(xCentroid) listCentroid.append(yCentroid) return listCentroid def coordinatesDistance(listCoordinates, centroid): listSmaller=[]", "temp_b = listDistance[0] for i in range(1, len(listDistance)): if listDistance[i] > temp_b: temp_b", "= int(partsA[ind]) listCoordinates.append(partsA) i = 1 while i >= 1: name = input(\"Type", "listDistance=[] for i in range(0, len(listCoordinates)): distance = math.sqrt(((listCoordinates[i][0] - centroid[0]) ** 2)", "len(listCoordinates)): xCentroid += listCoordinates[i][0] yCentroid += listCoordinates[i][1] xCentroid = round(xCentroid / len(listCoordinates), 1)", "i in range(1, len(listDistance)): if listDistance[i] > temp_b: temp_b = listDistance[i] listBigger =", "listCentroid = centroid(listCoordinates) listSmaller, listBigger = coordinatesDistance(listCoordinates, listCentroid) print(\"Centroid: \", listCentroid) print (\"Closest", "listCentroid def coordinatesDistance(listCoordinates, centroid): listSmaller=[] listBigger=[] listDistance=[] for i in range(0, len(listCoordinates)): distance", "listDistance[i] < temp_a: temp_b = listDistance[i] listSmaller = listCoordinates[i] temp_b = listDistance[0] for", "\") if name == \"\": break else: partsB = name.split() for ind in", "range(0,2): partsA[ind] = int(partsA[ind]) listCoordinates.append(partsA) i = 1 while i >= 1: name", "centroid[0]) ** 2) + 
((listCoordinates[i][1] - centroid[1]) ** 2)) listDistance.append(distance) temp_a = listDistance[0]", "= name.split() for ind in range(0,2): partsB[ind] = int(partsB[ind]) listCoordinates.append(partsB) listCentroid = centroid(listCoordinates)", "listDistance[i] listSmaller = listCoordinates[i] temp_b = listDistance[0] for i in range(1, len(listDistance)): if", "temp_b = listDistance[i] listBigger = listCoordinates[i] return listSmaller, listBigger #--------------------------Program Body-------------------------------------------------------------- listCoordinates =", "temp_b = listDistance[i] listSmaller = listCoordinates[i] temp_b = listDistance[0] for i in range(1,", "coordinatesDistance(listCoordinates, centroid): listSmaller=[] listBigger=[] listDistance=[] for i in range(0, len(listCoordinates)): distance = math.sqrt(((listCoordinates[i][0]", "yCentroid += listCoordinates[i][1] xCentroid = round(xCentroid / len(listCoordinates), 1) yCentroid = round(yCentroid /", "len(listDistance)): if listDistance[i] < temp_a: temp_b = listDistance[i] listSmaller = listCoordinates[i] temp_b =", "= listDistance[i] listBigger = listCoordinates[i] return listSmaller, listBigger #--------------------------Program Body-------------------------------------------------------------- listCoordinates = []", "+= listCoordinates[i][1] xCentroid = round(xCentroid / len(listCoordinates), 1) yCentroid = round(yCentroid / len(listCoordinates),", "= centroid(listCoordinates) listSmaller, listBigger = coordinatesDistance(listCoordinates, listCentroid) print(\"Centroid: \", listCentroid) print (\"Closest point", "if name == \"\": break else: partsB = name.split() for ind in range(0,2):", "def coordinatesDistance(listCoordinates, centroid): listSmaller=[] listBigger=[] listDistance=[] for i in range(0, len(listCoordinates)): distance =", "in range(0,2): partsB[ind] = int(partsB[ind]) listCoordinates.append(partsB) listCentroid = centroid(listCoordinates) listSmaller, listBigger = 
coordinatesDistance(listCoordinates,", "if name == \"\": print(\"No points read. So there is no centroid!!!\") exit()", "(\"Closest point to the Centroid: \", listSmaller) print (\"Farthest point from the Centroid:", "xCentroid = 0 yCentroid = 0 listCentroid=[] for i in range(0, len(listCoordinates)): xCentroid", "partsA = name.split() if name == \"\": print(\"No points read. So there is", "= listDistance[i] listSmaller = listCoordinates[i] temp_b = listDistance[0] for i in range(1, len(listDistance)):", "= round(xCentroid / len(listCoordinates), 1) yCentroid = round(yCentroid / len(listCoordinates), 1) listCentroid.append(xCentroid) listCentroid.append(yCentroid)", "So there is no centroid!!!\") exit() for ind in range(0,2): partsA[ind] = int(partsA[ind])", "listCentroid.append(xCentroid) listCentroid.append(yCentroid) return listCentroid def coordinatesDistance(listCoordinates, centroid): listSmaller=[] listBigger=[] listDistance=[] for i in", "name.split() if name == \"\": print(\"No points read. So there is no centroid!!!\")", "name == \"\": print(\"No points read. 
So there is no centroid!!!\") exit() for", "listCoordinates[i] temp_b = listDistance[0] for i in range(1, len(listDistance)): if listDistance[i] > temp_b:", "len(listCoordinates), 1) listCentroid.append(xCentroid) listCentroid.append(yCentroid) return listCentroid def coordinatesDistance(listCoordinates, centroid): listSmaller=[] listBigger=[] listDistance=[] for", "in range(1, len(listDistance)): if listDistance[i] > temp_b: temp_b = listDistance[i] listBigger = listCoordinates[i]", "listCoordinates.append(partsB) listCentroid = centroid(listCoordinates) listSmaller, listBigger = coordinatesDistance(listCoordinates, listCentroid) print(\"Centroid: \", listCentroid) print", "listSmaller = listCoordinates[i] temp_b = listDistance[0] for i in range(1, len(listDistance)): if listDistance[i]", "else: partsB = name.split() for ind in range(0,2): partsB[ind] = int(partsB[ind]) listCoordinates.append(partsB) listCentroid", "listDistance[i] > temp_b: temp_b = listDistance[i] listBigger = listCoordinates[i] return listSmaller, listBigger #--------------------------Program", "1) yCentroid = round(yCentroid / len(listCoordinates), 1) listCentroid.append(xCentroid) listCentroid.append(yCentroid) return listCentroid def coordinatesDistance(listCoordinates,", "input(\"Type a coordinate: \") partsA = name.split() if name == \"\": print(\"No points", "while i >= 1: name = input(\"Type a coordinate: \") if name ==", "listCentroid) print(\"Centroid: \", listCentroid) print (\"Closest point to the Centroid: \", listSmaller) print", "listSmaller, listBigger = coordinatesDistance(listCoordinates, listCentroid) print(\"Centroid: \", listCentroid) print (\"Closest point to the", "listDistance[0] for i in range(1, len(listDistance)): if listDistance[i] > temp_b: temp_b = listDistance[i]", "return listSmaller, listBigger #--------------------------Program Body-------------------------------------------------------------- listCoordinates = [] name = input(\"Type a coordinate:", "/ 
len(listCoordinates), 1) yCentroid = round(yCentroid / len(listCoordinates), 1) listCentroid.append(xCentroid) listCentroid.append(yCentroid) return listCentroid", "= listDistance[0] for i in range(1, len(listDistance)): if listDistance[i] > temp_b: temp_b =", "centroid[1]) ** 2)) listDistance.append(distance) temp_a = listDistance[0] for i in range(1, len(listDistance)): if", "if listDistance[i] < temp_a: temp_b = listDistance[i] listSmaller = listCoordinates[i] temp_b = listDistance[0]", "< temp_a: temp_b = listDistance[i] listSmaller = listCoordinates[i] temp_b = listDistance[0] for i", "point to the Centroid: \", listSmaller) print (\"Farthest point from the Centroid: \",", "i = 1 while i >= 1: name = input(\"Type a coordinate: \")", "name == \"\": break else: partsB = name.split() for ind in range(0,2): partsB[ind]", "listDistance.append(distance) temp_a = listDistance[0] for i in range(1, len(listDistance)): if listDistance[i] < temp_a:", "listBigger = coordinatesDistance(listCoordinates, listCentroid) print(\"Centroid: \", listCentroid) print (\"Closest point to the Centroid:", "yCentroid = round(yCentroid / len(listCoordinates), 1) listCentroid.append(xCentroid) listCentroid.append(yCentroid) return listCentroid def coordinatesDistance(listCoordinates, centroid):", "to the Centroid: \", listSmaller) print (\"Farthest point from the Centroid: \", listBigger)", "[] name = input(\"Type a coordinate: \") partsA = name.split() if name ==", "is no centroid!!!\") exit() for ind in range(0,2): partsA[ind] = int(partsA[ind]) listCoordinates.append(partsA) i", "listCoordinates.append(partsA) i = 1 while i >= 1: name = input(\"Type a coordinate:", "range(0, len(listCoordinates)): xCentroid += listCoordinates[i][0] yCentroid += listCoordinates[i][1] xCentroid = round(xCentroid / len(listCoordinates),", "listDistance[i] listBigger = listCoordinates[i] return listSmaller, listBigger #--------------------------Program 
Body-------------------------------------------------------------- listCoordinates = [] name", "listSmaller, listBigger #--------------------------Program Body-------------------------------------------------------------- listCoordinates = [] name = input(\"Type a coordinate: \")", "no centroid!!!\") exit() for ind in range(0,2): partsA[ind] = int(partsA[ind]) listCoordinates.append(partsA) i =", "listBigger #--------------------------Program Body-------------------------------------------------------------- listCoordinates = [] name = input(\"Type a coordinate: \") partsA", "temp_b: temp_b = listDistance[i] listBigger = listCoordinates[i] return listSmaller, listBigger #--------------------------Program Body-------------------------------------------------------------- listCoordinates", "= math.sqrt(((listCoordinates[i][0] - centroid[0]) ** 2) + ((listCoordinates[i][1] - centroid[1]) ** 2)) listDistance.append(distance)", "i in range(0, len(listCoordinates)): distance = math.sqrt(((listCoordinates[i][0] - centroid[0]) ** 2) + ((listCoordinates[i][1]", "coordinatesDistance(listCoordinates, listCentroid) print(\"Centroid: \", listCentroid) print (\"Closest point to the Centroid: \", listSmaller)", "listBigger=[] listDistance=[] for i in range(0, len(listCoordinates)): distance = math.sqrt(((listCoordinates[i][0] - centroid[0]) **", "\"\": print(\"No points read. So there is no centroid!!!\") exit() for ind in", "== \"\": break else: partsB = name.split() for ind in range(0,2): partsB[ind] =", "- centroid[1]) ** 2)) listDistance.append(distance) temp_a = listDistance[0] for i in range(1, len(listDistance)):", "coordinate: \") partsA = name.split() if name == \"\": print(\"No points read. 
So", "ind in range(0,2): partsA[ind] = int(partsA[ind]) listCoordinates.append(partsA) i = 1 while i >=", "<gh_stars>0 import math def centroid(listCoordinates): xCentroid = 0 yCentroid = 0 listCentroid=[] for", "= 0 listCentroid=[] for i in range(0, len(listCoordinates)): xCentroid += listCoordinates[i][0] yCentroid +=", "int(partsA[ind]) listCoordinates.append(partsA) i = 1 while i >= 1: name = input(\"Type a", "partsB = name.split() for ind in range(0,2): partsB[ind] = int(partsB[ind]) listCoordinates.append(partsB) listCentroid =", "listCoordinates[i] return listSmaller, listBigger #--------------------------Program Body-------------------------------------------------------------- listCoordinates = [] name = input(\"Type a", "temp_a: temp_b = listDistance[i] listSmaller = listCoordinates[i] temp_b = listDistance[0] for i in", "math.sqrt(((listCoordinates[i][0] - centroid[0]) ** 2) + ((listCoordinates[i][1] - centroid[1]) ** 2)) listDistance.append(distance) temp_a", "return listCentroid def coordinatesDistance(listCoordinates, centroid): listSmaller=[] listBigger=[] listDistance=[] for i in range(0, len(listCoordinates)):", "name.split() for ind in range(0,2): partsB[ind] = int(partsB[ind]) listCoordinates.append(partsB) listCentroid = centroid(listCoordinates) listSmaller,", "** 2)) listDistance.append(distance) temp_a = listDistance[0] for i in range(1, len(listDistance)): if listDistance[i]", "= listDistance[0] for i in range(1, len(listDistance)): if listDistance[i] < temp_a: temp_b =", "len(listCoordinates), 1) yCentroid = round(yCentroid / len(listCoordinates), 1) listCentroid.append(xCentroid) listCentroid.append(yCentroid) return listCentroid def", "#--------------------------Program Body-------------------------------------------------------------- listCoordinates = [] name = input(\"Type a coordinate: \") partsA =", "listCoordinates = [] name = input(\"Type a coordinate: \") partsA = name.split() if", "= listCoordinates[i] return listSmaller, 
listBigger #--------------------------Program Body-------------------------------------------------------------- listCoordinates = [] name = input(\"Type", "listDistance[0] for i in range(1, len(listDistance)): if listDistance[i] < temp_a: temp_b = listDistance[i]", "input(\"Type a coordinate: \") if name == \"\": break else: partsB = name.split()", "for ind in range(0,2): partsB[ind] = int(partsB[ind]) listCoordinates.append(partsB) listCentroid = centroid(listCoordinates) listSmaller, listBigger", "= 0 yCentroid = 0 listCentroid=[] for i in range(0, len(listCoordinates)): xCentroid +=", "len(listDistance)): if listDistance[i] > temp_b: temp_b = listDistance[i] listBigger = listCoordinates[i] return listSmaller,", "> temp_b: temp_b = listDistance[i] listBigger = listCoordinates[i] return listSmaller, listBigger #--------------------------Program Body--------------------------------------------------------------", "break else: partsB = name.split() for ind in range(0,2): partsB[ind] = int(partsB[ind]) listCoordinates.append(partsB)", "centroid(listCoordinates) listSmaller, listBigger = coordinatesDistance(listCoordinates, listCentroid) print(\"Centroid: \", listCentroid) print (\"Closest point to", "0 yCentroid = 0 listCentroid=[] for i in range(0, len(listCoordinates)): xCentroid += listCoordinates[i][0]", "+= listCoordinates[i][0] yCentroid += listCoordinates[i][1] xCentroid = round(xCentroid / len(listCoordinates), 1) yCentroid =", "= input(\"Type a coordinate: \") if name == \"\": break else: partsB =", "name = input(\"Type a coordinate: \") if name == \"\": break else: partsB", "partsA[ind] = int(partsA[ind]) listCoordinates.append(partsA) i = 1 while i >= 1: name =", "2) + ((listCoordinates[i][1] - centroid[1]) ** 2)) listDistance.append(distance) temp_a = listDistance[0] for i", "if listDistance[i] > temp_b: temp_b = listDistance[i] listBigger = listCoordinates[i] return listSmaller, listBigger", "ind in range(0,2): partsB[ind] = int(partsB[ind]) 
listCoordinates.append(partsB) listCentroid = centroid(listCoordinates) listSmaller, listBigger =", "len(listCoordinates)): distance = math.sqrt(((listCoordinates[i][0] - centroid[0]) ** 2) + ((listCoordinates[i][1] - centroid[1]) **", "a coordinate: \") partsA = name.split() if name == \"\": print(\"No points read.", "temp_a = listDistance[0] for i in range(1, len(listDistance)): if listDistance[i] < temp_a: temp_b", "2)) listDistance.append(distance) temp_a = listDistance[0] for i in range(1, len(listDistance)): if listDistance[i] <", "xCentroid += listCoordinates[i][0] yCentroid += listCoordinates[i][1] xCentroid = round(xCentroid / len(listCoordinates), 1) yCentroid", "Body-------------------------------------------------------------- listCoordinates = [] name = input(\"Type a coordinate: \") partsA = name.split()", "1 while i >= 1: name = input(\"Type a coordinate: \") if name", "i in range(1, len(listDistance)): if listDistance[i] < temp_a: temp_b = listDistance[i] listSmaller =", "def centroid(listCoordinates): xCentroid = 0 yCentroid = 0 listCentroid=[] for i in range(0,", "= [] name = input(\"Type a coordinate: \") partsA = name.split() if name", "/ len(listCoordinates), 1) listCentroid.append(xCentroid) listCentroid.append(yCentroid) return listCentroid def coordinatesDistance(listCoordinates, centroid): listSmaller=[] listBigger=[] listDistance=[]", "centroid(listCoordinates): xCentroid = 0 yCentroid = 0 listCentroid=[] for i in range(0, len(listCoordinates)):", "for i in range(0, len(listCoordinates)): xCentroid += listCoordinates[i][0] yCentroid += listCoordinates[i][1] xCentroid =", "1: name = input(\"Type a coordinate: \") if name == \"\": break else:", "in range(0, len(listCoordinates)): distance = math.sqrt(((listCoordinates[i][0] - centroid[0]) ** 2) + ((listCoordinates[i][1] -", "i in range(0, len(listCoordinates)): xCentroid += listCoordinates[i][0] yCentroid += listCoordinates[i][1] xCentroid = round(xCentroid", "= 
coordinatesDistance(listCoordinates, listCentroid) print(\"Centroid: \", listCentroid) print (\"Closest point to the Centroid: \",", "print(\"No points read. So there is no centroid!!!\") exit() for ind in range(0,2):", "int(partsB[ind]) listCoordinates.append(partsB) listCentroid = centroid(listCoordinates) listSmaller, listBigger = coordinatesDistance(listCoordinates, listCentroid) print(\"Centroid: \", listCentroid)", "a coordinate: \") if name == \"\": break else: partsB = name.split() for", "1) listCentroid.append(xCentroid) listCentroid.append(yCentroid) return listCentroid def coordinatesDistance(listCoordinates, centroid): listSmaller=[] listBigger=[] listDistance=[] for i", "read. So there is no centroid!!!\") exit() for ind in range(0,2): partsA[ind] =", "i >= 1: name = input(\"Type a coordinate: \") if name == \"\":", "print (\"Closest point to the Centroid: \", listSmaller) print (\"Farthest point from the", "range(1, len(listDistance)): if listDistance[i] > temp_b: temp_b = listDistance[i] listBigger = listCoordinates[i] return", "coordinate: \") if name == \"\": break else: partsB = name.split() for ind", "partsB[ind] = int(partsB[ind]) listCoordinates.append(partsB) listCentroid = centroid(listCoordinates) listSmaller, listBigger = coordinatesDistance(listCoordinates, listCentroid) print(\"Centroid:", "= 1 while i >= 1: name = input(\"Type a coordinate: \") if", "for i in range(0, len(listCoordinates)): distance = math.sqrt(((listCoordinates[i][0] - centroid[0]) ** 2) +", "listCoordinates[i][1] xCentroid = round(xCentroid / len(listCoordinates), 1) yCentroid = round(yCentroid / len(listCoordinates), 1)", "\", listCentroid) print (\"Closest point to the Centroid: \", listSmaller) print (\"Farthest point", "((listCoordinates[i][1] - centroid[1]) ** 2)) listDistance.append(distance) temp_a = listDistance[0] for i in range(1,", "round(yCentroid / len(listCoordinates), 1) listCentroid.append(xCentroid) listCentroid.append(yCentroid) return listCentroid 
def coordinatesDistance(listCoordinates, centroid): listSmaller=[] listBigger=[]", "range(1, len(listDistance)): if listDistance[i] < temp_a: temp_b = listDistance[i] listSmaller = listCoordinates[i] temp_b", "points read. So there is no centroid!!!\") exit() for ind in range(0,2): partsA[ind]", "for ind in range(0,2): partsA[ind] = int(partsA[ind]) listCoordinates.append(partsA) i = 1 while i", "= input(\"Type a coordinate: \") partsA = name.split() if name == \"\": print(\"No", "listCentroid=[] for i in range(0, len(listCoordinates)): xCentroid += listCoordinates[i][0] yCentroid += listCoordinates[i][1] xCentroid", "for i in range(1, len(listDistance)): if listDistance[i] > temp_b: temp_b = listDistance[i] listBigger", "there is no centroid!!!\") exit() for ind in range(0,2): partsA[ind] = int(partsA[ind]) listCoordinates.append(partsA)", ">= 1: name = input(\"Type a coordinate: \") if name == \"\": break", "in range(1, len(listDistance)): if listDistance[i] < temp_a: temp_b = listDistance[i] listSmaller = listCoordinates[i]", "centroid!!!\") exit() for ind in range(0,2): partsA[ind] = int(partsA[ind]) listCoordinates.append(partsA) i = 1", "listSmaller=[] listBigger=[] listDistance=[] for i in range(0, len(listCoordinates)): distance = math.sqrt(((listCoordinates[i][0] - centroid[0])", "distance = math.sqrt(((listCoordinates[i][0] - centroid[0]) ** 2) + ((listCoordinates[i][1] - centroid[1]) ** 2))", "range(0,2): partsB[ind] = int(partsB[ind]) listCoordinates.append(partsB) listCentroid = centroid(listCoordinates) listSmaller, listBigger = coordinatesDistance(listCoordinates, listCentroid)", "centroid): listSmaller=[] listBigger=[] listDistance=[] for i in range(0, len(listCoordinates)): distance = math.sqrt(((listCoordinates[i][0] -", "exit() for ind in range(0,2): partsA[ind] = int(partsA[ind]) listCoordinates.append(partsA) i = 1 while", "= listCoordinates[i] temp_b = listDistance[0] for i in range(1, len(listDistance)): if listDistance[i] >", 
"print(\"Centroid: \", listCentroid) print (\"Closest point to the Centroid: \", listSmaller) print (\"Farthest", "- centroid[0]) ** 2) + ((listCoordinates[i][1] - centroid[1]) ** 2)) listDistance.append(distance) temp_a =", "name = input(\"Type a coordinate: \") partsA = name.split() if name == \"\":", "+ ((listCoordinates[i][1] - centroid[1]) ** 2)) listDistance.append(distance) temp_a = listDistance[0] for i in", "== \"\": print(\"No points read. So there is no centroid!!!\") exit() for ind", "** 2) + ((listCoordinates[i][1] - centroid[1]) ** 2)) listDistance.append(distance) temp_a = listDistance[0] for", "for i in range(1, len(listDistance)): if listDistance[i] < temp_a: temp_b = listDistance[i] listSmaller" ]
[ "is governed by the Apache v2.0 license that can be # found in", "response.body.get('host_token') self.assertTrue(token) # Verify it is usable. response = self.app.client.json_request( '/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1': token})", "CloudEndpointsSmokeTest(unittest.TestCase): def setUp(self): super(CloudEndpointsSmokeTest, self).setUp() self.app = local_app.LocalApplication(TEST_APP_DIR, 9700) self.app.start() self.app.ensure_serving() def tearDown(self):", "test for Cloud Endpoints support in auth component. It launches app via dev_appserver", "the source code. I'm not sure it's a good idea. pass def check_forbidden(self):", "pylint: disable=E1101 return not self._resultForDoCleanups.wasSuccessful() def test_smoke(self): self.check_who_anonymous() self.check_who_authenticated() self.check_host_token() self.check_forbidden() def check_who_anonymous(self):", "of cloud endpoints methods. \"\"\" import unittest import os import test_env test_env.setup_test_env() from", "OAuth2 service # to get OAuth2 token. It's doable, but the service account", "file. \"\"\"Smoke test for Cloud Endpoints support in auth component. It launches app", "'/_ah/api/testing_service/v1/forbidden') self.assertEqual(403, response.http_code) expected = { u'error': { u'code': 403, u'errors': [ {", "# /components/tests/. THIS_DIR = os.path.dirname(os.path.abspath(__file__)) # /components/tests/endpoints_app/. TEST_APP_DIR = os.path.join(THIS_DIR, 'endpoints_app') class CloudEndpointsSmokeTest(unittest.TestCase):", "hardcoded into the source code. I'm not sure it's a good idea. pass", "be # found in the LICENSE file. \"\"\"Smoke test for Cloud Endpoints support", "test_env test_env.setup_test_env() from support import local_app # /components/tests/. 
THIS_DIR = os.path.dirname(os.path.abspath(__file__)) # /components/tests/endpoints_app/.", "return not self._resultForDoCleanups.wasSuccessful() def test_smoke(self): self.check_who_anonymous() self.check_who_authenticated() self.check_host_token() self.check_forbidden() def check_who_anonymous(self): response =", "# be hardcoded into the source code. I'm not sure it's a good", "first. response = self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'}) self.assertEqual(200, response.http_code) token = response.body.get('host_token') self.assertTrue(token)", "via dev_appserver and queries a bunch of cloud endpoints methods. \"\"\" import unittest", "support in auth component. It launches app via dev_appserver and queries a bunch", "os.path.join(THIS_DIR, 'endpoints_app') class CloudEndpointsSmokeTest(unittest.TestCase): def setUp(self): super(CloudEndpointsSmokeTest, self).setUp() self.app = local_app.LocalApplication(TEST_APP_DIR, 9700) self.app.start()", "('127.0.0.1', '0:0:0:0:0:0:0:1')) def check_who_authenticated(self): # TODO(vadimsh): Testing this requires interacting with real OAuth2", "= local_app.LocalApplication(TEST_APP_DIR, 9700) self.app.start() self.app.ensure_serving() def tearDown(self): try: self.app.stop() if self.has_failed(): self.app.dump_log() finally:", "Verify it is usable. response = self.app.client.json_request( '/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1': token}) self.assertEqual(200, response.http_code) self.assertEqual('host-name',", "queries a bunch of cloud endpoints methods. \"\"\" import unittest import os import", "rights reserved. # Use of this source code is governed by the Apache", "os.path.dirname(os.path.abspath(__file__)) # /components/tests/endpoints_app/. 
TEST_APP_DIR = os.path.join(THIS_DIR, 'endpoints_app') class CloudEndpointsSmokeTest(unittest.TestCase): def setUp(self): super(CloudEndpointsSmokeTest, self).setUp()", "u'domain': u'global', u'message': u'Forbidden', u'reason': u'forbidden', } ], u'message': u'Forbidden', }, } self.assertEqual(expected,", "{'host': 'host-name'}) self.assertEqual(200, response.http_code) token = response.body.get('host_token') self.assertTrue(token) # Verify it is usable.", "with real OAuth2 service # to get OAuth2 token. It's doable, but the", "} self.assertEqual(expected, response.body) def check_host_token(self): # Create token first. response = self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token',", "not sure it's a good idea. pass def check_forbidden(self): response = self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden')", "= self.app.client.json_request( '/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1': token}) self.assertEqual(200, response.http_code) self.assertEqual('host-name', response.body.get('host')) if __name__ == '__main__':", "} ], u'message': u'Forbidden', }, } self.assertEqual(expected, response.body) def check_host_token(self): # Create token", "idea. pass def check_forbidden(self): response = self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden') self.assertEqual(403, response.http_code) expected = {", "this source code is governed by the Apache v2.0 license that can be", "= os.path.dirname(os.path.abspath(__file__)) # /components/tests/endpoints_app/. 
TEST_APP_DIR = os.path.join(THIS_DIR, 'endpoints_app') class CloudEndpointsSmokeTest(unittest.TestCase): def setUp(self): super(CloudEndpointsSmokeTest,", "self.check_host_token() self.check_forbidden() def check_who_anonymous(self): response = self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200, response.http_code) self.assertEqual('anonymous:anonymous', response.body.get('identity')) self.assertIn(response.body.get('ip'), ('127.0.0.1',", "license that can be # found in the LICENSE file. \"\"\"Smoke test for", "Apache v2.0 license that can be # found in the LICENSE file. \"\"\"Smoke", "response.body.get('identity')) self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1')) def check_who_authenticated(self): # TODO(vadimsh): Testing this requires interacting with", "], u'message': u'Forbidden', }, } self.assertEqual(expected, response.body) def check_host_token(self): # Create token first.", "\"\"\" import unittest import os import test_env test_env.setup_test_env() from support import local_app #", "class CloudEndpointsSmokeTest(unittest.TestCase): def setUp(self): super(CloudEndpointsSmokeTest, self).setUp() self.app = local_app.LocalApplication(TEST_APP_DIR, 9700) self.app.start() self.app.ensure_serving() def", "self.app.ensure_serving() def tearDown(self): try: self.app.stop() if self.has_failed(): self.app.dump_log() finally: super(CloudEndpointsSmokeTest, self).tearDown() def has_failed(self):", "token. It's doable, but the service account secrets had to # be hardcoded", "pass def check_forbidden(self): response = self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden') self.assertEqual(403, response.http_code) expected = { u'error':", "and queries a bunch of cloud endpoints methods. \"\"\" import unittest import os", "a bunch of cloud endpoints methods. \"\"\" import unittest import os import test_env", "that can be # found in the LICENSE file. \"\"\"Smoke test for Cloud", "LICENSE file. 
\"\"\"Smoke test for Cloud Endpoints support in auth component. It launches", "auth component. It launches app via dev_appserver and queries a bunch of cloud", "self.assertEqual(200, response.http_code) token = response.body.get('host_token') self.assertTrue(token) # Verify it is usable. response =", "from support import local_app # /components/tests/. THIS_DIR = os.path.dirname(os.path.abspath(__file__)) # /components/tests/endpoints_app/. TEST_APP_DIR =", "setUp(self): super(CloudEndpointsSmokeTest, self).setUp() self.app = local_app.LocalApplication(TEST_APP_DIR, 9700) self.app.start() self.app.ensure_serving() def tearDown(self): try: self.app.stop()", "check_forbidden(self): response = self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden') self.assertEqual(403, response.http_code) expected = { u'error': { u'code':", "endpoints methods. \"\"\" import unittest import os import test_env test_env.setup_test_env() from support import", "service account secrets had to # be hardcoded into the source code. I'm", "# TODO(vadimsh): Testing this requires interacting with real OAuth2 service # to get", "# Verify it is usable. response = self.app.client.json_request( '/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1': token}) self.assertEqual(200, response.http_code)", "if self.has_failed(): self.app.dump_log() finally: super(CloudEndpointsSmokeTest, self).tearDown() def has_failed(self): # pylint: disable=E1101 return not", "self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1')) def check_who_authenticated(self): # TODO(vadimsh): Testing this requires interacting with real", "# found in the LICENSE file. \"\"\"Smoke test for Cloud Endpoints support in", "for Cloud Endpoints support in auth component. 
It launches app via dev_appserver and", "= self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'}) self.assertEqual(200, response.http_code) token = response.body.get('host_token') self.assertTrue(token) # Verify", "Testing this requires interacting with real OAuth2 service # to get OAuth2 token.", "super(CloudEndpointsSmokeTest, self).setUp() self.app = local_app.LocalApplication(TEST_APP_DIR, 9700) self.app.start() self.app.ensure_serving() def tearDown(self): try: self.app.stop() if", "The Swarming Authors. All rights reserved. # Use of this source code is", "self.assertEqual(403, response.http_code) expected = { u'error': { u'code': 403, u'errors': [ { u'domain':", "in the LICENSE file. \"\"\"Smoke test for Cloud Endpoints support in auth component.", "u'reason': u'forbidden', } ], u'message': u'Forbidden', }, } self.assertEqual(expected, response.body) def check_host_token(self): #", "token = response.body.get('host_token') self.assertTrue(token) # Verify it is usable. response = self.app.client.json_request( '/_ah/api/testing_service/v1/who',", "{ u'code': 403, u'errors': [ { u'domain': u'global', u'message': u'Forbidden', u'reason': u'forbidden', }", "import os import test_env test_env.setup_test_env() from support import local_app # /components/tests/. THIS_DIR =", "/components/tests/endpoints_app/. TEST_APP_DIR = os.path.join(THIS_DIR, 'endpoints_app') class CloudEndpointsSmokeTest(unittest.TestCase): def setUp(self): super(CloudEndpointsSmokeTest, self).setUp() self.app =", "}, } self.assertEqual(expected, response.body) def check_host_token(self): # Create token first. response = self.app.client.json_request(", "response = self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'}) self.assertEqual(200, response.http_code) token = response.body.get('host_token') self.assertTrue(token) #", "service # to get OAuth2 token. 
It's doable, but the service account secrets", "Copyright 2014 The Swarming Authors. All rights reserved. # Use of this source", "Authors. All rights reserved. # Use of this source code is governed by", "unittest import os import test_env test_env.setup_test_env() from support import local_app # /components/tests/. THIS_DIR", "9700) self.app.start() self.app.ensure_serving() def tearDown(self): try: self.app.stop() if self.has_failed(): self.app.dump_log() finally: super(CloudEndpointsSmokeTest, self).tearDown()", "self._resultForDoCleanups.wasSuccessful() def test_smoke(self): self.check_who_anonymous() self.check_who_authenticated() self.check_host_token() self.check_forbidden() def check_who_anonymous(self): response = self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200,", "be hardcoded into the source code. I'm not sure it's a good idea.", "token first. response = self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'}) self.assertEqual(200, response.http_code) token = response.body.get('host_token')", "it's a good idea. pass def check_forbidden(self): response = self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden') self.assertEqual(403, response.http_code)", "self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200, response.http_code) self.assertEqual('anonymous:anonymous', response.body.get('identity')) self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1')) def check_who_authenticated(self): # TODO(vadimsh): Testing", "response.http_code) expected = { u'error': { u'code': 403, u'errors': [ { u'domain': u'global',", "reserved. # Use of this source code is governed by the Apache v2.0", "response.http_code) token = response.body.get('host_token') self.assertTrue(token) # Verify it is usable. response = self.app.client.json_request(", "= response.body.get('host_token') self.assertTrue(token) # Verify it is usable. 
response = self.app.client.json_request( '/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1':", "TEST_APP_DIR = os.path.join(THIS_DIR, 'endpoints_app') class CloudEndpointsSmokeTest(unittest.TestCase): def setUp(self): super(CloudEndpointsSmokeTest, self).setUp() self.app = local_app.LocalApplication(TEST_APP_DIR,", "to # be hardcoded into the source code. I'm not sure it's a", "check_who_authenticated(self): # TODO(vadimsh): Testing this requires interacting with real OAuth2 service # to", "usable. response = self.app.client.json_request( '/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1': token}) self.assertEqual(200, response.http_code) self.assertEqual('host-name', response.body.get('host')) if __name__", "response.body) def check_host_token(self): # Create token first. response = self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'})", "I'm not sure it's a good idea. pass def check_forbidden(self): response = self.app.client.json_request(", "by the Apache v2.0 license that can be # found in the LICENSE", "u'error': { u'code': 403, u'errors': [ { u'domain': u'global', u'message': u'Forbidden', u'reason': u'forbidden',", "into the source code. I'm not sure it's a good idea. pass def", "secrets had to # be hardcoded into the source code. I'm not sure", "u'message': u'Forbidden', }, } self.assertEqual(expected, response.body) def check_host_token(self): # Create token first. response", "test_smoke(self): self.check_who_anonymous() self.check_who_authenticated() self.check_host_token() self.check_forbidden() def check_who_anonymous(self): response = self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200, response.http_code) self.assertEqual('anonymous:anonymous',", "u'forbidden', } ], u'message': u'Forbidden', }, } self.assertEqual(expected, response.body) def check_host_token(self): # Create", "\"\"\"Smoke test for Cloud Endpoints support in auth component. 
It launches app via", "'0:0:0:0:0:0:0:1')) def check_who_authenticated(self): # TODO(vadimsh): Testing this requires interacting with real OAuth2 service", "u'errors': [ { u'domain': u'global', u'message': u'Forbidden', u'reason': u'forbidden', } ], u'message': u'Forbidden',", "real OAuth2 service # to get OAuth2 token. It's doable, but the service", "All rights reserved. # Use of this source code is governed by the", "launches app via dev_appserver and queries a bunch of cloud endpoints methods. \"\"\"", "this requires interacting with real OAuth2 service # to get OAuth2 token. It's", "{ u'domain': u'global', u'message': u'Forbidden', u'reason': u'forbidden', } ], u'message': u'Forbidden', }, }", "OAuth2 token. It's doable, but the service account secrets had to # be", "def check_forbidden(self): response = self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden') self.assertEqual(403, response.http_code) expected = { u'error': {", "had to # be hardcoded into the source code. I'm not sure it's", "def has_failed(self): # pylint: disable=E1101 return not self._resultForDoCleanups.wasSuccessful() def test_smoke(self): self.check_who_anonymous() self.check_who_authenticated() self.check_host_token()", "def check_who_anonymous(self): response = self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200, response.http_code) self.assertEqual('anonymous:anonymous', response.body.get('identity')) self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1')) def", "doable, but the service account secrets had to # be hardcoded into the", "python # Copyright 2014 The Swarming Authors. All rights reserved. 
# Use of", "self.app = local_app.LocalApplication(TEST_APP_DIR, 9700) self.app.start() self.app.ensure_serving() def tearDown(self): try: self.app.stop() if self.has_failed(): self.app.dump_log()", "self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden') self.assertEqual(403, response.http_code) expected = { u'error': { u'code': 403, u'errors': [", "self.check_forbidden() def check_who_anonymous(self): response = self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200, response.http_code) self.assertEqual('anonymous:anonymous', response.body.get('identity')) self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1'))", "response.http_code) self.assertEqual('anonymous:anonymous', response.body.get('identity')) self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1')) def check_who_authenticated(self): # TODO(vadimsh): Testing this requires", "the service account secrets had to # be hardcoded into the source code.", "super(CloudEndpointsSmokeTest, self).tearDown() def has_failed(self): # pylint: disable=E1101 return not self._resultForDoCleanups.wasSuccessful() def test_smoke(self): self.check_who_anonymous()", "cloud endpoints methods. \"\"\" import unittest import os import test_env test_env.setup_test_env() from support", "# Copyright 2014 The Swarming Authors. All rights reserved. # Use of this", "self.check_who_anonymous() self.check_who_authenticated() self.check_host_token() self.check_forbidden() def check_who_anonymous(self): response = self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200, response.http_code) self.assertEqual('anonymous:anonymous', response.body.get('identity'))", "import unittest import os import test_env test_env.setup_test_env() from support import local_app # /components/tests/.", "component. 
It launches app via dev_appserver and queries a bunch of cloud endpoints", "self.assertEqual(200, response.http_code) self.assertEqual('anonymous:anonymous', response.body.get('identity')) self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1')) def check_who_authenticated(self): # TODO(vadimsh): Testing this", "to get OAuth2 token. It's doable, but the service account secrets had to", "def setUp(self): super(CloudEndpointsSmokeTest, self).setUp() self.app = local_app.LocalApplication(TEST_APP_DIR, 9700) self.app.start() self.app.ensure_serving() def tearDown(self): try:", "u'Forbidden', u'reason': u'forbidden', } ], u'message': u'Forbidden', }, } self.assertEqual(expected, response.body) def check_host_token(self):", "= self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200, response.http_code) self.assertEqual('anonymous:anonymous', response.body.get('identity')) self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1')) def check_who_authenticated(self): # TODO(vadimsh):", "source code is governed by the Apache v2.0 license that can be #", "It's doable, but the service account secrets had to # be hardcoded into", "u'Forbidden', }, } self.assertEqual(expected, response.body) def check_host_token(self): # Create token first. response =", "Create token first. response = self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'}) self.assertEqual(200, response.http_code) token =", "good idea. 
pass def check_forbidden(self): response = self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden') self.assertEqual(403, response.http_code) expected =", "check_who_anonymous(self): response = self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200, response.http_code) self.assertEqual('anonymous:anonymous', response.body.get('identity')) self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1')) def check_who_authenticated(self):", "response = self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200, response.http_code) self.assertEqual('anonymous:anonymous', response.body.get('identity')) self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1')) def check_who_authenticated(self): #", "app via dev_appserver and queries a bunch of cloud endpoints methods. \"\"\" import", "code. I'm not sure it's a good idea. pass def check_forbidden(self): response =", "requires interacting with real OAuth2 service # to get OAuth2 token. It's doable,", "response = self.app.client.json_request( '/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1': token}) self.assertEqual(200, response.http_code) self.assertEqual('host-name', response.body.get('host')) if __name__ ==", "has_failed(self): # pylint: disable=E1101 return not self._resultForDoCleanups.wasSuccessful() def test_smoke(self): self.check_who_anonymous() self.check_who_authenticated() self.check_host_token() self.check_forbidden()", "get OAuth2 token. 
It's doable, but the service account secrets had to #", "u'code': 403, u'errors': [ { u'domain': u'global', u'message': u'Forbidden', u'reason': u'forbidden', } ],", "# pylint: disable=E1101 return not self._resultForDoCleanups.wasSuccessful() def test_smoke(self): self.check_who_anonymous() self.check_who_authenticated() self.check_host_token() self.check_forbidden() def", "but the service account secrets had to # be hardcoded into the source", "response = self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden') self.assertEqual(403, response.http_code) expected = { u'error': { u'code': 403,", "def tearDown(self): try: self.app.stop() if self.has_failed(): self.app.dump_log() finally: super(CloudEndpointsSmokeTest, self).tearDown() def has_failed(self): #", "self).setUp() self.app = local_app.LocalApplication(TEST_APP_DIR, 9700) self.app.start() self.app.ensure_serving() def tearDown(self): try: self.app.stop() if self.has_failed():", "self).tearDown() def has_failed(self): # pylint: disable=E1101 return not self._resultForDoCleanups.wasSuccessful() def test_smoke(self): self.check_who_anonymous() self.check_who_authenticated()", "test_env.setup_test_env() from support import local_app # /components/tests/. THIS_DIR = os.path.dirname(os.path.abspath(__file__)) # /components/tests/endpoints_app/. TEST_APP_DIR", "self.app.dump_log() finally: super(CloudEndpointsSmokeTest, self).tearDown() def has_failed(self): # pylint: disable=E1101 return not self._resultForDoCleanups.wasSuccessful() def", "expected = { u'error': { u'code': 403, u'errors': [ { u'domain': u'global', u'message':", "#!/usr/bin/env python # Copyright 2014 The Swarming Authors. All rights reserved. # Use", "THIS_DIR = os.path.dirname(os.path.abspath(__file__)) # /components/tests/endpoints_app/. 
TEST_APP_DIR = os.path.join(THIS_DIR, 'endpoints_app') class CloudEndpointsSmokeTest(unittest.TestCase): def setUp(self):", "u'global', u'message': u'Forbidden', u'reason': u'forbidden', } ], u'message': u'Forbidden', }, } self.assertEqual(expected, response.body)", "self.assertEqual(expected, response.body) def check_host_token(self): # Create token first. response = self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token', {'host':", "'host-name'}) self.assertEqual(200, response.http_code) token = response.body.get('host_token') self.assertTrue(token) # Verify it is usable. response", "check_host_token(self): # Create token first. response = self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'}) self.assertEqual(200, response.http_code)", "os import test_env test_env.setup_test_env() from support import local_app # /components/tests/. THIS_DIR = os.path.dirname(os.path.abspath(__file__))", "2014 The Swarming Authors. All rights reserved. # Use of this source code", "'endpoints_app') class CloudEndpointsSmokeTest(unittest.TestCase): def setUp(self): super(CloudEndpointsSmokeTest, self).setUp() self.app = local_app.LocalApplication(TEST_APP_DIR, 9700) self.app.start() self.app.ensure_serving()", "# Create token first. response = self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'}) self.assertEqual(200, response.http_code) token", "account secrets had to # be hardcoded into the source code. I'm not", "def check_who_authenticated(self): # TODO(vadimsh): Testing this requires interacting with real OAuth2 service #", "self.app.start() self.app.ensure_serving() def tearDown(self): try: self.app.stop() if self.has_failed(): self.app.dump_log() finally: super(CloudEndpointsSmokeTest, self).tearDown() def", "the LICENSE file. \"\"\"Smoke test for Cloud Endpoints support in auth component. 
It", "import test_env test_env.setup_test_env() from support import local_app # /components/tests/. THIS_DIR = os.path.dirname(os.path.abspath(__file__)) #", "Use of this source code is governed by the Apache v2.0 license that", "can be # found in the LICENSE file. \"\"\"Smoke test for Cloud Endpoints", "self.check_who_authenticated() self.check_host_token() self.check_forbidden() def check_who_anonymous(self): response = self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200, response.http_code) self.assertEqual('anonymous:anonymous', response.body.get('identity')) self.assertIn(response.body.get('ip'),", "governed by the Apache v2.0 license that can be # found in the", "sure it's a good idea. pass def check_forbidden(self): response = self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden') self.assertEqual(403,", "self.app.stop() if self.has_failed(): self.app.dump_log() finally: super(CloudEndpointsSmokeTest, self).tearDown() def has_failed(self): # pylint: disable=E1101 return", "Endpoints support in auth component. 
It launches app via dev_appserver and queries a", "self.has_failed(): self.app.dump_log() finally: super(CloudEndpointsSmokeTest, self).tearDown() def has_failed(self): # pylint: disable=E1101 return not self._resultForDoCleanups.wasSuccessful()", "self.app.client.json_request( '/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1': token}) self.assertEqual(200, response.http_code) self.assertEqual('host-name', response.body.get('host')) if __name__ == '__main__': unittest.main()", "code is governed by the Apache v2.0 license that can be # found", "local_app.LocalApplication(TEST_APP_DIR, 9700) self.app.start() self.app.ensure_serving() def tearDown(self): try: self.app.stop() if self.has_failed(): self.app.dump_log() finally: super(CloudEndpointsSmokeTest,", "self.assertEqual('anonymous:anonymous', response.body.get('identity')) self.assertIn(response.body.get('ip'), ('127.0.0.1', '0:0:0:0:0:0:0:1')) def check_who_authenticated(self): # TODO(vadimsh): Testing this requires interacting", "of this source code is governed by the Apache v2.0 license that can", "= os.path.join(THIS_DIR, 'endpoints_app') class CloudEndpointsSmokeTest(unittest.TestCase): def setUp(self): super(CloudEndpointsSmokeTest, self).setUp() self.app = local_app.LocalApplication(TEST_APP_DIR, 9700)", "it is usable. 
response = self.app.client.json_request( '/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1': token}) self.assertEqual(200, response.http_code) self.assertEqual('host-name', response.body.get('host'))", "finally: super(CloudEndpointsSmokeTest, self).tearDown() def has_failed(self): # pylint: disable=E1101 return not self._resultForDoCleanups.wasSuccessful() def test_smoke(self):", "TODO(vadimsh): Testing this requires interacting with real OAuth2 service # to get OAuth2", "# Use of this source code is governed by the Apache v2.0 license", "try: self.app.stop() if self.has_failed(): self.app.dump_log() finally: super(CloudEndpointsSmokeTest, self).tearDown() def has_failed(self): # pylint: disable=E1101", "a good idea. pass def check_forbidden(self): response = self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden') self.assertEqual(403, response.http_code) expected", "in auth component. It launches app via dev_appserver and queries a bunch of", "local_app # /components/tests/. THIS_DIR = os.path.dirname(os.path.abspath(__file__)) # /components/tests/endpoints_app/. TEST_APP_DIR = os.path.join(THIS_DIR, 'endpoints_app') class", "u'message': u'Forbidden', u'reason': u'forbidden', } ], u'message': u'Forbidden', }, } self.assertEqual(expected, response.body) def", "found in the LICENSE file. \"\"\"Smoke test for Cloud Endpoints support in auth", "def check_host_token(self): # Create token first. response = self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'}) self.assertEqual(200,", "Swarming Authors. All rights reserved. # Use of this source code is governed", "interacting with real OAuth2 service # to get OAuth2 token. It's doable, but", "= { u'error': { u'code': 403, u'errors': [ { u'domain': u'global', u'message': u'Forbidden',", "import local_app # /components/tests/. THIS_DIR = os.path.dirname(os.path.abspath(__file__)) # /components/tests/endpoints_app/. 
TEST_APP_DIR = os.path.join(THIS_DIR, 'endpoints_app')", "self.assertTrue(token) # Verify it is usable. response = self.app.client.json_request( '/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1': token}) self.assertEqual(200,", "{ u'error': { u'code': 403, u'errors': [ { u'domain': u'global', u'message': u'Forbidden', u'reason':", "[ { u'domain': u'global', u'message': u'Forbidden', u'reason': u'forbidden', } ], u'message': u'Forbidden', },", "# /components/tests/endpoints_app/. TEST_APP_DIR = os.path.join(THIS_DIR, 'endpoints_app') class CloudEndpointsSmokeTest(unittest.TestCase): def setUp(self): super(CloudEndpointsSmokeTest, self).setUp() self.app", "bunch of cloud endpoints methods. \"\"\" import unittest import os import test_env test_env.setup_test_env()", "def test_smoke(self): self.check_who_anonymous() self.check_who_authenticated() self.check_host_token() self.check_forbidden() def check_who_anonymous(self): response = self.app.client.json_request('/_ah/api/testing_service/v1/who') self.assertEqual(200, response.http_code)", "/components/tests/. THIS_DIR = os.path.dirname(os.path.abspath(__file__)) # /components/tests/endpoints_app/. TEST_APP_DIR = os.path.join(THIS_DIR, 'endpoints_app') class CloudEndpointsSmokeTest(unittest.TestCase): def", "support import local_app # /components/tests/. THIS_DIR = os.path.dirname(os.path.abspath(__file__)) # /components/tests/endpoints_app/. TEST_APP_DIR = os.path.join(THIS_DIR,", "# to get OAuth2 token. It's doable, but the service account secrets had", "dev_appserver and queries a bunch of cloud endpoints methods. \"\"\" import unittest import", "the Apache v2.0 license that can be # found in the LICENSE file.", "Cloud Endpoints support in auth component. 
It launches app via dev_appserver and queries", "not self._resultForDoCleanups.wasSuccessful() def test_smoke(self): self.check_who_anonymous() self.check_who_authenticated() self.check_host_token() self.check_forbidden() def check_who_anonymous(self): response = self.app.client.json_request('/_ah/api/testing_service/v1/who')", "v2.0 license that can be # found in the LICENSE file. \"\"\"Smoke test", "disable=E1101 return not self._resultForDoCleanups.wasSuccessful() def test_smoke(self): self.check_who_anonymous() self.check_who_authenticated() self.check_host_token() self.check_forbidden() def check_who_anonymous(self): response", "It launches app via dev_appserver and queries a bunch of cloud endpoints methods.", "tearDown(self): try: self.app.stop() if self.has_failed(): self.app.dump_log() finally: super(CloudEndpointsSmokeTest, self).tearDown() def has_failed(self): # pylint:", "source code. I'm not sure it's a good idea. pass def check_forbidden(self): response", "self.app.client.json_request( '/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'}) self.assertEqual(200, response.http_code) token = response.body.get('host_token') self.assertTrue(token) # Verify it", "403, u'errors': [ { u'domain': u'global', u'message': u'Forbidden', u'reason': u'forbidden', } ], u'message':", "= self.app.client.json_request( '/_ah/api/testing_service/v1/forbidden') self.assertEqual(403, response.http_code) expected = { u'error': { u'code': 403, u'errors':", "is usable. response = self.app.client.json_request( '/_ah/api/testing_service/v1/who', headers={'X-Host-Token-V1': token}) self.assertEqual(200, response.http_code) self.assertEqual('host-name', response.body.get('host')) if", "'/_ah/api/testing_service/v1/create_host_token', {'host': 'host-name'}) self.assertEqual(200, response.http_code) token = response.body.get('host_token') self.assertTrue(token) # Verify it is", "methods. 
\"\"\" import unittest import os import test_env test_env.setup_test_env() from support import local_app" ]
[ "#tipo do nó. Compound, Assign, ArithOp, etc self.value = None def __str__(self, level=0):", "if down.index(child) is len(down) - 1 else '' next_indent = '{0}{1}{2}'.format(indent, ' '", "self.children: if (child != None): ret += child.__str__(level+1) #level+1 return ret def __repr__(self):", "nb_children(node)) down = [] while up and sum(size_branch[node] for node in down) <", "<gh_stars>0 from Token import Token class AST(object): def __init__(self, nome): self.nome = nome;", "arvoreToXML.write('<' + no.nome + '>\\r\\n') for child in no.children: i = 0 for", "for child in node.children) + 1 size_branch = {child: nb_children(child) for child in", "+ '\\'' elif(no.nome == 'ArithOp' or no.nome == 'RelOp' or no.nome == 'LogicalOp'):", "self.children.append(attr) self.attr = attr if(not(exp is None)): self.children.append(exp) self.exp = exp; if (not(attr", "= right def __repr__(self): return self.nome class If(AST): def __init__(self, exp, c_true, c_false):", "= open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close() arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<' + no.nome + '>\\r\\n') for child", "None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left self.token = self.op =", "do tipo Assign.') if(not(left is None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left =", "c_false): AST.__init__(self, 'If') print('Criando um nó do tipo If.') if(not(exp is None)): self.children.append(exp)", "None)): self.children.append(id_) self.id = id_; def __repr__(self): return self.nome class Print(AST): def __init__(self,", "__repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value class Num(AST): def __init__(self, token): AST.__init__(self,'Num')", "return self.value class Num(AST): def __init__(self, token): AST.__init__(self,'Num') print('Criando um nó do tipo", "def __repr__(self): return self.nome class 
While(AST): def __init__(self, exp, commands): AST.__init__(self,'While') print('Criando um", "return ' value=\\'' + no.token.getLexema() + ' type:\\'' + no.value + '\\'' elif(no.nome", "\"\\t\"*level+ repr(self) +\"\\n\" for child in self.children: if (child != None): ret +=", "if(not(exp is None)): self.children.append(exp) if(not(c_true is None)): self.children.append(c_true) if(not(c_false is None)): self.children.append(c_false) self.exp", "right): Expr.__init__(self,'LogicalOp', op, left, right) print('Criando um nó do tipo LogicalOp com operador", "__repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value def print_tree(current_node, indent=\"\", last='updown'): nb_children =", "if up: end_shape = '┤' elif down: end_shape = '┐' else: end_shape =", "nó do tipo ArithOp com operador ' + str(op)) class RelOp(Expr): def __init__(self,", "= ' ' else: start_shape = '├' if up: end_shape = '┤' elif", "op self.right = right def __repr__(self): return self.nome class If(AST): def __init__(self, exp,", "if(not(c_true is None)): self.children.append(c_true) if(not(c_false is None)): self.children.append(c_false) self.exp = exp; self.c_true =", "None)): self.children.append(attr2) self.attr2 = attr2 if(not (commands is None)): self.children.append(commands) self.commands = commands", "open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close() arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<' + no.nome + '>\\r\\n') for child in", "str(op)) class RelOp(Expr): def __init__(self, left, op, right): Expr.__init__(self,'RelOp', op, left, right) print('Criando", "'down' in last else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) class", "if(no.nome == 'Id'): return ' lexema=\\'' + no.token.getLexema() + '\\'' elif(no.nome == 'Num'):", "or no.nome == 'LogicalOp'): return ' op=\\'' + no.op + '\\'' else: return", "print('Criando um nó do tipo Num.') 
#self.children.append(token) self.token = token self.value = token.value", "up: end_shape = '┤' elif down: end_shape = '┐' else: end_shape = ''", "if(not(exp is None)): self.children.append(exp) self.exp = exp; if (not(attr is None)): self.children.append(attr2) self.attr2", "= commands; def __repr__(self): return self.nome class For(AST): def __init__(self, attr, exp, attr2,", "def __init__(self, op, left, right): Expr.__init__(self,'ArithOp', op, left, right) print('Criando um nó do", "self.children.append(exp) if(not (commands is None)): self.children.append(commands) self.exp = exp; self.commands = commands; def", "\" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) class ToXML: @staticmethod def toXML(no): count =", "nó do tipo Read.') if(not(id_ is None)): self.children.append(id_) self.id = id_; def __repr__(self):", "return self.value def print_tree(current_node, indent=\"\", last='updown'): nb_children = lambda node: sum(nb_children(child) for child", "'ArithOp' or no.nome == 'RelOp' or no.nome == 'LogicalOp'): return ' op=\\'' +", "__evaluate__(self): return self.value class Num(AST): def __init__(self, token): AST.__init__(self,'Num') print('Criando um nó do", "self.commands = commands def __repr__(self): return self.nome class Read(AST): def __init__(self, id_): AST.__init__(self,'Read')", "self.right = right def __repr__(self): return self.nome class If(AST): def __init__(self, exp, c_true,", "elif last == 'down': start_shape = '└' elif last == 'updown': start_shape =", "def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value def print_tree(current_node, indent=\"\", last='updown'): nb_children", "child in current_node.children} \"\"\" Creation of balanced lists for \"up\" branch and \"down\"", "block\"\"\" def __init__(self): AST.__init__(self,'Block') print('Criando um nó do tipo Block.') #self.children = []", "is None)): self.children.append(attr2) self.attr2 = attr2 
if(not (commands is None)): self.children.append(commands) self.commands =", "for i in range(0,count): arvoreToXML.write('\\t') if(child.nome == 'Id' or child.nome == 'Num'): arvoreToXML.write('<'", "(not(attr is None)): self.children.append(attr) self.attr = attr if(not(exp is None)): self.children.append(exp) self.exp =", "repr(self.token.getLexema()) def __evaluate__(self): return self.value class Num(AST): def __init__(self, token): AST.__init__(self,'Num') print('Criando um", "commands): AST.__init__(self,'For') print('Criando um nó do tipo For.') if (not(attr is None)): self.children.append(attr)", "um nó do tipo Print.') if(not(exp is None)): self.children.append(exp) self.exp = exp; def", "c_false; def __repr__(self): return self.nome class While(AST): def __init__(self, exp, commands): AST.__init__(self,'While') print('Criando", "for child in down: next_last = 'down' if down.index(child) is len(down) - 1", "self.tipo = None #tipo do nó. Compound, Assign, ArithOp, etc self.value = None", "node: sum(nb_children(child) for child in node.children) + 1 size_branch = {child: nb_children(child) for", "op, left, right) print('Criando um nó do tipo RelOp com operador ' +", "+= child.__str__(level+1) #level+1 return ret def __repr__(self): return self.nome def __evaluate__(self): for child", "repr(self) +\"\\n\" for child in self.children: if (child != None): ret += child.__str__(level+1)", "!= None): return child.__evaluate__() class Compound(AST): \"\"\"Represents a 'BEGIN ... 
END' block\"\"\" def", "c_true, c_false): AST.__init__(self, 'If') print('Criando um nó do tipo If.') if(not(exp is None)):", "END' block\"\"\" def __init__(self): AST.__init__(self,'Block') print('Criando um nó do tipo Block.') #self.children =", "if(not(exp is None)): self.children.append(exp) self.exp = exp; def __repr__(self): return self.nome class Expr(AST):", "right) print('Criando um nó do tipo ArithOp com operador ' + str(op)) class", "__init__(self, op, left, right): Expr.__init__(self,'LogicalOp', op, left, right) print('Criando um nó do tipo", "print_tree(child, indent=next_indent, last=next_last) \"\"\" Printing of current node. \"\"\" if last == 'up':", "return self.nome class If(AST): def __init__(self, exp, c_true, c_false): AST.__init__(self, 'If') print('Criando um", "from Token import Token class AST(object): def __init__(self, nome): self.nome = nome; self.children", "if(not(right is None)): self.children.append(right) self.left = left self.token = self.op = op self.right", "Compound(AST): \"\"\"Represents a 'BEGIN ... 
END' block\"\"\" def __init__(self): AST.__init__(self,'Block') print('Criando um nó", "(commands is None)): self.children.append(commands) self.commands = commands def __repr__(self): return self.nome class Read(AST):", "Expr.__init__(self,'LogicalOp', op, left, right) print('Criando um nó do tipo LogicalOp com operador '", "ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML) for i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' +", "child.nome == 'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/\\r\\n') else: arvoreToXML.write('<' +", "or child.nome == 'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/\\r\\n') else: arvoreToXML.write('<'", "AST.__init__(self,'Assign'); print('Criando um nó do tipo Assign.') if(not(left is None)): self.children.append(left) if(not(right is", "is 0 else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'up' in last", "print('Criando um nó do tipo Assign.') if(not(left is None)): self.children.append(left) if(not(right is None)):", "child.__str__(level+1) #level+1 return ret def __repr__(self): return self.nome def __evaluate__(self): for child in", "__repr__(self): return self.nome class For(AST): def __init__(self, attr, exp, attr2, commands): AST.__init__(self,'For') print('Criando", "is None)): self.children.append(exp) self.exp = exp; if (not(attr is None)): self.children.append(attr2) self.attr2 =", "class AST(object): def __init__(self, nome): self.nome = nome; self.children = [] self.tipo =", "class For(AST): def __init__(self, attr, exp, attr2, commands): AST.__init__(self,'For') print('Criando um nó do", "count = count + 1 for child in no.children: i = 0 for", "While.') if(not(exp is None)): self.children.append(exp) if(not (commands is None)): self.children.append(commands) self.exp = exp;", "nó do tipo Assign.') if(not(left is None)): self.children.append(left) if(not(right is None)): self.children.append(right) 
self.left", "def print_tree(current_node, indent=\"\", last='updown'): nb_children = lambda node: sum(nb_children(child) for child in node.children)", "' + str(op)) class Id(AST): \"\"\"The Var node is constructed out of ID", "count = 1 arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close() arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<' + no.nome", "= None def __str__(self, level=0): ret = \"\\t\"*level+ repr(self) +\"\\n\" for child in", "do tipo RelOp com operador ' + str(op)) class Id(AST): \"\"\"The Var node", "'BEGIN ... END' block\"\"\" def __init__(self): AST.__init__(self,'Block') print('Criando um nó do tipo Block.')", "+ '>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML) for i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome", "or no.nome == 'RelOp' or no.nome == 'LogicalOp'): return ' op=\\'' + no.op", "do tipo Block.') #self.children = [] def __repr__(self): return self.nome class Assign(AST): def", "op, left, right) print('Criando um nó do tipo LogicalOp com operador ' +", "!= None): ret += child.__str__(level+1) #level+1 return ret def __repr__(self): return self.nome def", "def __init__(self, exp, c_true, c_false): AST.__init__(self, 'If') print('Criando um nó do tipo If.')", "in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') @staticmethod def classifierPrint(no): if(no.nome ==", "__evaluate__(self): return self.value def print_tree(current_node, indent=\"\", last='updown'): nb_children = lambda node: sum(nb_children(child) for", "node in up): down.append(up.pop()) \"\"\" Printing of \"up\" branch. 
\"\"\" for child in", "' else: start_shape = '├' if up: end_shape = '┤' elif down: end_shape", "do tipo LogicalOp com operador ' + str(op)) class ArithOp(Expr): def __init__(self, op,", "= c_false; def __repr__(self): return self.nome class While(AST): def __init__(self, exp, commands): AST.__init__(self,'While')", "sum(nb_children(child) for child in node.children) + 1 size_branch = {child: nb_children(child) for child", "of \"up\" branch. \"\"\" for child in up: next_last = 'up' if up.index(child)", "__repr__(self): return self.nome class While(AST): def __init__(self, exp, commands): AST.__init__(self,'While') print('Criando um nó", "nó do tipo While.') if(not(exp is None)): self.children.append(exp) if(not (commands is None)): self.children.append(commands)", "If.') if(not(exp is None)): self.children.append(exp) if(not(c_true is None)): self.children.append(c_true) if(not(c_false is None)): self.children.append(c_false)", "self.left = left self.token = self.op = op self.right = right def __repr__(self):", "None)): self.children.append(exp) if(not (commands is None)): self.children.append(commands) self.exp = exp; self.commands = commands;", "= nome; self.children = [] self.tipo = None #tipo do nó. Compound, Assign,", "def __repr__(self): #self.left.repr(); return self.op class LogicalOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'LogicalOp',", "Printing of current node. \"\"\" if last == 'up': start_shape = '┌' elif", "child.nome + '>\\r\\n') @staticmethod def classifierPrint(no): if(no.nome == 'Id'): return ' lexema=\\'' +", "left self.token = self.op = op self.right = right def __repr__(self): return self.nome", "self.value = token.value #em python, não precisamos nos preocupar com o tipo de", "nó do tipo Num.') #self.children.append(token) self.token = token self.value = token.value #em python,", "branch. 
\"\"\" for child in up: next_last = 'up' if up.index(child) is 0", "'>\\r\\n') @staticmethod def deepSearch( no, count,arvoreToXML): count = count + 1 for child", "nb_children(child) for child in current_node.children} \"\"\" Creation of balanced lists for \"up\" branch", "LogicalOp com operador ' + str(op)) class ArithOp(Expr): def __init__(self, op, left, right):", "child.nome + '>\\r\\n') arvoreToXML.write('</' + no.nome + '>\\r\\n') @staticmethod def deepSearch( no, count,arvoreToXML):", "'up' if up.index(child) is 0 else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if", "nó do tipo RelOp com operador ' + str(op)) class Id(AST): \"\"\"The Var", "+ no.token.getLexema() + '\\'' elif(no.nome == 'Num'): return ' value=\\'' + no.token.getLexema() +", "str(op)) class ArithOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'ArithOp', op, left, right) print('Criando", "node.children) + 1 size_branch = {child: nb_children(child) for child in current_node.children} \"\"\" Creation", "of \"down\" branch. 
\"\"\" for child in down: next_last = 'down' if down.index(child)", "op, left, right): AST.__init__(self,nome) if(not(left is None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left", "is None)): self.children.append(c_true) if(not(c_false is None)): self.children.append(c_false) self.exp = exp; self.c_true = c_true;", "' + str(op)) class RelOp(Expr): def __init__(self, left, op, right): Expr.__init__(self,'RelOp', op, left,", "um nó do tipo Num.') #self.children.append(token) self.token = token self.value = token.value #em", "start_shape = '└' elif last == 'updown': start_shape = ' ' else: start_shape", "+ no.token.getLexema() + ' type:\\'' + no.value + '\\'' elif(no.nome == 'ArithOp' or", "'>\\r\\n') arvoreToXML.write('</' + no.nome + '>\\r\\n') @staticmethod def deepSearch( no, count,arvoreToXML): count =", "AST(object): def __init__(self, nome): self.nome = nome; self.children = [] self.tipo = None", "is None)): self.children.append(right) self.left = left self.op = op self.right = right def", "[] self.tipo = None #tipo do nó. Compound, Assign, ArithOp, etc self.value =", "um nó do tipo ArithOp com operador ' + str(op)) class RelOp(Expr): def", "indent=next_indent, last=next_last) \"\"\" Printing of current node. 
\"\"\" if last == 'up': start_shape", "tipo Assign.') if(not(left is None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left", "self.exp = exp; self.c_true = c_true; self.c_false = c_false; def __repr__(self): return self.nome", "(commands is None)): self.children.append(commands) self.exp = exp; self.commands = commands; def __repr__(self): return", "= left self.op = op self.right = right def __repr__(self): #self.left.repr(); return self.op", "left, right) print('Criando um nó do tipo LogicalOp com operador ' + str(op))", "is constructed out of ID token.\"\"\" def __init__(self, token): AST.__init__(self,'Id') print('Criando um nó", "while up and sum(size_branch[node] for node in down) < sum(size_branch[node] for node in", "= attr if(not(exp is None)): self.children.append(exp) self.exp = exp; if (not(attr is None)):", "down) < sum(size_branch[node] for node in up): down.append(up.pop()) \"\"\" Printing of \"up\" branch.", "deepSearch( no, count,arvoreToXML): count = count + 1 for child in no.children: i", "self.nome class If(AST): def __init__(self, exp, c_true, c_false): AST.__init__(self, 'If') print('Criando um nó", "for child in self.children: if (child != None): return child.__evaluate__() class Compound(AST): \"\"\"Represents", "For.') if (not(attr is None)): self.children.append(attr) self.attr = attr if(not(exp is None)): self.children.append(exp)", "\"\"\"The Var node is constructed out of ID token.\"\"\" def __init__(self, token): AST.__init__(self,'Id')", "no.token.getLexema() + '\\'' elif(no.nome == 'Num'): return ' value=\\'' + no.token.getLexema() + '", "'up': start_shape = '┌' elif last == 'down': start_shape = '└' elif last", "value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value def print_tree(current_node, indent=\"\", last='updown'):", "for child in self.children: if (child != None): ret += child.__str__(level+1) #level+1 return", "class Num(AST): 
def __init__(self, token): AST.__init__(self,'Num') print('Criando um nó do tipo Num.') #self.children.append(token)", "branch and \"down\" branch. \"\"\" up = sorted(current_node.children, key=lambda node: nb_children(node)) down =", "= None #tipo do nó. Compound, Assign, ArithOp, etc self.value = None def", "self.value = token.value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value class Num(AST):", "toXML(no): count = 1 arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close() arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<' +", "tipo LogicalOp com operador ' + str(op)) class ArithOp(Expr): def __init__(self, op, left,", "for i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') arvoreToXML.write('</' + no.nome", "arvoreToXML) for i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') @staticmethod def", "arvoreToXML.write('</' + child.nome + '>\\r\\n') @staticmethod def classifierPrint(no): if(no.nome == 'Id'): return '", "= [] self.tipo = None #tipo do nó. 
Compound, Assign, ArithOp, etc self.value", "self.id = id_; def __repr__(self): return self.nome class Print(AST): def __init__(self, exp): AST.__init__(self,'Print')", "repr(self.token.getLexema()) def __evaluate__(self): return self.value def print_tree(current_node, indent=\"\", last='updown'): nb_children = lambda node:", "in down) < sum(size_branch[node] for node in up): down.append(up.pop()) \"\"\" Printing of \"up\"", "'├' if up: end_shape = '┤' elif down: end_shape = '┐' else: end_shape", "len(down) - 1 else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'down' in", "'RelOp' or no.nome == 'LogicalOp'): return ' op=\\'' + no.op + '\\'' else:", "class RelOp(Expr): def __init__(self, left, op, right): Expr.__init__(self,'RelOp', op, left, right) print('Criando um", "in up: next_last = 'up' if up.index(child) is 0 else '' next_indent =", "is None)): self.children.append(attr) self.attr = attr if(not(exp is None)): self.children.append(exp) self.exp = exp;", "'>\\r\\n') @staticmethod def classifierPrint(no): if(no.nome == 'Id'): return ' lexema=\\'' + no.token.getLexema() +", "Creation of balanced lists for \"up\" branch and \"down\" branch. \"\"\" up =", "print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(), end_shape)) \"\"\" Printing of \"down\" branch. 
\"\"\" for child in", "is None)): self.children.append(exp) if(not (commands is None)): self.children.append(commands) self.exp = exp; self.commands =", "print('Criando um nó do tipo RelOp com operador ' + str(op)) class Id(AST):", "right): AST.__init__(self,'Assign'); print('Criando um nó do tipo Assign.') if(not(left is None)): self.children.append(left) if(not(right", "value=\\'' + no.token.getLexema() + ' type:\\'' + no.value + '\\'' elif(no.nome == 'ArithOp'", "== 'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/>\\r\\n') else: arvoreToXML.write('<' + child.nome", "print('Criando um nó do tipo Print.') if(not(exp is None)): self.children.append(exp) self.exp = exp;", "= '{0}{1}{2}'.format(indent, ' ' if 'down' in last else '│', \" \" *", "None)): self.children.append(attr) self.attr = attr if(not(exp is None)): self.children.append(exp) self.exp = exp; if", "'down' if down.index(child) is len(down) - 1 else '' next_indent = '{0}{1}{2}'.format(indent, '", "tipo de value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value def print_tree(current_node,", "current_node.__repr__(), end_shape)) \"\"\" Printing of \"down\" branch. 
\"\"\" for child in down: next_last", "left, right) print('Criando um nó do tipo ArithOp com operador ' + str(op))", "__init__(self, nome): self.nome = nome; self.children = [] self.tipo = None #tipo do", "etc self.value = None def __str__(self, level=0): ret = \"\\t\"*level+ repr(self) +\"\\n\" for", "== 'updown': start_shape = ' ' else: start_shape = '├' if up: end_shape", "= count + 1 for child in no.children: i = 0 for i", "out of ID token.\"\"\" def __init__(self, token): AST.__init__(self,'Id') print('Criando um nó do tipo", "'\\'' elif(no.nome == 'Num'): return ' value=\\'' + no.token.getLexema() + ' type:\\'' +", "next_indent = '{0}{1}{2}'.format(indent, ' ' if 'down' in last else '│', \" \"", "ret = \"\\t\"*level+ repr(self) +\"\\n\" for child in self.children: if (child != None):", "ArithOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'ArithOp', op, left, right) print('Criando um nó", "end_shape)) \"\"\" Printing of \"down\" branch. \"\"\" for child in down: next_last =", "return self.nome class While(AST): def __init__(self, exp, commands): AST.__init__(self,'While') print('Criando um nó do", "__evaluate__(self): for child in self.children: if (child != None): return child.__evaluate__() class Compound(AST):", "ArithOp com operador ' + str(op)) class RelOp(Expr): def __init__(self, left, op, right):", "class Id(AST): \"\"\"The Var node is constructed out of ID token.\"\"\" def __init__(self,", "#self.children = [] def __repr__(self): return self.nome class Assign(AST): def __init__(self, left, op,", "return ret def __repr__(self): return self.nome def __evaluate__(self): for child in self.children: if", "in node.children) + 1 size_branch = {child: nb_children(child) for child in current_node.children} \"\"\"", "arvoreToXML) for i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') arvoreToXML.write('</' +", "Expr.__init__(self,'RelOp', op, left, right) print('Criando um nó do tipo RelOp com 
operador '", "is None)): self.children.append(right) self.left = left self.token = self.op = op self.right =", "+ no.nome + '>\\r\\n') @staticmethod def deepSearch( no, count,arvoreToXML): count = count +", "op, left, right): Expr.__init__(self,'LogicalOp', op, left, right) print('Criando um nó do tipo LogicalOp", "right): Expr.__init__(self,'ArithOp', op, left, right) print('Criando um nó do tipo ArithOp com operador", "= right def __repr__(self): #self.left.repr(); return self.op class LogicalOp(Expr): def __init__(self, op, left,", "for child in up: next_last = 'up' if up.index(child) is 0 else ''", "== 'Id' or child.nome == 'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/\\r\\n')", "= 'up' if up.index(child) is 0 else '' next_indent = '{0}{1}{2}'.format(indent, ' '", "+ '/>\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML)", "op, right): Expr.__init__(self,'RelOp', op, left, right) print('Criando um nó do tipo RelOp com", "= self.op = op self.right = right def __repr__(self): return self.nome class If(AST):", "if 'up' in last else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last)", "is None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left self.op = op", "self.attr2 = attr2 if(not (commands is None)): self.children.append(commands) self.commands = commands def __repr__(self):", "last else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) \"\"\" Printing of", "'If') print('Criando um nó do tipo If.') if(not(exp is None)): self.children.append(exp) if(not(c_true is", "AST.__init__(self, 'If') print('Criando um nó do tipo If.') if(not(exp is None)): self.children.append(exp) if(not(c_true", "Id.') #self.children.append(token) self.token = token self.value = token.value def __repr__(self): return 
repr(self.token.getLexema()) def", "def __init__(self, id_): AST.__init__(self,'Read') print('Criando um nó do tipo Read.') if(not(id_ is None)):", "do tipo Read.') if(not(id_ is None)): self.children.append(id_) self.id = id_; def __repr__(self): return", "def __repr__(self): return self.nome def __evaluate__(self): for child in self.children: if (child !=", "def __init__(self, nome): self.nome = nome; self.children = [] self.tipo = None #tipo", "'{0}{1}{2}'.format(indent, ' ' if 'down' in last else '│', \" \" * len(current_node.__repr__()))", "nome, op, left, right): AST.__init__(self,nome) if(not(left is None)): self.children.append(left) if(not(right is None)): self.children.append(right)", "self.attr = attr if(not(exp is None)): self.children.append(exp) self.exp = exp; if (not(attr is", "next_last = 'up' if up.index(child) is 0 else '' next_indent = '{0}{1}{2}'.format(indent, '", "= '┌' elif last == 'down': start_shape = '└' elif last == 'updown':", "__repr__(self): return self.nome def __evaluate__(self): for child in self.children: if (child != None):", "* len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) \"\"\" Printing of current node. 
\"\"\" if last", "+ child.nome + ToXML.classifierPrint(child) + '/\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) +", "+ 1 for child in no.children: i = 0 for i in range(0,count):", "self.nome def __evaluate__(self): for child in self.children: if (child != None): return child.__evaluate__()", "self.children.append(right) self.left = left self.token = self.op = op self.right = right def", "in self.children: if (child != None): ret += child.__str__(level+1) #level+1 return ret def", "exp, c_true, c_false): AST.__init__(self, 'If') print('Criando um nó do tipo If.') if(not(exp is", "== 'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/\\r\\n') else: arvoreToXML.write('<' + child.nome", "ToXML.classifierPrint(child) + '/\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count,", "ToXML.deepSearch(child, count, arvoreToXML) for i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n')", "Num(AST): def __init__(self, token): AST.__init__(self,'Num') print('Criando um nó do tipo Num.') #self.children.append(token) self.token", "child in up: next_last = 'up' if up.index(child) is 0 else '' next_indent", "last == 'down': start_shape = '└' elif last == 'updown': start_shape = '", "' + str(op)) class ArithOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'ArithOp', op, left,", "arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') @staticmethod def classifierPrint(no): if(no.nome == 'Id'): return", "\"up\" branch. 
\"\"\" for child in up: next_last = 'up' if up.index(child) is", "down: next_last = 'down' if down.index(child) is len(down) - 1 else '' next_indent", "__init__(self, left, op, right): AST.__init__(self,'Assign'); print('Criando um nó do tipo Assign.') if(not(left is", "__init__(self): AST.__init__(self,'Block') print('Criando um nó do tipo Block.') #self.children = [] def __repr__(self):", "= exp; self.commands = commands; def __repr__(self): return self.nome class For(AST): def __init__(self,", "of current node. \"\"\" if last == 'up': start_shape = '┌' elif last", "def __repr__(self): return self.nome class If(AST): def __init__(self, exp, c_true, c_false): AST.__init__(self, 'If')", "token): AST.__init__(self,'Num') print('Criando um nó do tipo Num.') #self.children.append(token) self.token = token self.value", "= open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<' + no.nome + '>\\r\\n') for child in no.children: i =", "__repr__(self): return self.nome class Assign(AST): def __init__(self, left, op, right): AST.__init__(self,'Assign'); print('Criando um", "left self.op = op self.right = right def __repr__(self): #self.left.repr(); return self.op class", "Expr.__init__(self,'ArithOp', op, left, right) print('Criando um nó do tipo ArithOp com operador '", "elif(no.nome == 'Num'): return ' value=\\'' + no.token.getLexema() + ' type:\\'' + no.value", "def __repr__(self): return self.nome class For(AST): def __init__(self, attr, exp, attr2, commands): AST.__init__(self,'For')", "= '{0}{1}{2}'.format(indent, ' ' if 'up' in last else '│', \" \" *", "'' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'down' in last else '│', \"", "def __init__(self, left, op, right): AST.__init__(self,'Assign'); print('Criando um nó do tipo Assign.') if(not(left", "tipo While.') if(not(exp is None)): self.children.append(exp) if(not (commands is None)): self.children.append(commands) self.exp =", "if(not(id_ is None)): self.children.append(id_) self.id = id_; def 
__repr__(self): return self.nome class Print(AST):", "return ' lexema=\\'' + no.token.getLexema() + '\\'' elif(no.nome == 'Num'): return ' value=\\''", "\" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) \"\"\" Printing of current node. \"\"\" if", "__init__(self, exp, commands): AST.__init__(self,'While') print('Criando um nó do tipo While.') if(not(exp is None)):", "op, left, right) print('Criando um nó do tipo ArithOp com operador ' +", "lists for \"up\" branch and \"down\" branch. \"\"\" up = sorted(current_node.children, key=lambda node:", "arvoreToXML.write('</' + child.nome + '>\\r\\n') arvoreToXML.write('</' + no.nome + '>\\r\\n') @staticmethod def deepSearch(", "None #tipo do nó. Compound, Assign, ArithOp, etc self.value = None def __str__(self,", "end_shape = '┤' elif down: end_shape = '┐' else: end_shape = '' print('{0}{1}{2}{3}'.format(indent,", "= id_; def __repr__(self): return self.nome class Print(AST): def __init__(self, exp): AST.__init__(self,'Print') print('Criando", "branch. \"\"\" for child in down: next_last = 'down' if down.index(child) is len(down)", "lexema=\\'' + no.token.getLexema() + '\\'' elif(no.nome == 'Num'): return ' value=\\'' + no.token.getLexema()", "operador ' + str(op)) class Id(AST): \"\"\"The Var node is constructed out of", "if (child != None): return child.__evaluate__() class Compound(AST): \"\"\"Represents a 'BEGIN ... 
END'", "if last == 'up': start_shape = '┌' elif last == 'down': start_shape =", "tipo Block.') #self.children = [] def __repr__(self): return self.nome class Assign(AST): def __init__(self,", "For(AST): def __init__(self, attr, exp, attr2, commands): AST.__init__(self,'For') print('Criando um nó do tipo", "print('Criando um nó do tipo Id.') #self.children.append(token) self.token = token self.value = token.value", "start_shape = '├' if up: end_shape = '┤' elif down: end_shape = '┐'", "AST.__init__(self,nome) if(not(left is None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left self.op", "def __init__(self): AST.__init__(self,'Block') print('Criando um nó do tipo Block.') #self.children = [] def", "+ '\\'' elif(no.nome == 'Num'): return ' value=\\'' + no.token.getLexema() + ' type:\\''", "= \"\\t\"*level+ repr(self) +\"\\n\" for child in self.children: if (child != None): ret", "op self.right = right def __repr__(self): #self.left.repr(); return self.op class LogicalOp(Expr): def __init__(self,", "'┌' elif last == 'down': start_shape = '└' elif last == 'updown': start_shape", "um nó do tipo For.') if (not(attr is None)): self.children.append(attr) self.attr = attr", "nó do tipo LogicalOp com operador ' + str(op)) class ArithOp(Expr): def __init__(self,", "um nó do tipo LogicalOp com operador ' + str(op)) class ArithOp(Expr): def", "self.exp = exp; def __repr__(self): return self.nome class Expr(AST): def __init__(self, nome, op,", "return self.nome def __evaluate__(self): for child in self.children: if (child != None): return", "is None)): self.children.append(exp) if(not(c_true is None)): self.children.append(c_true) if(not(c_false is None)): self.children.append(c_false) self.exp =", "{child: nb_children(child) for child in current_node.children} \"\"\" Creation of balanced lists for \"up\"", "print('Criando um nó do tipo If.') if(not(exp is None)): self.children.append(exp) if(not(c_true is None)):", "do tipo 
While.') if(not(exp is None)): self.children.append(exp) if(not (commands is None)): self.children.append(commands) self.exp", "1 arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close() arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<' + no.nome + '>\\r\\n')", "Assign.') if(not(left is None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left self.token", "Id(AST): \"\"\"The Var node is constructed out of ID token.\"\"\" def __init__(self, token):", "start_shape = '┌' elif last == 'down': start_shape = '└' elif last ==", "= op self.right = right def __repr__(self): return self.nome class If(AST): def __init__(self,", "commands; def __repr__(self): return self.nome class For(AST): def __init__(self, attr, exp, attr2, commands):", "self.children.append(exp) if(not(c_true is None)): self.children.append(c_true) if(not(c_false is None)): self.children.append(c_false) self.exp = exp; self.c_true", "attr2 if(not (commands is None)): self.children.append(commands) self.commands = commands def __repr__(self): return self.nome", "= c_true; self.c_false = c_false; def __repr__(self): return self.nome class While(AST): def __init__(self,", "import Token class AST(object): def __init__(self, nome): self.nome = nome; self.children = []", "= exp; def __repr__(self): return self.nome class Expr(AST): def __init__(self, nome, op, left,", "self.children.append(commands) self.commands = commands def __repr__(self): return self.nome class Read(AST): def __init__(self, id_):", "last else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) class ToXML: @staticmethod", "child in no.children: i = 0 for i in range(0,count): arvoreToXML.write('\\t') if(child.nome ==", "left, op, right): AST.__init__(self,'Assign'); print('Criando um nó do tipo Assign.') if(not(left is None)):", "Var node is constructed out of ID token.\"\"\" def __init__(self, 
token): AST.__init__(self,'Id') print('Criando", "class If(AST): def __init__(self, exp, c_true, c_false): AST.__init__(self, 'If') print('Criando um nó do", "None def __str__(self, level=0): ret = \"\\t\"*level+ repr(self) +\"\\n\" for child in self.children:", "' ' else: start_shape = '├' if up: end_shape = '┤' elif down:", "self.children.append(c_true) if(not(c_false is None)): self.children.append(c_false) self.exp = exp; self.c_true = c_true; self.c_false =", "== 'RelOp' or no.nome == 'LogicalOp'): return ' op=\\'' + no.op + '\\''", "== 'Id'): return ' lexema=\\'' + no.token.getLexema() + '\\'' elif(no.nome == 'Num'): return", "self.children.append(id_) self.id = id_; def __repr__(self): return self.nome class Print(AST): def __init__(self, exp):", "ArithOp, etc self.value = None def __str__(self, level=0): ret = \"\\t\"*level+ repr(self) +\"\\n\"", "While(AST): def __init__(self, exp, commands): AST.__init__(self,'While') print('Criando um nó do tipo While.') if(not(exp", "self.children.append(exp) self.exp = exp; def __repr__(self): return self.nome class Expr(AST): def __init__(self, nome,", "tipo For.') if (not(attr is None)): self.children.append(attr) self.attr = attr if(not(exp is None)):", "\"\"\" for child in up: next_last = 'up' if up.index(child) is 0 else", "key=lambda node: nb_children(node)) down = [] while up and sum(size_branch[node] for node in", "arvoreToXML.close() arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<' + no.nome + '>\\r\\n') for child in no.children:", "None)): self.children.append(right) self.left = left self.token = self.op = op self.right = right", "AST.__init__(self,'Num') print('Criando um nó do tipo Num.') #self.children.append(token) self.token = token self.value =", "else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'down' in last else '│',", "level=0): ret = \"\\t\"*level+ repr(self) +\"\\n\" for child in self.children: if (child !=", "None)): self.children.append(exp) if(not(c_true 
is None)): self.children.append(c_true) if(not(c_false is None)): self.children.append(c_false) self.exp = exp;", "last='updown'): nb_children = lambda node: sum(nb_children(child) for child in node.children) + 1 size_branch", "== 'ArithOp' or no.nome == 'RelOp' or no.nome == 'LogicalOp'): return ' op=\\''", "+ '/\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML)", "print('Criando um nó do tipo Read.') if(not(id_ is None)): self.children.append(id_) self.id = id_;", "is len(down) - 1 else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'down'", "um nó do tipo RelOp com operador ' + str(op)) class Id(AST): \"\"\"The", "child.nome == 'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/>\\r\\n') else: arvoreToXML.write('<' +", "None)): self.children.append(exp) self.exp = exp; if (not(attr is None)): self.children.append(attr2) self.attr2 = attr2", "exp, attr2, commands): AST.__init__(self,'For') print('Criando um nó do tipo For.') if (not(attr is", "def classifierPrint(no): if(no.nome == 'Id'): return ' lexema=\\'' + no.token.getLexema() + '\\'' elif(no.nome", "\"\"\" if last == 'up': start_shape = '┌' elif last == 'down': start_shape", "1 size_branch = {child: nb_children(child) for child in current_node.children} \"\"\" Creation of balanced", "no.nome == 'RelOp' or no.nome == 'LogicalOp'): return ' op=\\'' + no.op +", "if(not(exp is None)): self.children.append(exp) if(not (commands is None)): self.children.append(commands) self.exp = exp; self.commands", "'│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) class ToXML: @staticmethod def toXML(no):", "+ str(op)) class ArithOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'ArithOp', op, left, right)", "#self.children.append(token) self.token = token self.value = token.value #em python, não precisamos nos preocupar", "range(0,count): 
arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') arvoreToXML.write('</' + no.nome + '>\\r\\n') @staticmethod", "nb_children = lambda node: sum(nb_children(child) for child in node.children) + 1 size_branch =", "= commands def __repr__(self): return self.nome class Read(AST): def __init__(self, id_): AST.__init__(self,'Read') print('Criando", "' lexema=\\'' + no.token.getLexema() + '\\'' elif(no.nome == 'Num'): return ' value=\\'' +", "for child in no.children: i = 0 for i in range(0,count): arvoreToXML.write('\\t') if(child.nome", "self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left self.token = self.op = op", "um nó do tipo Block.') #self.children = [] def __repr__(self): return self.nome class", "print('Criando um nó do tipo Block.') #self.children = [] def __repr__(self): return self.nome", "last=next_last) \"\"\" Printing of current node. \"\"\" if last == 'up': start_shape =", "< sum(size_branch[node] for node in up): down.append(up.pop()) \"\"\" Printing of \"up\" branch. \"\"\"", "is None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left self.token = self.op", "ToXML.classifierPrint(child) + '/>\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count,", "no.token.getLexema() + ' type:\\'' + no.value + '\\'' elif(no.nome == 'ArithOp' or no.nome", "'down': start_shape = '└' elif last == 'updown': start_shape = ' ' else:", "\"\"\"Represents a 'BEGIN ... END' block\"\"\" def __init__(self): AST.__init__(self,'Block') print('Criando um nó do", "None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left self.op = op self.right", "i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') @staticmethod def classifierPrint(no): if(no.nome", "self.children = [] self.tipo = None #tipo do nó. 
Compound, Assign, ArithOp, etc", "= exp; if (not(attr is None)): self.children.append(attr2) self.attr2 = attr2 if(not (commands is", "'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/\\r\\n') else: arvoreToXML.write('<' + child.nome +", "__repr__(self): return self.nome class Read(AST): def __init__(self, id_): AST.__init__(self,'Read') print('Criando um nó do", "\"\"\" for child in down: next_last = 'down' if down.index(child) is len(down) -", "range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') @staticmethod def classifierPrint(no): if(no.nome == 'Id'):", "c_true; self.c_false = c_false; def __repr__(self): return self.nome class While(AST): def __init__(self, exp,", "self.children.append(right) self.left = left self.op = op self.right = right def __repr__(self): #self.left.repr();", "for child in current_node.children} \"\"\" Creation of balanced lists for \"up\" branch and", "ToXML: @staticmethod def toXML(no): count = 1 arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close() arvoreToXML =", "str(op)) class Id(AST): \"\"\"The Var node is constructed out of ID token.\"\"\" def", "- 1 else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'down' in last", "range(0,count): arvoreToXML.write('\\t') if(child.nome == 'Id' or child.nome == 'Num'): arvoreToXML.write('<' + child.nome +", "self.op = op self.right = right def __repr__(self): return self.nome class If(AST): def", "com operador ' + str(op)) class ArithOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'ArithOp',", "else: start_shape = '├' if up: end_shape = '┤' elif down: end_shape =", "o tipo de value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value def", "'' print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(), end_shape)) \"\"\" Printing of \"down\" branch. 
\"\"\" for child", "[] while up and sum(size_branch[node] for node in down) < sum(size_branch[node] for node", "nome): self.nome = nome; self.children = [] self.tipo = None #tipo do nó.", "exp; def __repr__(self): return self.nome class Expr(AST): def __init__(self, nome, op, left, right):", "id_): AST.__init__(self,'Read') print('Criando um nó do tipo Read.') if(not(id_ is None)): self.children.append(id_) self.id", "do tipo ArithOp com operador ' + str(op)) class RelOp(Expr): def __init__(self, left,", "if(not(right is None)): self.children.append(right) self.left = left self.op = op self.right = right", "do tipo Print.') if(not(exp is None)): self.children.append(exp) self.exp = exp; def __repr__(self): return", "'┐' else: end_shape = '' print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(), end_shape)) \"\"\" Printing of \"down\"", "class Assign(AST): def __init__(self, left, op, right): AST.__init__(self,'Assign'); print('Criando um nó do tipo", "+\"\\n\" for child in self.children: if (child != None): ret += child.__str__(level+1) #level+1", "Read.') if(not(id_ is None)): self.children.append(id_) self.id = id_; def __repr__(self): return self.nome class", "#level+1 return ret def __repr__(self): return self.nome def __evaluate__(self): for child in self.children:", "down: end_shape = '┐' else: end_shape = '' print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(), end_shape)) \"\"\"", "+ ToXML.classifierPrint(child) + '/\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child,", "is None)): self.children.append(c_false) self.exp = exp; self.c_true = c_true; self.c_false = c_false; def", "balanced lists for \"up\" branch and \"down\" branch. 
\"\"\" up = sorted(current_node.children, key=lambda", "self.exp = exp; self.commands = commands; def __repr__(self): return self.nome class For(AST): def", "nos preocupar com o tipo de value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self):", "= exp; self.c_true = c_true; self.c_false = c_false; def __repr__(self): return self.nome class", "self.value class Num(AST): def __init__(self, token): AST.__init__(self,'Num') print('Criando um nó do tipo Num.')", "= [] while up and sum(size_branch[node] for node in down) < sum(size_branch[node] for", "self.nome = nome; self.children = [] self.tipo = None #tipo do nó. Compound,", "is None)): self.children.append(commands) self.commands = commands def __repr__(self): return self.nome class Read(AST): def", "else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'up' in last else '│',", "nó. Compound, Assign, ArithOp, etc self.value = None def __str__(self, level=0): ret =", "if(not(left is None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left self.token =", "self.children: if (child != None): return child.__evaluate__() class Compound(AST): \"\"\"Represents a 'BEGIN ...", "if up.index(child) is 0 else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'up'", "class ArithOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'ArithOp', op, left, right) print('Criando um", "def __init__(self, token): AST.__init__(self,'Id') print('Criando um nó do tipo Id.') #self.children.append(token) self.token =", "Print.') if(not(exp is None)): self.children.append(exp) self.exp = exp; def __repr__(self): return self.nome class", "preocupar com o tipo de value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return", "== 'up': start_shape = '┌' elif last == 'down': start_shape = '└' elif", "def deepSearch( no, count,arvoreToXML): count = count + 1 for child in no.children:", "= '┤' elif down: end_shape = '┐' else: 
end_shape = '' print('{0}{1}{2}{3}'.format(indent, start_shape,", "+ 1 size_branch = {child: nb_children(child) for child in current_node.children} \"\"\" Creation of", "Assign, ArithOp, etc self.value = None def __str__(self, level=0): ret = \"\\t\"*level+ repr(self)", "não precisamos nos preocupar com o tipo de value def __repr__(self): return repr(self.token.getLexema())", "= token.value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value class Num(AST): def", "self.commands = commands; def __repr__(self): return self.nome class For(AST): def __init__(self, attr, exp,", "sum(size_branch[node] for node in down) < sum(size_branch[node] for node in up): down.append(up.pop()) \"\"\"", "@staticmethod def toXML(no): count = 1 arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close() arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w')", "self.token = token self.value = token.value #em python, não precisamos nos preocupar com", "= sorted(current_node.children, key=lambda node: nb_children(node)) down = [] while up and sum(size_branch[node] for", "' value=\\'' + no.token.getLexema() + ' type:\\'' + no.value + '\\'' elif(no.nome ==", "self.left = left self.op = op self.right = right def __repr__(self): #self.left.repr(); return", "constructed out of ID token.\"\"\" def __init__(self, token): AST.__init__(self,'Id') print('Criando um nó do", "arvoreToXML.write('\\t') if(child.nome == 'Id' or child.nome == 'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child)", "len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) class ToXML: @staticmethod def toXML(no): count = 1 arvoreToXML", "child.nome + ToXML.classifierPrint(child) + '/>\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n')", "up and sum(size_branch[node] for node in down) < sum(size_branch[node] for node in up):", "Assign(AST): def __init__(self, left, op, 
right): AST.__init__(self,'Assign'); print('Criando um nó do tipo Assign.')", "... END' block\"\"\" def __init__(self): AST.__init__(self,'Block') print('Criando um nó do tipo Block.') #self.children", "= 1 arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close() arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<' + no.nome +", "i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') arvoreToXML.write('</' + no.nome +", "node. \"\"\" if last == 'up': start_shape = '┌' elif last == 'down':", "print_tree(current_node, indent=\"\", last='updown'): nb_children = lambda node: sum(nb_children(child) for child in node.children) +", "def __init__(self, left, op, right): Expr.__init__(self,'RelOp', op, left, right) print('Criando um nó do", "+ str(op)) class RelOp(Expr): def __init__(self, left, op, right): Expr.__init__(self,'RelOp', op, left, right)", "self.nome class Print(AST): def __init__(self, exp): AST.__init__(self,'Print') print('Criando um nó do tipo Print.')", "return self.nome class Print(AST): def __init__(self, exp): AST.__init__(self,'Print') print('Criando um nó do tipo", "and sum(size_branch[node] for node in down) < sum(size_branch[node] for node in up): down.append(up.pop())", "= token self.value = token.value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value", "def __evaluate__(self): return self.value def print_tree(current_node, indent=\"\", last='updown'): nb_children = lambda node: sum(nb_children(child)", "or child.nome == 'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/>\\r\\n') else: arvoreToXML.write('<'", "AST.__init__(self,'While') print('Criando um nó do tipo While.') if(not(exp is None)): self.children.append(exp) if(not (commands", "elif down: end_shape = '┐' else: end_shape = '' print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(), end_shape))", "None)): 
self.children.append(c_false) self.exp = exp; self.c_true = c_true; self.c_false = c_false; def __repr__(self):", "up: next_last = 'up' if up.index(child) is 0 else '' next_indent = '{0}{1}{2}'.format(indent,", "nó do tipo Block.') #self.children = [] def __repr__(self): return self.nome class Assign(AST):", "left, op, right): Expr.__init__(self,'RelOp', op, left, right) print('Criando um nó do tipo RelOp", "Token import Token class AST(object): def __init__(self, nome): self.nome = nome; self.children =", "in down: next_last = 'down' if down.index(child) is len(down) - 1 else ''", "is None)): self.children.append(commands) self.exp = exp; self.commands = commands; def __repr__(self): return self.nome", "right def __repr__(self): #self.left.repr(); return self.op class LogicalOp(Expr): def __init__(self, op, left, right):", "= left self.token = self.op = op self.right = right def __repr__(self): return", "return self.nome class Read(AST): def __init__(self, id_): AST.__init__(self,'Read') print('Criando um nó do tipo", "if 'down' in last else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last)", "um nó do tipo Assign.') if(not(left is None)): self.children.append(left) if(not(right is None)): self.children.append(right)", "count, arvoreToXML) for i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') arvoreToXML.write('</'", "+ ' type:\\'' + no.value + '\\'' elif(no.nome == 'ArithOp' or no.nome ==", "'\\'' elif(no.nome == 'ArithOp' or no.nome == 'RelOp' or no.nome == 'LogicalOp'): return", "attr2, commands): AST.__init__(self,'For') print('Criando um nó do tipo For.') if (not(attr is None)):", "= lambda node: sum(nb_children(child) for child in node.children) + 1 size_branch = {child:", "no, count,arvoreToXML): count = count + 1 for child in no.children: i =", "type:\\'' + no.value + '\\'' elif(no.nome == 'ArithOp' or no.nome == 'RelOp' or", "__init__(self, attr, exp, attr2, 
commands): AST.__init__(self,'For') print('Criando um nó do tipo For.') if", "def __init__(self, nome, op, left, right): AST.__init__(self,nome) if(not(left is None)): self.children.append(left) if(not(right is", "node is constructed out of ID token.\"\"\" def __init__(self, token): AST.__init__(self,'Id') print('Criando um", "* len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) class ToXML: @staticmethod def toXML(no): count = 1", "return self.nome class Assign(AST): def __init__(self, left, op, right): AST.__init__(self,'Assign'); print('Criando um nó", "if(not (commands is None)): self.children.append(commands) self.commands = commands def __repr__(self): return self.nome class", "is None)): self.children.append(exp) self.exp = exp; def __repr__(self): return self.nome class Expr(AST): def", "#self.children.append(token) self.token = token self.value = token.value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self):", "do tipo If.') if(not(exp is None)): self.children.append(exp) if(not(c_true is None)): self.children.append(c_true) if(not(c_false is", "left, right): Expr.__init__(self,'ArithOp', op, left, right) print('Criando um nó do tipo ArithOp com", "arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<' + no.nome + '>\\r\\n') for child in no.children: i", "arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') arvoreToXML.write('</' + no.nome + '>\\r\\n') @staticmethod def", "end_shape = '' print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(), end_shape)) \"\"\" Printing of \"down\" branch. 
\"\"\"", "Read(AST): def __init__(self, id_): AST.__init__(self,'Read') print('Criando um nó do tipo Read.') if(not(id_ is", "__init__(self, exp, c_true, c_false): AST.__init__(self, 'If') print('Criando um nó do tipo If.') if(not(exp", "self.nome class Read(AST): def __init__(self, id_): AST.__init__(self,'Read') print('Criando um nó do tipo Read.')", "= 0 for i in range(0,count): arvoreToXML.write('\\t') if(child.nome == 'Id' or child.nome ==", "self.token = token self.value = token.value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return", "self.children.append(commands) self.exp = exp; self.commands = commands; def __repr__(self): return self.nome class For(AST):", "= '' print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(), end_shape)) \"\"\" Printing of \"down\" branch. \"\"\" for", "in up): down.append(up.pop()) \"\"\" Printing of \"up\" branch. \"\"\" for child in up:", "self.nome class While(AST): def __init__(self, exp, commands): AST.__init__(self,'While') print('Criando um nó do tipo", "len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) \"\"\" Printing of current node. \"\"\" if last ==", "class While(AST): def __init__(self, exp, commands): AST.__init__(self,'While') print('Criando um nó do tipo While.')", "None): return child.__evaluate__() class Compound(AST): \"\"\"Represents a 'BEGIN ... END' block\"\"\" def __init__(self):", "last == 'up': start_shape = '┌' elif last == 'down': start_shape = '└'", "= [] def __repr__(self): return self.nome class Assign(AST): def __init__(self, left, op, right):", "start_shape, current_node.__repr__(), end_shape)) \"\"\" Printing of \"down\" branch. 
\"\"\" for child in down:", "+ '>\\r\\n') @staticmethod def deepSearch( no, count,arvoreToXML): count = count + 1 for", "return repr(self.token.getLexema()) def __evaluate__(self): return self.value def print_tree(current_node, indent=\"\", last='updown'): nb_children = lambda", "self.right = right def __repr__(self): #self.left.repr(); return self.op class LogicalOp(Expr): def __init__(self, op,", "arvoreToXML.write('</' + no.nome + '>\\r\\n') @staticmethod def deepSearch( no, count,arvoreToXML): count = count", "attr, exp, attr2, commands): AST.__init__(self,'For') print('Criando um nó do tipo For.') if (not(attr", "do nó. Compound, Assign, ArithOp, etc self.value = None def __str__(self, level=0): ret", "+ child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML) for i in range(0,count):", "'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/>\\r\\n') else: arvoreToXML.write('<' + child.nome +", "child in down: next_last = 'down' if down.index(child) is len(down) - 1 else", "if (not(attr is None)): self.children.append(attr) self.attr = attr if(not(exp is None)): self.children.append(exp) self.exp", "__repr__(self): return self.nome class Print(AST): def __init__(self, exp): AST.__init__(self,'Print') print('Criando um nó do", "'>\\r\\n') for child in no.children: i = 0 for i in range(0,count): arvoreToXML.write('\\t')", "'up' in last else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) \"\"\"", "tipo Print.') if(not(exp is None)): self.children.append(exp) self.exp = exp; def __repr__(self): return self.nome", "return child.__evaluate__() class Compound(AST): \"\"\"Represents a 'BEGIN ... 
END' block\"\"\" def __init__(self): AST.__init__(self,'Block')", "exp, commands): AST.__init__(self,'While') print('Criando um nó do tipo While.') if(not(exp is None)): self.children.append(exp)", "= op self.right = right def __repr__(self): #self.left.repr(); return self.op class LogicalOp(Expr): def", "self.children.append(exp) self.exp = exp; if (not(attr is None)): self.children.append(attr2) self.attr2 = attr2 if(not", "self.c_false = c_false; def __repr__(self): return self.nome class While(AST): def __init__(self, exp, commands):", "' ' if 'up' in last else '│', \" \" * len(current_node.__repr__())) print_tree(child,", "def __str__(self, level=0): ret = \"\\t\"*level+ repr(self) +\"\\n\" for child in self.children: if", "op, right): AST.__init__(self,'Assign'); print('Criando um nó do tipo Assign.') if(not(left is None)): self.children.append(left)", "left, right) print('Criando um nó do tipo RelOp com operador ' + str(op))", "= '┐' else: end_shape = '' print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(), end_shape)) \"\"\" Printing of", "child.nome + ToXML.classifierPrint(child) + '/\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n')", "python, não precisamos nos preocupar com o tipo de value def __repr__(self): return", "def __init__(self, exp): AST.__init__(self,'Print') print('Criando um nó do tipo Print.') if(not(exp is None)):", "' if 'down' in last else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent,", "do tipo For.') if (not(attr is None)): self.children.append(attr) self.attr = attr if(not(exp is", "0 for i in range(0,count): arvoreToXML.write('\\t') if(child.nome == 'Id' or child.nome == 'Num'):", "@staticmethod def classifierPrint(no): if(no.nome == 'Id'): return ' lexema=\\'' + no.token.getLexema() + '\\''", "' ' if 'down' in last else '│', \" \" * len(current_node.__repr__())) print_tree(child,", "'Id' or child.nome == 'Num'): arvoreToXML.write('<' + 
child.nome + ToXML.classifierPrint(child) + '/\\r\\n') else:", "return repr(self.token.getLexema()) def __evaluate__(self): return self.value class Num(AST): def __init__(self, token): AST.__init__(self,'Num') print('Criando", "for node in down) < sum(size_branch[node] for node in up): down.append(up.pop()) \"\"\" Printing", "__init__(self, id_): AST.__init__(self,'Read') print('Criando um nó do tipo Read.') if(not(id_ is None)): self.children.append(id_)", "com o tipo de value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value", "class ToXML: @staticmethod def toXML(no): count = 1 arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close() arvoreToXML", "def __init__(self, op, left, right): Expr.__init__(self,'LogicalOp', op, left, right) print('Criando um nó do", "self.exp = exp; if (not(attr is None)): self.children.append(attr2) self.attr2 = attr2 if(not (commands", "+ no.nome + '>\\r\\n') for child in no.children: i = 0 for i", "exp; self.c_true = c_true; self.c_false = c_false; def __repr__(self): return self.nome class While(AST):", "None)): self.children.append(commands) self.commands = commands def __repr__(self): return self.nome class Read(AST): def __init__(self,", "com operador ' + str(op)) class Id(AST): \"\"\"The Var node is constructed out", "AST.__init__(self,'For') print('Criando um nó do tipo For.') if (not(attr is None)): self.children.append(attr) self.attr", "right def __repr__(self): return self.nome class If(AST): def __init__(self, exp, c_true, c_false): AST.__init__(self,", "\"\"\" up = sorted(current_node.children, key=lambda node: nb_children(node)) down = [] while up and", "operador ' + str(op)) class RelOp(Expr): def __init__(self, left, op, right): Expr.__init__(self,'RelOp', op,", "' type:\\'' + no.value + '\\'' elif(no.nome == 'ArithOp' or no.nome == 'RelOp'", "\"up\" branch and \"down\" branch. 
\"\"\" up = sorted(current_node.children, key=lambda node: nb_children(node)) down", "+ child.nome + ToXML.classifierPrint(child) + '/>\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) +", "\" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) class ToXML: @staticmethod def toXML(no): count", "up.index(child) is 0 else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'up' in", "None)): self.children.append(c_true) if(not(c_false is None)): self.children.append(c_false) self.exp = exp; self.c_true = c_true; self.c_false", "self.nome class For(AST): def __init__(self, attr, exp, attr2, commands): AST.__init__(self,'For') print('Criando um nó", "== 'Id' or child.nome == 'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/>\\r\\n')", "class Print(AST): def __init__(self, exp): AST.__init__(self,'Print') print('Criando um nó do tipo Print.') if(not(exp", "child.__evaluate__() class Compound(AST): \"\"\"Represents a 'BEGIN ... 
END' block\"\"\" def __init__(self): AST.__init__(self,'Block') print('Criando", "' if 'up' in last else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent,", "open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<' + no.nome + '>\\r\\n') for child in no.children: i = 0", "op, left, right): Expr.__init__(self,'ArithOp', op, left, right) print('Criando um nó do tipo ArithOp", "lambda node: sum(nb_children(child) for child in node.children) + 1 size_branch = {child: nb_children(child)", "if (not(attr is None)): self.children.append(attr2) self.attr2 = attr2 if(not (commands is None)): self.children.append(commands)", "child in self.children: if (child != None): ret += child.__str__(level+1) #level+1 return ret", "is None)): self.children.append(id_) self.id = id_; def __repr__(self): return self.nome class Print(AST): def", "end_shape = '┐' else: end_shape = '' print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(), end_shape)) \"\"\" Printing", "1 else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'down' in last else", "+ child.nome + '>\\r\\n') @staticmethod def classifierPrint(no): if(no.nome == 'Id'): return ' lexema=\\''", "precisamos nos preocupar com o tipo de value def __repr__(self): return repr(self.token.getLexema()) def", "print_tree(child, indent=next_indent, last=next_last) class ToXML: @staticmethod def toXML(no): count = 1 arvoreToXML =", "node: nb_children(node)) down = [] while up and sum(size_branch[node] for node in down)", "tipo If.') if(not(exp is None)): self.children.append(exp) if(not(c_true is None)): self.children.append(c_true) if(not(c_false is None)):", "'updown': start_shape = ' ' else: start_shape = '├' if up: end_shape =", "print('Criando um nó do tipo LogicalOp com operador ' + str(op)) class ArithOp(Expr):", "for \"up\" branch and \"down\" branch. 
\"\"\" up = sorted(current_node.children, key=lambda node: nb_children(node))", "= {child: nb_children(child) for child in current_node.children} \"\"\" Creation of balanced lists for", "exp; if (not(attr is None)): self.children.append(attr2) self.attr2 = attr2 if(not (commands is None)):", "self.op = op self.right = right def __repr__(self): #self.left.repr(); return self.op class LogicalOp(Expr):", "def __evaluate__(self): return self.value class Num(AST): def __init__(self, token): AST.__init__(self,'Num') print('Criando um nó", "no.value + '\\'' elif(no.nome == 'ArithOp' or no.nome == 'RelOp' or no.nome ==", "do tipo Num.') #self.children.append(token) self.token = token self.value = token.value #em python, não", "last=next_last) class ToXML: @staticmethod def toXML(no): count = 1 arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close()", "in self.children: if (child != None): return child.__evaluate__() class Compound(AST): \"\"\"Represents a 'BEGIN", "= '├' if up: end_shape = '┤' elif down: end_shape = '┐' else:", "do tipo Id.') #self.children.append(token) self.token = token self.value = token.value def __repr__(self): return", "self.children.append(c_false) self.exp = exp; self.c_true = c_true; self.c_false = c_false; def __repr__(self): return", "current_node.children} \"\"\" Creation of balanced lists for \"up\" branch and \"down\" branch. 
\"\"\"", "last == 'updown': start_shape = ' ' else: start_shape = '├' if up:", "None): ret += child.__str__(level+1) #level+1 return ret def __repr__(self): return self.nome def __evaluate__(self):", "exp; self.commands = commands; def __repr__(self): return self.nome class For(AST): def __init__(self, attr,", "def __init__(self, attr, exp, attr2, commands): AST.__init__(self,'For') print('Criando um nó do tipo For.')", "'/>\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML) for", "+ '>\\r\\n') arvoreToXML.write('</' + no.nome + '>\\r\\n') @staticmethod def deepSearch( no, count,arvoreToXML): count", "size_branch = {child: nb_children(child) for child in current_node.children} \"\"\" Creation of balanced lists", "down = [] while up and sum(size_branch[node] for node in down) < sum(size_branch[node]", "print('Criando um nó do tipo ArithOp com operador ' + str(op)) class RelOp(Expr):", "'Num'): return ' value=\\'' + no.token.getLexema() + ' type:\\'' + no.value + '\\''", "right): AST.__init__(self,nome) if(not(left is None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left", "um nó do tipo Read.') if(not(id_ is None)): self.children.append(id_) self.id = id_; def", "__init__(self, nome, op, left, right): AST.__init__(self,nome) if(not(left is None)): self.children.append(left) if(not(right is None)):", "(child != None): return child.__evaluate__() class Compound(AST): \"\"\"Represents a 'BEGIN ... 
END' block\"\"\"", "next_indent = '{0}{1}{2}'.format(indent, ' ' if 'up' in last else '│', \" \"", "def __evaluate__(self): for child in self.children: if (child != None): return child.__evaluate__() class", "def __repr__(self): return self.nome class Assign(AST): def __init__(self, left, op, right): AST.__init__(self,'Assign'); print('Criando", "right) print('Criando um nó do tipo RelOp com operador ' + str(op)) class", "tipo ArithOp com operador ' + str(op)) class RelOp(Expr): def __init__(self, left, op,", "child in node.children) + 1 size_branch = {child: nb_children(child) for child in current_node.children}", "elif last == 'updown': start_shape = ' ' else: start_shape = '├' if", "AST.__init__(self,'Block') print('Criando um nó do tipo Block.') #self.children = [] def __repr__(self): return", "nó do tipo If.') if(not(exp is None)): self.children.append(exp) if(not(c_true is None)): self.children.append(c_true) if(not(c_false", "token.value #em python, não precisamos nos preocupar com o tipo de value def", "indent=\"\", last='updown'): nb_children = lambda node: sum(nb_children(child) for child in node.children) + 1", "'/\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML) for", "None)): self.children.append(right) self.left = left self.op = op self.right = right def __repr__(self):", "up): down.append(up.pop()) \"\"\" Printing of \"up\" branch. 
\"\"\" for child in up: next_last", "= '└' elif last == 'updown': start_shape = ' ' else: start_shape =", "no.nome + '>\\r\\n') for child in no.children: i = 0 for i in", "node in down) < sum(size_branch[node] for node in up): down.append(up.pop()) \"\"\" Printing of", "start_shape = ' ' else: start_shape = '├' if up: end_shape = '┤'", "in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') arvoreToXML.write('</' + no.nome + '>\\r\\n')", "def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value class Num(AST): def __init__(self, token):", "class Read(AST): def __init__(self, id_): AST.__init__(self,'Read') print('Criando um nó do tipo Read.') if(not(id_", "down.append(up.pop()) \"\"\" Printing of \"up\" branch. \"\"\" for child in up: next_last =", "no.children: i = 0 for i in range(0,count): arvoreToXML.write('\\t') if(child.nome == 'Id' or", "+ '>\\r\\n') @staticmethod def classifierPrint(no): if(no.nome == 'Id'): return ' lexema=\\'' + no.token.getLexema()", "if(not(c_false is None)): self.children.append(c_false) self.exp = exp; self.c_true = c_true; self.c_false = c_false;", "def __repr__(self): return self.nome class Expr(AST): def __init__(self, nome, op, left, right): AST.__init__(self,nome)", "self.op class LogicalOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'LogicalOp', op, left, right) print('Criando", "== 'Num'): return ' value=\\'' + no.token.getLexema() + ' type:\\'' + no.value +", "+ no.value + '\\'' elif(no.nome == 'ArithOp' or no.nome == 'RelOp' or no.nome", "if(not (commands is None)): self.children.append(commands) self.exp = exp; self.commands = commands; def __repr__(self):", "__repr__(self): return self.nome class If(AST): def __init__(self, exp, c_true, c_false): AST.__init__(self, 'If') print('Criando", "return self.op class LogicalOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'LogicalOp', op, left, right)", "\"down\" 
branch. \"\"\" for child in down: next_last = 'down' if down.index(child) is", "if(child.nome == 'Id' or child.nome == 'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) +", "else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML) for i", "__init__(self, left, op, right): Expr.__init__(self,'RelOp', op, left, right) print('Criando um nó do tipo", "\"down\" branch. \"\"\" up = sorted(current_node.children, key=lambda node: nb_children(node)) down = [] while", "= attr2 if(not (commands is None)): self.children.append(commands) self.commands = commands def __repr__(self): return", "'│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) \"\"\" Printing of current node.", "exp): AST.__init__(self,'Print') print('Criando um nó do tipo Print.') if(not(exp is None)): self.children.append(exp) self.exp", "+ ToXML.classifierPrint(child) + '/>\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child,", "ret def __repr__(self): return self.nome def __evaluate__(self): for child in self.children: if (child", "= token self.value = token.value #em python, não precisamos nos preocupar com o", "tipo Id.') #self.children.append(token) self.token = token self.value = token.value def __repr__(self): return repr(self.token.getLexema())", "next_last = 'down' if down.index(child) is len(down) - 1 else '' next_indent =", "'Id' or child.nome == 'Num'): arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/>\\r\\n') else:", "child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML) for i in range(0,count): arvoreToXML.write('\\t')", "token self.value = token.value #em python, não precisamos nos preocupar com o tipo", "+ child.nome + '>\\r\\n') arvoreToXML.write('</' + no.nome + '>\\r\\n') @staticmethod def deepSearch( no,", "__init__(self, 
op, left, right): Expr.__init__(self,'ArithOp', op, left, right) print('Criando um nó do tipo", "count + 1 for child in no.children: i = 0 for i in", "Compound, Assign, ArithOp, etc self.value = None def __str__(self, level=0): ret = \"\\t\"*level+", "print('Criando um nó do tipo While.') if(not(exp is None)): self.children.append(exp) if(not (commands is", "Printing of \"up\" branch. \"\"\" for child in up: next_last = 'up' if", "+ '>\\r\\n') for child in no.children: i = 0 for i in range(0,count):", "token.\"\"\" def __init__(self, token): AST.__init__(self,'Id') print('Criando um nó do tipo Id.') #self.children.append(token) self.token", "If(AST): def __init__(self, exp, c_true, c_false): AST.__init__(self, 'If') print('Criando um nó do tipo", "and \"down\" branch. \"\"\" up = sorted(current_node.children, key=lambda node: nb_children(node)) down = []", "return self.nome class Expr(AST): def __init__(self, nome, op, left, right): AST.__init__(self,nome) if(not(left is", "self.nome class Expr(AST): def __init__(self, nome, op, left, right): AST.__init__(self,nome) if(not(left is None)):", "um nó do tipo If.') if(not(exp is None)): self.children.append(exp) if(not(c_true is None)): self.children.append(c_true)", "tipo Read.') if(not(id_ is None)): self.children.append(id_) self.id = id_; def __repr__(self): return self.nome", "\"\"\" Creation of balanced lists for \"up\" branch and \"down\" branch. \"\"\" up", "commands def __repr__(self): return self.nome class Read(AST): def __init__(self, id_): AST.__init__(self,'Read') print('Criando um", "com operador ' + str(op)) class RelOp(Expr): def __init__(self, left, op, right): Expr.__init__(self,'RelOp',", "child in self.children: if (child != None): return child.__evaluate__() class Compound(AST): \"\"\"Represents a", "for node in up): down.append(up.pop()) \"\"\" Printing of \"up\" branch. \"\"\" for child", "\"\"\" Printing of \"down\" branch. 
\"\"\" for child in down: next_last = 'down'", "Printing of \"down\" branch. \"\"\" for child in down: next_last = 'down' if", "self.children.append(attr2) self.attr2 = attr2 if(not (commands is None)): self.children.append(commands) self.commands = commands def", "AST.__init__(self,'Read') print('Criando um nó do tipo Read.') if(not(id_ is None)): self.children.append(id_) self.id =", "__init__(self, token): AST.__init__(self,'Num') print('Criando um nó do tipo Num.') #self.children.append(token) self.token = token", "arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/>\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child)", "of ID token.\"\"\" def __init__(self, token): AST.__init__(self,'Id') print('Criando um nó do tipo Id.')", "__str__(self, level=0): ret = \"\\t\"*level+ repr(self) +\"\\n\" for child in self.children: if (child", "'>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML) for i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome +", "Num.') #self.children.append(token) self.token = token self.value = token.value #em python, não precisamos nos", "count, arvoreToXML) for i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') @staticmethod", "1 for child in no.children: i = 0 for i in range(0,count): arvoreToXML.write('\\t')", "self.value def print_tree(current_node, indent=\"\", last='updown'): nb_children = lambda node: sum(nb_children(child) for child in", "== 'down': start_shape = '└' elif last == 'updown': start_shape = ' '", "Token class AST(object): def __init__(self, nome): self.nome = nome; self.children = [] self.tipo", "AST.__init__(self,'Id') print('Criando um nó do tipo Id.') #self.children.append(token) self.token = token self.value =", "right) print('Criando um nó do tipo LogicalOp com operador ' + str(op)) class", "RelOp(Expr): def __init__(self, left, op, right): Expr.__init__(self,'RelOp', op, left, right) print('Criando 
um nó", "Block.') #self.children = [] def __repr__(self): return self.nome class Assign(AST): def __init__(self, left,", "left, right): Expr.__init__(self,'LogicalOp', op, left, right) print('Criando um nó do tipo LogicalOp com", "def __init__(self, token): AST.__init__(self,'Num') print('Criando um nó do tipo Num.') #self.children.append(token) self.token =", "branch. \"\"\" up = sorted(current_node.children, key=lambda node: nb_children(node)) down = [] while up", "0 else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'up' in last else", "in last else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) \"\"\" Printing", "'┤' elif down: end_shape = '┐' else: end_shape = '' print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(),", "up = sorted(current_node.children, key=lambda node: nb_children(node)) down = [] while up and sum(size_branch[node]", "for i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</' + child.nome + '>\\r\\n') @staticmethod def classifierPrint(no):", "nome; self.children = [] self.tipo = None #tipo do nó. Compound, Assign, ArithOp,", "'{0}{1}{2}'.format(indent, ' ' if 'up' in last else '│', \" \" * len(current_node.__repr__()))", "None)): self.children.append(exp) self.exp = exp; def __repr__(self): return self.nome class Expr(AST): def __init__(self,", "#em python, não precisamos nos preocupar com o tipo de value def __repr__(self):", "tipo Num.') #self.children.append(token) self.token = token self.value = token.value #em python, não precisamos", "current node. 
\"\"\" if last == 'up': start_shape = '┌' elif last ==", "self.nome class Assign(AST): def __init__(self, left, op, right): AST.__init__(self,'Assign'); print('Criando um nó do", "def __repr__(self): return self.nome class Read(AST): def __init__(self, id_): AST.__init__(self,'Read') print('Criando um nó", "id_; def __repr__(self): return self.nome class Print(AST): def __init__(self, exp): AST.__init__(self,'Print') print('Criando um", "nó do tipo Print.') if(not(exp is None)): self.children.append(exp) self.exp = exp; def __repr__(self):", "class Compound(AST): \"\"\"Represents a 'BEGIN ... END' block\"\"\" def __init__(self): AST.__init__(self,'Block') print('Criando um", "sum(size_branch[node] for node in up): down.append(up.pop()) \"\"\" Printing of \"up\" branch. \"\"\" for", "'' next_indent = '{0}{1}{2}'.format(indent, ' ' if 'up' in last else '│', \"", "class Expr(AST): def __init__(self, nome, op, left, right): AST.__init__(self,nome) if(not(left is None)): self.children.append(left)", "tipo RelOp com operador ' + str(op)) class Id(AST): \"\"\"The Var node is", "de value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value def print_tree(current_node, indent=\"\",", "else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) class ToXML: @staticmethod def", "__repr__(self): #self.left.repr(); return self.op class LogicalOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'LogicalOp', op,", "operador ' + str(op)) class ArithOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'ArithOp', op,", "= token.value #em python, não precisamos nos preocupar com o tipo de value", "ret += child.__str__(level+1) #level+1 return ret def __repr__(self): return self.nome def __evaluate__(self): for", "class LogicalOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'LogicalOp', op, left, right) print('Criando um", "if(not(left is None)): 
self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left self.op =", "arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '/\\r\\n') else: arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child)", "token.value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value class Num(AST): def __init__(self,", "'Id'): return ' lexema=\\'' + no.token.getLexema() + '\\'' elif(no.nome == 'Num'): return '", "__repr__(self): return self.nome class Expr(AST): def __init__(self, nome, op, left, right): AST.__init__(self,nome) if(not(left", "else: end_shape = '' print('{0}{1}{2}{3}'.format(indent, start_shape, current_node.__repr__(), end_shape)) \"\"\" Printing of \"down\" branch.", "\"\"\" Printing of current node. \"\"\" if last == 'up': start_shape = '┌'", "(child != None): ret += child.__str__(level+1) #level+1 return ret def __repr__(self): return self.nome", "a 'BEGIN ... END' block\"\"\" def __init__(self): AST.__init__(self,'Block') print('Criando um nó do tipo", "= 'down' if down.index(child) is len(down) - 1 else '' next_indent = '{0}{1}{2}'.format(indent,", "@staticmethod def deepSearch( no, count,arvoreToXML): count = count + 1 for child in", "None)): self.children.append(commands) self.exp = exp; self.commands = commands; def __repr__(self): return self.nome class", "'└' elif last == 'updown': start_shape = ' ' else: start_shape = '├'", "RelOp com operador ' + str(op)) class Id(AST): \"\"\"The Var node is constructed", "+ str(op)) class Id(AST): \"\"\"The Var node is constructed out of ID token.\"\"\"", "elif(no.nome == 'ArithOp' or no.nome == 'RelOp' or no.nome == 'LogicalOp'): return '", "token self.value = token.value def __repr__(self): return repr(self.token.getLexema()) def __evaluate__(self): return self.value class", "self.c_true = c_true; self.c_false = c_false; def __repr__(self): return self.nome class While(AST): def", "self.token = self.op = op 
self.right = right def __repr__(self): return self.nome class", "print('Criando um nó do tipo For.') if (not(attr is None)): self.children.append(attr) self.attr =", "no.nome + '>\\r\\n') @staticmethod def deepSearch( no, count,arvoreToXML): count = count + 1", "um nó do tipo While.') if(not(exp is None)): self.children.append(exp) if(not (commands is None)):", "__init__(self, token): AST.__init__(self,'Id') print('Criando um nó do tipo Id.') #self.children.append(token) self.token = token", "def __init__(self, exp, commands): AST.__init__(self,'While') print('Criando um nó do tipo While.') if(not(exp is", "Expr(AST): def __init__(self, nome, op, left, right): AST.__init__(self,nome) if(not(left is None)): self.children.append(left) if(not(right", "token): AST.__init__(self,'Id') print('Criando um nó do tipo Id.') #self.children.append(token) self.token = token self.value", "i = 0 for i in range(0,count): arvoreToXML.write('\\t') if(child.nome == 'Id' or child.nome", "i in range(0,count): arvoreToXML.write('\\t') if(child.nome == 'Id' or child.nome == 'Num'): arvoreToXML.write('<' +", "in current_node.children} \"\"\" Creation of balanced lists for \"up\" branch and \"down\" branch.", "+ ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML) for i in range(0,count): arvoreToXML.write('\\t') arvoreToXML.write('</'", "commands): AST.__init__(self,'While') print('Criando um nó do tipo While.') if(not(exp is None)): self.children.append(exp) if(not", "Print(AST): def __init__(self, exp): AST.__init__(self,'Print') print('Criando um nó do tipo Print.') if(not(exp is", "right): Expr.__init__(self,'RelOp', op, left, right) print('Criando um nó do tipo RelOp com operador", "in range(0,count): arvoreToXML.write('\\t') if(child.nome == 'Id' or child.nome == 'Num'): arvoreToXML.write('<' + child.nome", "LogicalOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'LogicalOp', op, left, right) print('Criando um nó", "return self.nome 
class For(AST): def __init__(self, attr, exp, attr2, commands): AST.__init__(self,'For') print('Criando um", "left, right): AST.__init__(self,nome) if(not(left is None)): self.children.append(left) if(not(right is None)): self.children.append(right) self.left =", "nó do tipo Id.') #self.children.append(token) self.token = token self.value = token.value def __repr__(self):", "arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close() arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<' + no.nome + '>\\r\\n') for", "else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) \"\"\" Printing of current", "in last else '│', \" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) class ToXML:", "attr if(not(exp is None)): self.children.append(exp) self.exp = exp; if (not(attr is None)): self.children.append(attr2)", "def __repr__(self): return self.nome class Print(AST): def __init__(self, exp): AST.__init__(self,'Print') print('Criando um nó", "__init__(self, exp): AST.__init__(self,'Print') print('Criando um nó do tipo Print.') if(not(exp is None)): self.children.append(exp)", "arvoreToXML.write('<' + child.nome + ToXML.classifierPrint(child) + '>\\r\\n') ToXML.deepSearch(child, count, arvoreToXML) for i in", "classifierPrint(no): if(no.nome == 'Id'): return ' lexema=\\'' + no.token.getLexema() + '\\'' elif(no.nome ==", "down.index(child) is len(down) - 1 else '' next_indent = '{0}{1}{2}'.format(indent, ' ' if", "count,arvoreToXML): count = count + 1 for child in no.children: i = 0", "[] def __repr__(self): return self.nome class Assign(AST): def __init__(self, left, op, right): AST.__init__(self,'Assign');", "indent=next_indent, last=next_last) class ToXML: @staticmethod def toXML(no): count = 1 arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w')", "if (child != None): ret += child.__str__(level+1) #level+1 return ret def 
__repr__(self): return", "AST.__init__(self,'Print') print('Criando um nó do tipo Print.') if(not(exp is None)): self.children.append(exp) self.exp =", "def toXML(no): count = 1 arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.close() arvoreToXML = open('../../tp2/output/arvoreToXML.txt','w') arvoreToXML.write('<'", "nó do tipo For.') if (not(attr is None)): self.children.append(attr) self.attr = attr if(not(exp", "no.nome == 'LogicalOp'): return ' op=\\'' + no.op + '\\'' else: return ''", "#self.left.repr(); return self.op class LogicalOp(Expr): def __init__(self, op, left, right): Expr.__init__(self,'LogicalOp', op, left,", "self.value = None def __str__(self, level=0): ret = \"\\t\"*level+ repr(self) +\"\\n\" for child", "of balanced lists for \"up\" branch and \"down\" branch. \"\"\" up = sorted(current_node.children,", "sorted(current_node.children, key=lambda node: nb_children(node)) down = [] while up and sum(size_branch[node] for node", "in no.children: i = 0 for i in range(0,count): arvoreToXML.write('\\t') if(child.nome == 'Id'", "ID token.\"\"\" def __init__(self, token): AST.__init__(self,'Id') print('Criando um nó do tipo Id.') #self.children.append(token)", "(not(attr is None)): self.children.append(attr2) self.attr2 = attr2 if(not (commands is None)): self.children.append(commands) self.commands", "\"\"\" Printing of \"up\" branch. \"\"\" for child in up: next_last = 'up'", "um nó do tipo Id.') #self.children.append(token) self.token = token self.value = token.value def", "self.children.append(left) if(not(right is None)): self.children.append(right) self.left = left self.op = op self.right =", "\" \" * len(current_node.__repr__())) print_tree(child, indent=next_indent, last=next_last) \"\"\" Printing of current node. \"\"\"" ]
[ "name='no', field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'), ), migrations.AlterField( model_name='emp', name='comm', field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True), ),", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hrs', '0001_initial'), ] operations", "), migrations.AlterField( model_name='dept', name='no', field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'), ), migrations.AlterField( model_name='emp', name='comm', field=models.DecimalField(blank=True, decimal_places=2,", "name='excellent', field=models.BooleanField(default=0, verbose_name='是否优秀'), ), migrations.AlterField( model_name='dept', name='location', field=models.CharField(max_length=10, verbose_name='部门所在地'), ), migrations.AlterField( model_name='dept', name='name',", "'0001_initial'), ] operations = [ migrations.AddField( model_name='dept', name='excellent', field=models.BooleanField(default=0, verbose_name='是否优秀'), ), migrations.AlterField( model_name='dept',", "field=models.BooleanField(default=0, verbose_name='是否优秀'), ), migrations.AlterField( model_name='dept', name='location', field=models.CharField(max_length=10, verbose_name='部门所在地'), ), migrations.AlterField( model_name='dept', name='name', field=models.CharField(max_length=20,", "dependencies = [ ('hrs', '0001_initial'), ] operations = [ migrations.AddField( model_name='dept', name='excellent', field=models.BooleanField(default=0,", "migrations.AlterField( model_name='dept', name='name', field=models.CharField(max_length=20, verbose_name='部门名称'), ), migrations.AlterField( model_name='dept', name='no', field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'), ),", "model_name='dept', name='location', field=models.CharField(max_length=10, verbose_name='部门所在地'), ), migrations.AlterField( model_name='dept', name='name', field=models.CharField(max_length=20, 
verbose_name='部门名称'), ), migrations.AlterField( model_name='dept',", "model_name='dept', name='excellent', field=models.BooleanField(default=0, verbose_name='是否优秀'), ), migrations.AlterField( model_name='dept', name='location', field=models.CharField(max_length=10, verbose_name='部门所在地'), ), migrations.AlterField( model_name='dept',", "name='name', field=models.CharField(max_length=20, verbose_name='部门名称'), ), migrations.AlterField( model_name='dept', name='no', field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'), ), migrations.AlterField( model_name='emp',", "by Django 2.0.5 on 2018-05-23 01:23 from django.db import migrations, models class Migration(migrations.Migration):", "migrations.AlterField( model_name='emp', name='comm', field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True), ), migrations.AlterField( model_name='emp', name='mgr', field=models.IntegerField(blank=True, null=True),", "), migrations.AlterField( model_name='dept', name='name', field=models.CharField(max_length=20, verbose_name='部门名称'), ), migrations.AlterField( model_name='dept', name='no', field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'),", "# Generated by Django 2.0.5 on 2018-05-23 01:23 from django.db import migrations, models", "serialize=False, verbose_name='部门编号'), ), migrations.AlterField( model_name='emp', name='comm', field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True), ), migrations.AlterField( model_name='emp',", "('hrs', '0001_initial'), ] operations = [ migrations.AddField( model_name='dept', name='excellent', field=models.BooleanField(default=0, verbose_name='是否优秀'), ), migrations.AlterField(", "model_name='dept', name='no', field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'), ), migrations.AlterField( model_name='emp', name='comm', field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True),", "name='location', 
field=models.CharField(max_length=10, verbose_name='部门所在地'), ), migrations.AlterField( model_name='dept', name='name', field=models.CharField(max_length=20, verbose_name='部门名称'), ), migrations.AlterField( model_name='dept', name='no',", "), migrations.AlterField( model_name='emp', name='comm', field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True), ), migrations.AlterField( model_name='emp', name='mgr', field=models.IntegerField(blank=True,", "01:23 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hrs', '0001_initial'),", "name='comm', field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True), ), migrations.AlterField( model_name='emp', name='mgr', field=models.IntegerField(blank=True, null=True), ), ]", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hrs', '0001_initial'), ]", "migrations.AlterField( model_name='dept', name='no', field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'), ), migrations.AlterField( model_name='emp', name='comm', field=models.DecimalField(blank=True, decimal_places=2, max_digits=7,", "), migrations.AlterField( model_name='dept', name='location', field=models.CharField(max_length=10, verbose_name='部门所在地'), ), migrations.AlterField( model_name='dept', name='name', field=models.CharField(max_length=20, verbose_name='部门名称'), ),", "verbose_name='是否优秀'), ), migrations.AlterField( model_name='dept', name='location', field=models.CharField(max_length=10, verbose_name='部门所在地'), ), migrations.AlterField( model_name='dept', name='name', field=models.CharField(max_length=20, verbose_name='部门名称'),", "= [ ('hrs', '0001_initial'), ] operations = [ migrations.AddField( model_name='dept', name='excellent', field=models.BooleanField(default=0, verbose_name='是否优秀'),", "Migration(migrations.Migration): dependencies = [ ('hrs', '0001_initial'), ] operations = [ migrations.AddField( 
model_name='dept', name='excellent',", "2018-05-23 01:23 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('hrs',", "models class Migration(migrations.Migration): dependencies = [ ('hrs', '0001_initial'), ] operations = [ migrations.AddField(", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('hrs', '0001_initial'), ] operations =", "verbose_name='部门所在地'), ), migrations.AlterField( model_name='dept', name='name', field=models.CharField(max_length=20, verbose_name='部门名称'), ), migrations.AlterField( model_name='dept', name='no', field=models.IntegerField(primary_key=True, serialize=False,", "[ migrations.AddField( model_name='dept', name='excellent', field=models.BooleanField(default=0, verbose_name='是否优秀'), ), migrations.AlterField( model_name='dept', name='location', field=models.CharField(max_length=10, verbose_name='部门所在地'), ),", "field=models.CharField(max_length=20, verbose_name='部门名称'), ), migrations.AlterField( model_name='dept', name='no', field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'), ), migrations.AlterField( model_name='emp', name='comm',", "migrations.AlterField( model_name='dept', name='location', field=models.CharField(max_length=10, verbose_name='部门所在地'), ), migrations.AlterField( model_name='dept', name='name', field=models.CharField(max_length=20, verbose_name='部门名称'), ), migrations.AlterField(", "model_name='emp', name='comm', field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True), ), migrations.AlterField( model_name='emp', name='mgr', field=models.IntegerField(blank=True, null=True), ),", "2.0.5 on 2018-05-23 01:23 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "verbose_name='部门名称'), ), migrations.AlterField( model_name='dept', name='no', field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'), ), migrations.AlterField( 
model_name='emp', name='comm', field=models.DecimalField(blank=True,", "= [ migrations.AddField( model_name='dept', name='excellent', field=models.BooleanField(default=0, verbose_name='是否优秀'), ), migrations.AlterField( model_name='dept', name='location', field=models.CharField(max_length=10, verbose_name='部门所在地'),", "field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'), ), migrations.AlterField( model_name='emp', name='comm', field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True), ), migrations.AlterField(", "class Migration(migrations.Migration): dependencies = [ ('hrs', '0001_initial'), ] operations = [ migrations.AddField( model_name='dept',", "field=models.CharField(max_length=10, verbose_name='部门所在地'), ), migrations.AlterField( model_name='dept', name='name', field=models.CharField(max_length=20, verbose_name='部门名称'), ), migrations.AlterField( model_name='dept', name='no', field=models.IntegerField(primary_key=True,", "] operations = [ migrations.AddField( model_name='dept', name='excellent', field=models.BooleanField(default=0, verbose_name='是否优秀'), ), migrations.AlterField( model_name='dept', name='location',", "operations = [ migrations.AddField( model_name='dept', name='excellent', field=models.BooleanField(default=0, verbose_name='是否优秀'), ), migrations.AlterField( model_name='dept', name='location', field=models.CharField(max_length=10,", "migrations, models class Migration(migrations.Migration): dependencies = [ ('hrs', '0001_initial'), ] operations = [", "Django 2.0.5 on 2018-05-23 01:23 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "[ ('hrs', '0001_initial'), ] operations = [ migrations.AddField( model_name='dept', name='excellent', field=models.BooleanField(default=0, verbose_name='是否优秀'), ),", "Generated by Django 2.0.5 on 2018-05-23 01:23 from django.db import migrations, models class", "model_name='dept', name='name', 
field=models.CharField(max_length=20, verbose_name='部门名称'), ), migrations.AlterField( model_name='dept', name='no', field=models.IntegerField(primary_key=True, serialize=False, verbose_name='部门编号'), ), migrations.AlterField(", "migrations.AddField( model_name='dept', name='excellent', field=models.BooleanField(default=0, verbose_name='是否优秀'), ), migrations.AlterField( model_name='dept', name='location', field=models.CharField(max_length=10, verbose_name='部门所在地'), ), migrations.AlterField(", "verbose_name='部门编号'), ), migrations.AlterField( model_name='emp', name='comm', field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True), ), migrations.AlterField( model_name='emp', name='mgr',", "on 2018-05-23 01:23 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [" ]
[ "\"v\": self.ammo_package_v, \"w\": self.ammo_package_w, \"h\": self.ammo_package_h, \"colkey\": self.ammo_package_color_exclusion }) def drawAmmoBox(self): if self.ammo_package_list.__len__()", "1: self.ammo_package_list.append({ \"x\": self.APP_X_MAX_PLAYABLE_AREA + 10, \"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA), \"img\": self.ammo_package_img, \"u\": self.ammo_package_u,", "for index, ammo_box in enumerate(self.ammo_package_list): pyxel.blt( ammo_box[\"x\"], ammo_box[\"y\"], ammo_box[\"img\"], ammo_box[\"u\"], ammo_box[\"v\"], ammo_box[\"w\"], ammo_box[\"h\"],", "= 16 ammo_package_h = 16 ammo_package_color_exclusion = 0 def spawnAmmoInMap(self): if pyxel.frame_count %", "pyxel class ammoSpawner(): probability = 0 ammo_package_list = [] ammo_package_x = 0 ammo_package_y", "\"w\": self.ammo_package_w, \"h\": self.ammo_package_h, \"colkey\": self.ammo_package_color_exclusion }) def drawAmmoBox(self): if self.ammo_package_list.__len__() > 0:", "= 0 ammo_package_list = [] ammo_package_x = 0 ammo_package_y = 0 ammo_package_img =", "= 0 ammo_package_img = 0 ammo_package_u = 48 ammo_package_v = 0 ammo_package_w =", "= 16 ammo_package_color_exclusion = 0 def spawnAmmoInMap(self): if pyxel.frame_count % 500 == 0", "and pyxel.frame_count != 0: if random.randint(0,1) == 1: self.ammo_package_list.append({ \"x\": self.APP_X_MAX_PLAYABLE_AREA + 10,", "0: if random.randint(0,1) == 1: self.ammo_package_list.append({ \"x\": self.APP_X_MAX_PLAYABLE_AREA + 10, \"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA),", "ammo_package_img = 0 ammo_package_u = 48 ammo_package_v = 0 ammo_package_w = 16 ammo_package_h", "> 0: for index, ammo_box in enumerate(self.ammo_package_list): pyxel.blt( ammo_box[\"x\"], ammo_box[\"y\"], ammo_box[\"img\"], ammo_box[\"u\"], ammo_box[\"v\"],", "import random import pyxel class ammoSpawner(): probability = 0 ammo_package_list = [] ammo_package_x", "ammo_package_h = 16 
ammo_package_color_exclusion = 0 def spawnAmmoInMap(self): if pyxel.frame_count % 500 ==", "16 ammo_package_color_exclusion = 0 def spawnAmmoInMap(self): if pyxel.frame_count % 500 == 0 and", "self.ammo_package_color_exclusion }) def drawAmmoBox(self): if self.ammo_package_list.__len__() > 0: for index, ammo_box in enumerate(self.ammo_package_list):", "\"u\": self.ammo_package_u, \"v\": self.ammo_package_v, \"w\": self.ammo_package_w, \"h\": self.ammo_package_h, \"colkey\": self.ammo_package_color_exclusion }) def drawAmmoBox(self):", "48 ammo_package_v = 0 ammo_package_w = 16 ammo_package_h = 16 ammo_package_color_exclusion = 0", "self.ammo_package_list.append({ \"x\": self.APP_X_MAX_PLAYABLE_AREA + 10, \"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA), \"img\": self.ammo_package_img, \"u\": self.ammo_package_u, \"v\":", "16 ammo_package_h = 16 ammo_package_color_exclusion = 0 def spawnAmmoInMap(self): if pyxel.frame_count % 500", "pyxel.frame_count % 500 == 0 and pyxel.frame_count != 0: if random.randint(0,1) == 1:", "ammo_package_list = [] ammo_package_x = 0 ammo_package_y = 0 ammo_package_img = 0 ammo_package_u", "drawAmmoBox(self): if self.ammo_package_list.__len__() > 0: for index, ammo_box in enumerate(self.ammo_package_list): pyxel.blt( ammo_box[\"x\"], ammo_box[\"y\"],", "% 500 == 0 and pyxel.frame_count != 0: if random.randint(0,1) == 1: self.ammo_package_list.append({", "ammo_package_color_exclusion = 0 def spawnAmmoInMap(self): if pyxel.frame_count % 500 == 0 and pyxel.frame_count", "class ammoSpawner(): probability = 0 ammo_package_list = [] ammo_package_x = 0 ammo_package_y =", "random.randint(0,1) == 1: self.ammo_package_list.append({ \"x\": self.APP_X_MAX_PLAYABLE_AREA + 10, \"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA), \"img\": self.ammo_package_img,", "self.APP_Y_MAX_PLAYABLE_AREA), \"img\": self.ammo_package_img, \"u\": self.ammo_package_u, \"v\": self.ammo_package_v, \"w\": 
self.ammo_package_w, \"h\": self.ammo_package_h, \"colkey\": self.ammo_package_color_exclusion", "0 and pyxel.frame_count != 0: if random.randint(0,1) == 1: self.ammo_package_list.append({ \"x\": self.APP_X_MAX_PLAYABLE_AREA +", "<= 16 and abs(self.player_y-ammo_box['y']) <= 16: self.player_ammo += 40 self.ammo_package_list.pop(index) pyxel.play(0, 4) if(ammo_box['x']", "ammo_box[\"y\"], ammo_box[\"img\"], ammo_box[\"u\"], ammo_box[\"v\"], ammo_box[\"w\"], ammo_box[\"h\"], ammo_box[\"colkey\"] ) ammo_box['x'] -= 1 if abs(self.player_x-ammo_box['x'])", "= 0 ammo_package_w = 16 ammo_package_h = 16 ammo_package_color_exclusion = 0 def spawnAmmoInMap(self):", "ammo_package_w = 16 ammo_package_h = 16 ammo_package_color_exclusion = 0 def spawnAmmoInMap(self): if pyxel.frame_count", "if abs(self.player_x-ammo_box['x']) <= 16 and abs(self.player_y-ammo_box['y']) <= 16: self.player_ammo += 40 self.ammo_package_list.pop(index) pyxel.play(0,", "in enumerate(self.ammo_package_list): pyxel.blt( ammo_box[\"x\"], ammo_box[\"y\"], ammo_box[\"img\"], ammo_box[\"u\"], ammo_box[\"v\"], ammo_box[\"w\"], ammo_box[\"h\"], ammo_box[\"colkey\"] ) ammo_box['x']", "ammo_box[\"colkey\"] ) ammo_box['x'] -= 1 if abs(self.player_x-ammo_box['x']) <= 16 and abs(self.player_y-ammo_box['y']) <= 16:", "\"x\": self.APP_X_MAX_PLAYABLE_AREA + 10, \"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA), \"img\": self.ammo_package_img, \"u\": self.ammo_package_u, \"v\": self.ammo_package_v,", "= [] ammo_package_x = 0 ammo_package_y = 0 ammo_package_img = 0 ammo_package_u =", "ammo_box[\"u\"], ammo_box[\"v\"], ammo_box[\"w\"], ammo_box[\"h\"], ammo_box[\"colkey\"] ) ammo_box['x'] -= 1 if abs(self.player_x-ammo_box['x']) <= 16", "self.ammo_package_h, \"colkey\": self.ammo_package_color_exclusion }) def drawAmmoBox(self): if self.ammo_package_list.__len__() > 0: for index, ammo_box", "if self.ammo_package_list.__len__() > 0: for index, ammo_box in enumerate(self.ammo_package_list): 
pyxel.blt( ammo_box[\"x\"], ammo_box[\"y\"], ammo_box[\"img\"],", "probability = 0 ammo_package_list = [] ammo_package_x = 0 ammo_package_y = 0 ammo_package_img", "enumerate(self.ammo_package_list): pyxel.blt( ammo_box[\"x\"], ammo_box[\"y\"], ammo_box[\"img\"], ammo_box[\"u\"], ammo_box[\"v\"], ammo_box[\"w\"], ammo_box[\"h\"], ammo_box[\"colkey\"] ) ammo_box['x'] -=", "ammo_box in enumerate(self.ammo_package_list): pyxel.blt( ammo_box[\"x\"], ammo_box[\"y\"], ammo_box[\"img\"], ammo_box[\"u\"], ammo_box[\"v\"], ammo_box[\"w\"], ammo_box[\"h\"], ammo_box[\"colkey\"] )", "\"img\": self.ammo_package_img, \"u\": self.ammo_package_u, \"v\": self.ammo_package_v, \"w\": self.ammo_package_w, \"h\": self.ammo_package_h, \"colkey\": self.ammo_package_color_exclusion })", "0 def spawnAmmoInMap(self): if pyxel.frame_count % 500 == 0 and pyxel.frame_count != 0:", "[] ammo_package_x = 0 ammo_package_y = 0 ammo_package_img = 0 ammo_package_u = 48", "ammo_box[\"w\"], ammo_box[\"h\"], ammo_box[\"colkey\"] ) ammo_box['x'] -= 1 if abs(self.player_x-ammo_box['x']) <= 16 and abs(self.player_y-ammo_box['y'])", "self.ammo_package_v, \"w\": self.ammo_package_w, \"h\": self.ammo_package_h, \"colkey\": self.ammo_package_color_exclusion }) def drawAmmoBox(self): if self.ammo_package_list.__len__() >", "-= 1 if abs(self.player_x-ammo_box['x']) <= 16 and abs(self.player_y-ammo_box['y']) <= 16: self.player_ammo += 40", ") ammo_box['x'] -= 1 if abs(self.player_x-ammo_box['x']) <= 16 and abs(self.player_y-ammo_box['y']) <= 16: self.player_ammo", "10, \"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA), \"img\": self.ammo_package_img, \"u\": self.ammo_package_u, \"v\": self.ammo_package_v, \"w\": self.ammo_package_w, \"h\":", "self.ammo_package_img, \"u\": self.ammo_package_u, \"v\": self.ammo_package_v, \"w\": self.ammo_package_w, \"h\": self.ammo_package_h, \"colkey\": self.ammo_package_color_exclusion }) def", "ammoSpawner(): probability = 0 ammo_package_list = 
[] ammo_package_x = 0 ammo_package_y = 0", "0 ammo_package_u = 48 ammo_package_v = 0 ammo_package_w = 16 ammo_package_h = 16", "spawnAmmoInMap(self): if pyxel.frame_count % 500 == 0 and pyxel.frame_count != 0: if random.randint(0,1)", "abs(self.player_x-ammo_box['x']) <= 16 and abs(self.player_y-ammo_box['y']) <= 16: self.player_ammo += 40 self.ammo_package_list.pop(index) pyxel.play(0, 4)", "== 0 and pyxel.frame_count != 0: if random.randint(0,1) == 1: self.ammo_package_list.append({ \"x\": self.APP_X_MAX_PLAYABLE_AREA", "pyxel.frame_count != 0: if random.randint(0,1) == 1: self.ammo_package_list.append({ \"x\": self.APP_X_MAX_PLAYABLE_AREA + 10, \"y\":", "16 and abs(self.player_y-ammo_box['y']) <= 16: self.player_ammo += 40 self.ammo_package_list.pop(index) pyxel.play(0, 4) if(ammo_box['x'] <", "ammo_package_y = 0 ammo_package_img = 0 ammo_package_u = 48 ammo_package_v = 0 ammo_package_w", "ammo_package_x = 0 ammo_package_y = 0 ammo_package_img = 0 ammo_package_u = 48 ammo_package_v", "= 0 def spawnAmmoInMap(self): if pyxel.frame_count % 500 == 0 and pyxel.frame_count !=", "1 if abs(self.player_x-ammo_box['x']) <= 16 and abs(self.player_y-ammo_box['y']) <= 16: self.player_ammo += 40 self.ammo_package_list.pop(index)", "ammo_box[\"img\"], ammo_box[\"u\"], ammo_box[\"v\"], ammo_box[\"w\"], ammo_box[\"h\"], ammo_box[\"colkey\"] ) ammo_box['x'] -= 1 if abs(self.player_x-ammo_box['x']) <=", "0 ammo_package_w = 16 ammo_package_h = 16 ammo_package_color_exclusion = 0 def spawnAmmoInMap(self): if", "= 48 ammo_package_v = 0 ammo_package_w = 16 ammo_package_h = 16 ammo_package_color_exclusion =", "random import pyxel class ammoSpawner(): probability = 0 ammo_package_list = [] ammo_package_x =", "\"h\": self.ammo_package_h, \"colkey\": self.ammo_package_color_exclusion }) def drawAmmoBox(self): if self.ammo_package_list.__len__() > 0: for index,", "abs(self.player_y-ammo_box['y']) <= 16: self.player_ammo += 40 self.ammo_package_list.pop(index) pyxel.play(0, 4) 
if(ammo_box['x'] < self.APP_X_MIN_PLAYABLE_AREA): self.ammo_package_list.pop(index)", "ammo_box[\"h\"], ammo_box[\"colkey\"] ) ammo_box['x'] -= 1 if abs(self.player_x-ammo_box['x']) <= 16 and abs(self.player_y-ammo_box['y']) <=", "and abs(self.player_y-ammo_box['y']) <= 16: self.player_ammo += 40 self.ammo_package_list.pop(index) pyxel.play(0, 4) if(ammo_box['x'] < self.APP_X_MIN_PLAYABLE_AREA):", "ammo_box['x'] -= 1 if abs(self.player_x-ammo_box['x']) <= 16 and abs(self.player_y-ammo_box['y']) <= 16: self.player_ammo +=", "+ 10, \"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA), \"img\": self.ammo_package_img, \"u\": self.ammo_package_u, \"v\": self.ammo_package_v, \"w\": self.ammo_package_w,", "ammo_box[\"x\"], ammo_box[\"y\"], ammo_box[\"img\"], ammo_box[\"u\"], ammo_box[\"v\"], ammo_box[\"w\"], ammo_box[\"h\"], ammo_box[\"colkey\"] ) ammo_box['x'] -= 1 if", "<gh_stars>0 import random import pyxel class ammoSpawner(): probability = 0 ammo_package_list = []", "!= 0: if random.randint(0,1) == 1: self.ammo_package_list.append({ \"x\": self.APP_X_MAX_PLAYABLE_AREA + 10, \"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA,", "ammo_package_u = 48 ammo_package_v = 0 ammo_package_w = 16 ammo_package_h = 16 ammo_package_color_exclusion", "= 0 ammo_package_u = 48 ammo_package_v = 0 ammo_package_w = 16 ammo_package_h =", "index, ammo_box in enumerate(self.ammo_package_list): pyxel.blt( ammo_box[\"x\"], ammo_box[\"y\"], ammo_box[\"img\"], ammo_box[\"u\"], ammo_box[\"v\"], ammo_box[\"w\"], ammo_box[\"h\"], ammo_box[\"colkey\"]", "def drawAmmoBox(self): if self.ammo_package_list.__len__() > 0: for index, ammo_box in enumerate(self.ammo_package_list): pyxel.blt( ammo_box[\"x\"],", "0 ammo_package_list = [] ammo_package_x = 0 ammo_package_y = 0 ammo_package_img = 0", "self.ammo_package_w, \"h\": self.ammo_package_h, \"colkey\": self.ammo_package_color_exclusion }) def drawAmmoBox(self): if self.ammo_package_list.__len__() > 0: for", "}) def 
drawAmmoBox(self): if self.ammo_package_list.__len__() > 0: for index, ammo_box in enumerate(self.ammo_package_list): pyxel.blt(", "import pyxel class ammoSpawner(): probability = 0 ammo_package_list = [] ammo_package_x = 0", "500 == 0 and pyxel.frame_count != 0: if random.randint(0,1) == 1: self.ammo_package_list.append({ \"x\":", "== 1: self.ammo_package_list.append({ \"x\": self.APP_X_MAX_PLAYABLE_AREA + 10, \"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA), \"img\": self.ammo_package_img, \"u\":", "self.APP_X_MAX_PLAYABLE_AREA + 10, \"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA), \"img\": self.ammo_package_img, \"u\": self.ammo_package_u, \"v\": self.ammo_package_v, \"w\":", "ammo_box[\"v\"], ammo_box[\"w\"], ammo_box[\"h\"], ammo_box[\"colkey\"] ) ammo_box['x'] -= 1 if abs(self.player_x-ammo_box['x']) <= 16 and", "= 0 ammo_package_y = 0 ammo_package_img = 0 ammo_package_u = 48 ammo_package_v =", "ammo_package_v = 0 ammo_package_w = 16 ammo_package_h = 16 ammo_package_color_exclusion = 0 def", "def spawnAmmoInMap(self): if pyxel.frame_count % 500 == 0 and pyxel.frame_count != 0: if", "\"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA), \"img\": self.ammo_package_img, \"u\": self.ammo_package_u, \"v\": self.ammo_package_v, \"w\": self.ammo_package_w, \"h\": self.ammo_package_h,", "if random.randint(0,1) == 1: self.ammo_package_list.append({ \"x\": self.APP_X_MAX_PLAYABLE_AREA + 10, \"y\": random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA), \"img\":", "0: for index, ammo_box in enumerate(self.ammo_package_list): pyxel.blt( ammo_box[\"x\"], ammo_box[\"y\"], ammo_box[\"img\"], ammo_box[\"u\"], ammo_box[\"v\"], ammo_box[\"w\"],", "0 ammo_package_img = 0 ammo_package_u = 48 ammo_package_v = 0 ammo_package_w = 16", "self.ammo_package_u, \"v\": self.ammo_package_v, \"w\": self.ammo_package_w, \"h\": self.ammo_package_h, \"colkey\": 
self.ammo_package_color_exclusion }) def drawAmmoBox(self): if", "\"colkey\": self.ammo_package_color_exclusion }) def drawAmmoBox(self): if self.ammo_package_list.__len__() > 0: for index, ammo_box in", "random.randint(self.APP_Y_MIN_PLAYABLE_AREA, self.APP_Y_MAX_PLAYABLE_AREA), \"img\": self.ammo_package_img, \"u\": self.ammo_package_u, \"v\": self.ammo_package_v, \"w\": self.ammo_package_w, \"h\": self.ammo_package_h, \"colkey\":", "0 ammo_package_y = 0 ammo_package_img = 0 ammo_package_u = 48 ammo_package_v = 0", "pyxel.blt( ammo_box[\"x\"], ammo_box[\"y\"], ammo_box[\"img\"], ammo_box[\"u\"], ammo_box[\"v\"], ammo_box[\"w\"], ammo_box[\"h\"], ammo_box[\"colkey\"] ) ammo_box['x'] -= 1", "self.ammo_package_list.__len__() > 0: for index, ammo_box in enumerate(self.ammo_package_list): pyxel.blt( ammo_box[\"x\"], ammo_box[\"y\"], ammo_box[\"img\"], ammo_box[\"u\"],", "if pyxel.frame_count % 500 == 0 and pyxel.frame_count != 0: if random.randint(0,1) ==" ]
[ "recover_to_addr User = get_user_model() DEFAULT_ADDRESS_FIELD = 'username' class Web3Backend(backends.ModelBackend): def authenticate( self, request,", "the address the user has provided matches the signature if address != recover_to_addr(token,", "'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD) kwargs = { f\"{address_field}__iexact\": address } # try to get user", "[field.name for field in User._meta.fields] if ( address_field != DEFAULT_ADDRESS_FIELD and 'username' in", "for field in User._meta.fields] if ( address_field != DEFAULT_ADDRESS_FIELD and 'username' in fields", "-> Optional[User]: # check if the address the user has provided matches the", "user = User(**{address_field: address}) fields = [field.name for field in User._meta.fields] if (", "settings from web3auth.utils import recover_to_addr User = get_user_model() DEFAULT_ADDRESS_FIELD = 'username' class Web3Backend(backends.ModelBackend):", "DEFAULT_ADDRESS_FIELD = 'username' class Web3Backend(backends.ModelBackend): def authenticate( self, request, address, token, signature )", "{ f\"{address_field}__iexact\": address } # try to get user with provided data user", "from web3auth.utils import recover_to_addr User = get_user_model() DEFAULT_ADDRESS_FIELD = 'username' class Web3Backend(backends.ModelBackend): def", "= 'username' class Web3Backend(backends.ModelBackend): def authenticate( self, request, address, token, signature ) ->", "User(**{address_field: address}) fields = [field.name for field in User._meta.fields] if ( address_field !=", "else: # get address field for the user model address_field = getattr( settings,", ") -> Optional[User]: # check if the address the user has provided matches", "kwargs = { f\"{address_field}__iexact\": address } # try to get user with provided", "import recover_to_addr User = get_user_model() DEFAULT_ADDRESS_FIELD = 'username' class Web3Backend(backends.ModelBackend): def authenticate( self,", "address_field = getattr( settings, 
'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD) kwargs = { f\"{address_field}__iexact\": address } #", "user if it does not exist user = User(**{address_field: address}) fields = [field.name", "ValueError('Wallet address does not match signature') else: # get address field for the", "= getattr( settings, 'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD) kwargs = { f\"{address_field}__iexact\": address } # try", "signature ) -> Optional[User]: # check if the address the user has provided", "request, address, token, signature ) -> Optional[User]: # check if the address the", "user = User.objects.filter(**kwargs).first() if user is None: # create the user if it", "<reponame>sneeu/django-web3-auth from typing import Optional from typing import Optional from django.contrib.auth import get_user_model,", "'username' class Web3Backend(backends.ModelBackend): def authenticate( self, request, address, token, signature ) -> Optional[User]:", "self, request, address, token, signature ) -> Optional[User]: # check if the address", "address != recover_to_addr(token, signature): raise ValueError('Wallet address does not match signature') else: #", "user with provided data user = User.objects.filter(**kwargs).first() if user is None: # create", "django.conf import settings from web3auth.utils import recover_to_addr User = get_user_model() DEFAULT_ADDRESS_FIELD = 'username'", "if user is None: # create the user if it does not exist", "address field for the user model address_field = getattr( settings, 'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD) kwargs", "= { f\"{address_field}__iexact\": address } # try to get user with provided data", "field in User._meta.fields] if ( address_field != DEFAULT_ADDRESS_FIELD and 'username' in fields ):", "import Optional from typing import Optional from django.contrib.auth import get_user_model, backends from django.conf", "# create the user if it does not exist user = User(**{address_field: address})", "token, signature ) -> 
Optional[User]: # check if the address the user has", "with provided data user = User.objects.filter(**kwargs).first() if user is None: # create the", "address does not match signature') else: # get address field for the user", "has provided matches the signature if address != recover_to_addr(token, signature): raise ValueError('Wallet address", "user model address_field = getattr( settings, 'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD) kwargs = { f\"{address_field}__iexact\": address", "if ( address_field != DEFAULT_ADDRESS_FIELD and 'username' in fields ): user.username = user.generate_username()", "!= DEFAULT_ADDRESS_FIELD and 'username' in fields ): user.username = user.generate_username() user.save() return user", "check if the address the user has provided matches the signature if address", "signature if address != recover_to_addr(token, signature): raise ValueError('Wallet address does not match signature')", "!= recover_to_addr(token, signature): raise ValueError('Wallet address does not match signature') else: # get", "DEFAULT_ADDRESS_FIELD) kwargs = { f\"{address_field}__iexact\": address } # try to get user with", "not match signature') else: # get address field for the user model address_field", "get address field for the user model address_field = getattr( settings, 'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD)", "None: # create the user if it does not exist user = User(**{address_field:", "if it does not exist user = User(**{address_field: address}) fields = [field.name for", "get user with provided data user = User.objects.filter(**kwargs).first() if user is None: #", "address the user has provided matches the signature if address != recover_to_addr(token, signature):", "backends from django.conf import settings from web3auth.utils import recover_to_addr User = get_user_model() DEFAULT_ADDRESS_FIELD", "django.contrib.auth import get_user_model, backends from django.conf import settings from web3auth.utils import recover_to_addr User", 
"signature') else: # get address field for the user model address_field = getattr(", "typing import Optional from typing import Optional from django.contrib.auth import get_user_model, backends from", "matches the signature if address != recover_to_addr(token, signature): raise ValueError('Wallet address does not", "Optional from typing import Optional from django.contrib.auth import get_user_model, backends from django.conf import", "for the user model address_field = getattr( settings, 'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD) kwargs = {", "signature): raise ValueError('Wallet address does not match signature') else: # get address field", "address}) fields = [field.name for field in User._meta.fields] if ( address_field != DEFAULT_ADDRESS_FIELD", "get_user_model, backends from django.conf import settings from web3auth.utils import recover_to_addr User = get_user_model()", "user has provided matches the signature if address != recover_to_addr(token, signature): raise ValueError('Wallet", "= get_user_model() DEFAULT_ADDRESS_FIELD = 'username' class Web3Backend(backends.ModelBackend): def authenticate( self, request, address, token,", "from django.contrib.auth import get_user_model, backends from django.conf import settings from web3auth.utils import recover_to_addr", "authenticate( self, request, address, token, signature ) -> Optional[User]: # check if the", "match signature') else: # get address field for the user model address_field =", "if the address the user has provided matches the signature if address !=", "data user = User.objects.filter(**kwargs).first() if user is None: # create the user if", "provided matches the signature if address != recover_to_addr(token, signature): raise ValueError('Wallet address does", "from typing import Optional from typing import Optional from django.contrib.auth import get_user_model, backends", "not exist user = User(**{address_field: address}) fields = [field.name for field in User._meta.fields]", "settings, 
'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD) kwargs = { f\"{address_field}__iexact\": address } # try to get", "try to get user with provided data user = User.objects.filter(**kwargs).first() if user is", "( address_field != DEFAULT_ADDRESS_FIELD and 'username' in fields ): user.username = user.generate_username() user.save()", "raise ValueError('Wallet address does not match signature') else: # get address field for", "field for the user model address_field = getattr( settings, 'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD) kwargs =", "get_user_model() DEFAULT_ADDRESS_FIELD = 'username' class Web3Backend(backends.ModelBackend): def authenticate( self, request, address, token, signature", "is None: # create the user if it does not exist user =", "address } # try to get user with provided data user = User.objects.filter(**kwargs).first()", "user is None: # create the user if it does not exist user", "fields = [field.name for field in User._meta.fields] if ( address_field != DEFAULT_ADDRESS_FIELD and", "if address != recover_to_addr(token, signature): raise ValueError('Wallet address does not match signature') else:", "User.objects.filter(**kwargs).first() if user is None: # create the user if it does not", "import get_user_model, backends from django.conf import settings from web3auth.utils import recover_to_addr User =", "= User(**{address_field: address}) fields = [field.name for field in User._meta.fields] if ( address_field", "typing import Optional from django.contrib.auth import get_user_model, backends from django.conf import settings from", "to get user with provided data user = User.objects.filter(**kwargs).first() if user is None:", "from typing import Optional from django.contrib.auth import get_user_model, backends from django.conf import settings", "web3auth.utils import recover_to_addr User = get_user_model() DEFAULT_ADDRESS_FIELD = 'username' class Web3Backend(backends.ModelBackend): def authenticate(", "in User._meta.fields] if ( 
address_field != DEFAULT_ADDRESS_FIELD and 'username' in fields ): user.username", "model address_field = getattr( settings, 'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD) kwargs = { f\"{address_field}__iexact\": address }", "it does not exist user = User(**{address_field: address}) fields = [field.name for field", "address_field != DEFAULT_ADDRESS_FIELD and 'username' in fields ): user.username = user.generate_username() user.save() return", "does not match signature') else: # get address field for the user model", "from django.conf import settings from web3auth.utils import recover_to_addr User = get_user_model() DEFAULT_ADDRESS_FIELD =", "provided data user = User.objects.filter(**kwargs).first() if user is None: # create the user", "Optional[User]: # check if the address the user has provided matches the signature", "= [field.name for field in User._meta.fields] if ( address_field != DEFAULT_ADDRESS_FIELD and 'username'", "the user has provided matches the signature if address != recover_to_addr(token, signature): raise", "recover_to_addr(token, signature): raise ValueError('Wallet address does not match signature') else: # get address", "User._meta.fields] if ( address_field != DEFAULT_ADDRESS_FIELD and 'username' in fields ): user.username =", "import settings from web3auth.utils import recover_to_addr User = get_user_model() DEFAULT_ADDRESS_FIELD = 'username' class", "address, token, signature ) -> Optional[User]: # check if the address the user", "# check if the address the user has provided matches the signature if", "create the user if it does not exist user = User(**{address_field: address}) fields", "Web3Backend(backends.ModelBackend): def authenticate( self, request, address, token, signature ) -> Optional[User]: # check", "getattr( settings, 'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD) kwargs = { f\"{address_field}__iexact\": address } # try to", "# get address field for the user model address_field = getattr( settings, 
'WEB3AUTH_USER_ADDRESS_FIELD',", "class Web3Backend(backends.ModelBackend): def authenticate( self, request, address, token, signature ) -> Optional[User]: #", "exist user = User(**{address_field: address}) fields = [field.name for field in User._meta.fields] if", "the user model address_field = getattr( settings, 'WEB3AUTH_USER_ADDRESS_FIELD', DEFAULT_ADDRESS_FIELD) kwargs = { f\"{address_field}__iexact\":", "f\"{address_field}__iexact\": address } # try to get user with provided data user =", "User = get_user_model() DEFAULT_ADDRESS_FIELD = 'username' class Web3Backend(backends.ModelBackend): def authenticate( self, request, address,", "def authenticate( self, request, address, token, signature ) -> Optional[User]: # check if", "= User.objects.filter(**kwargs).first() if user is None: # create the user if it does", "Optional from django.contrib.auth import get_user_model, backends from django.conf import settings from web3auth.utils import", "} # try to get user with provided data user = User.objects.filter(**kwargs).first() if", "does not exist user = User(**{address_field: address}) fields = [field.name for field in", "the signature if address != recover_to_addr(token, signature): raise ValueError('Wallet address does not match", "# try to get user with provided data user = User.objects.filter(**kwargs).first() if user", "import Optional from django.contrib.auth import get_user_model, backends from django.conf import settings from web3auth.utils", "the user if it does not exist user = User(**{address_field: address}) fields =" ]
[ "LICENSE file. class TimelineEvent(object): \"\"\"Represents a timeline event.\"\"\" def __init__(self, category, name, start,", "duration self.args = args @property def end(self): return self.start + self.duration def __repr__(self):", "# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of", "source code is governed by a BSD-style license that can be # found", "of this source code is governed by a BSD-style license that can be", "end(self): return self.start + self.duration def __repr__(self): if self.args: args_str = ', '", "the LICENSE file. class TimelineEvent(object): \"\"\"Represents a timeline event.\"\"\" def __init__(self, category, name,", "= args @property def end(self): return self.start + self.duration def __repr__(self): if self.args:", "self.duration = duration self.args = args @property def end(self): return self.start + self.duration", "__repr__(self): if self.args: args_str = ', ' + repr(self.args) else: args_str = ''", "else: args_str = '' return \"TimelineEvent(name='%s', start=%f, duration=%s%s)\" % ( self.name, self.start, self.duration,", "class TimelineEvent(object): \"\"\"Represents a timeline event.\"\"\" def __init__(self, category, name, start, duration, args=None):", "this source code is governed by a BSD-style license that can be #", "args_str = '' return \"TimelineEvent(name='%s', start=%f, duration=%s%s)\" % ( self.name, self.start, self.duration, args_str)", "category self.name = name self.start = start self.duration = duration self.args = args", "2013 The Chromium Authors. All rights reserved. # Use of this source code", "self.args = args @property def end(self): return self.start + self.duration def __repr__(self): if", "that can be # found in the LICENSE file. 
class TimelineEvent(object): \"\"\"Represents a", "timeline event.\"\"\" def __init__(self, category, name, start, duration, args=None): self.category = category self.name", "self.start = start self.duration = duration self.args = args @property def end(self): return", "be # found in the LICENSE file. class TimelineEvent(object): \"\"\"Represents a timeline event.\"\"\"", "repr(self.args) else: args_str = '' return \"TimelineEvent(name='%s', start=%f, duration=%s%s)\" % ( self.name, self.start,", "a timeline event.\"\"\" def __init__(self, category, name, start, duration, args=None): self.category = category", "name self.start = start self.duration = duration self.args = args @property def end(self):", "+ self.duration def __repr__(self): if self.args: args_str = ', ' + repr(self.args) else:", "event.\"\"\" def __init__(self, category, name, start, duration, args=None): self.category = category self.name =", "Authors. All rights reserved. # Use of this source code is governed by", "self.category = category self.name = name self.start = start self.duration = duration self.args", "= category self.name = name self.start = start self.duration = duration self.args =", "+ repr(self.args) else: args_str = '' return \"TimelineEvent(name='%s', start=%f, duration=%s%s)\" % ( self.name,", "name, start, duration, args=None): self.category = category self.name = name self.start = start", "', ' + repr(self.args) else: args_str = '' return \"TimelineEvent(name='%s', start=%f, duration=%s%s)\" %", "The Chromium Authors. All rights reserved. 
# Use of this source code is", "Use of this source code is governed by a BSD-style license that can", "args_str = ', ' + repr(self.args) else: args_str = '' return \"TimelineEvent(name='%s', start=%f,", "self.start + self.duration def __repr__(self): if self.args: args_str = ', ' + repr(self.args)", "self.name = name self.start = start self.duration = duration self.args = args @property", "= start self.duration = duration self.args = args @property def end(self): return self.start", "a BSD-style license that can be # found in the LICENSE file. class", "in the LICENSE file. class TimelineEvent(object): \"\"\"Represents a timeline event.\"\"\" def __init__(self, category,", "can be # found in the LICENSE file. class TimelineEvent(object): \"\"\"Represents a timeline", "__init__(self, category, name, start, duration, args=None): self.category = category self.name = name self.start", "return self.start + self.duration def __repr__(self): if self.args: args_str = ', ' +", "code is governed by a BSD-style license that can be # found in", "rights reserved. # Use of this source code is governed by a BSD-style", "if self.args: args_str = ', ' + repr(self.args) else: args_str = '' return", "= name self.start = start self.duration = duration self.args = args @property def", "license that can be # found in the LICENSE file. class TimelineEvent(object): \"\"\"Represents", "found in the LICENSE file. class TimelineEvent(object): \"\"\"Represents a timeline event.\"\"\" def __init__(self,", "start self.duration = duration self.args = args @property def end(self): return self.start +", "All rights reserved. 
# Use of this source code is governed by a", "def __repr__(self): if self.args: args_str = ', ' + repr(self.args) else: args_str =", "by a BSD-style license that can be # found in the LICENSE file.", "self.args: args_str = ', ' + repr(self.args) else: args_str = '' return \"TimelineEvent(name='%s',", "is governed by a BSD-style license that can be # found in the", "\"\"\"Represents a timeline event.\"\"\" def __init__(self, category, name, start, duration, args=None): self.category =", "self.duration def __repr__(self): if self.args: args_str = ', ' + repr(self.args) else: args_str", "duration, args=None): self.category = category self.name = name self.start = start self.duration =", "governed by a BSD-style license that can be # found in the LICENSE", "TimelineEvent(object): \"\"\"Represents a timeline event.\"\"\" def __init__(self, category, name, start, duration, args=None): self.category", "= ', ' + repr(self.args) else: args_str = '' return \"TimelineEvent(name='%s', start=%f, duration=%s%s)\"", "reserved. # Use of this source code is governed by a BSD-style license", "file. class TimelineEvent(object): \"\"\"Represents a timeline event.\"\"\" def __init__(self, category, name, start, duration,", "# Use of this source code is governed by a BSD-style license that", "@property def end(self): return self.start + self.duration def __repr__(self): if self.args: args_str =", "BSD-style license that can be # found in the LICENSE file. class TimelineEvent(object):", "args=None): self.category = category self.name = name self.start = start self.duration = duration", "category, name, start, duration, args=None): self.category = category self.name = name self.start =", "Chromium Authors. All rights reserved. 
# Use of this source code is governed", "= duration self.args = args @property def end(self): return self.start + self.duration def", "args @property def end(self): return self.start + self.duration def __repr__(self): if self.args: args_str", "start, duration, args=None): self.category = category self.name = name self.start = start self.duration", "(c) 2013 The Chromium Authors. All rights reserved. # Use of this source", "# found in the LICENSE file. class TimelineEvent(object): \"\"\"Represents a timeline event.\"\"\" def", "def end(self): return self.start + self.duration def __repr__(self): if self.args: args_str = ',", "Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this", "def __init__(self, category, name, start, duration, args=None): self.category = category self.name = name", "' + repr(self.args) else: args_str = '' return \"TimelineEvent(name='%s', start=%f, duration=%s%s)\" % (" ]
[ "generalized linear models (VGLMs). :param size_factors: size factors for X :return: tuple: (groupwise_means,", "linear models (VGLMs). :param size_factors: size factors for X :return: tuple: (groupwise_means, mean,", "vector generalized linear models (VGLMs). :param size_factors: size factors for X :return: tuple:", "of normal GLMs. :param x: The sample data :param design_loc: design matrix for", "size factors for X :param groupwise_means: optional, in case if already computed this", "specified to spare double-calculation :return: tuple (groupwise_scales, logsd, rmsd) \"\"\" def compute_scales_fun(variance, mean):", "x=x, dmat=design_loc, constraints=constraints_loc, size_factors=size_factors, link_fn=link_fn, inv_link_fn=inv_link_fn ) def closedform_norm_glm_logsd( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_scale:", "a closed-form solution for the `mean` parameters of normal GLMs. :param x: The", "link_fn=np.log ): r\"\"\" Calculates a closed-form solution for the log-scale parameters of normal", "for the log-scale parameters of normal GLMs. :param x: The sample data :param", "logging import numpy as np import scipy.sparse from typing import Union from .external", "parameters) Tensor that encodes how complete parameter set which includes dependent parameters arises", "for X :return: tuple: (groupwise_means, mean, rmsd) \"\"\" return closedform_glm_mean( x=x, dmat=design_loc, constraints=constraints_loc,", "is used in vector generalized linear models (VGLMs). 
:param size_factors: size factors for", "sample data :param design_scale: design matrix for scale :param constraints: some design constraints", "constraints=constraints_loc, size_factors=size_factors, link_fn=link_fn, inv_link_fn=inv_link_fn ) def closedform_norm_glm_logsd( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_scale: np.ndarray, constraints=None,", "dependent parameters) Tensor that encodes how complete parameter set which includes dependent parameters", "tuple (groupwise_scales, logsd, rmsd) \"\"\" def compute_scales_fun(variance, mean): groupwise_scales = np.sqrt(variance) return groupwise_scales", "import numpy as np import scipy.sparse from typing import Union from .external import", "mean, rmsd) \"\"\" return closedform_glm_mean( x=x, dmat=design_loc, constraints=constraints_loc, size_factors=size_factors, link_fn=link_fn, inv_link_fn=inv_link_fn ) def", "closedform_norm_glm_mean( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_loc: np.ndarray, constraints_loc, size_factors=None, link_fn=lambda x: x, inv_link_fn=lambda x:", ":param constraints: some design constraints :param size_factors: size factors for X :param groupwise_means:", "= <constraints, indep>. This form of constraints is used in vector generalized linear", "x: The sample data :param design_loc: design matrix for location :param constraints_loc: tensor", "from indepedent parameters: all = <constraints, indep>. 
This form of constraints is used", ":param design_loc: design matrix for location :param constraints_loc: tensor (all parameters x dependent", "design_loc: design matrix for location :param constraints_loc: tensor (all parameters x dependent parameters)", "this can be specified to spare double-calculation :return: tuple (groupwise_scales, logsd, rmsd) \"\"\"", "np.sqrt(variance) return groupwise_scales return closedform_glm_scale( x=x, design_scale=design_scale, constraints=constraints, size_factors=size_factors, groupwise_means=groupwise_means, link_fn=link_fn, compute_scales_fun=compute_scales_fun )", "dependent parameters arises from indepedent parameters: all = <constraints, indep>. This form of", "already computed this can be specified to spare double-calculation :return: tuple (groupwise_scales, logsd,", ":param x: The sample data :param design_loc: design matrix for location :param constraints_loc:", "def compute_scales_fun(variance, mean): groupwise_scales = np.sqrt(variance) return groupwise_scales return closedform_glm_scale( x=x, design_scale=design_scale, constraints=constraints,", "import logging import numpy as np import scipy.sparse from typing import Union from", "normal GLMs. :param x: The sample data :param design_loc: design matrix for location", "closedform_norm_glm_logsd( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_scale: np.ndarray, constraints=None, size_factors=None, groupwise_means=None, link_fn=np.log ): r\"\"\" Calculates", "np import scipy.sparse from typing import Union from .external import closedform_glm_mean, closedform_glm_scale logger", "indepedent parameters: all = <constraints, indep>. 
This form of constraints is used in", "for X :param groupwise_means: optional, in case if already computed this can be", ") def closedform_norm_glm_logsd( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_scale: np.ndarray, constraints=None, size_factors=None, groupwise_means=None, link_fn=np.log ):", "size factors for X :return: tuple: (groupwise_means, mean, rmsd) \"\"\" return closedform_glm_mean( x=x,", "a closed-form solution for the log-scale parameters of normal GLMs. :param x: The", "parameters x dependent parameters) Tensor that encodes how complete parameter set which includes", "groupwise_means: optional, in case if already computed this can be specified to spare", "X :param groupwise_means: optional, in case if already computed this can be specified", "that encodes how complete parameter set which includes dependent parameters arises from indepedent", "form of constraints is used in vector generalized linear models (VGLMs). :param size_factors:", "x: The sample data :param design_scale: design matrix for scale :param constraints: some", "scale :param constraints: some design constraints :param size_factors: size factors for X :param", "size_factors: size factors for X :param groupwise_means: optional, in case if already computed", "constraints_loc, size_factors=None, link_fn=lambda x: x, inv_link_fn=lambda x: x ): r\"\"\" Calculates a closed-form", "groupwise_means=None, link_fn=np.log ): r\"\"\" Calculates a closed-form solution for the log-scale parameters of", ":return: tuple: (groupwise_means, mean, rmsd) \"\"\" return closedform_glm_mean( x=x, dmat=design_loc, constraints=constraints_loc, size_factors=size_factors, link_fn=link_fn,", "x: Union[np.ndarray, scipy.sparse.csr_matrix], design_scale: np.ndarray, constraints=None, size_factors=None, groupwise_means=None, link_fn=np.log ): r\"\"\" Calculates a", "= np.sqrt(variance) return groupwise_scales return closedform_glm_scale( x=x, design_scale=design_scale, constraints=constraints, 
size_factors=size_factors, groupwise_means=groupwise_means, link_fn=link_fn, compute_scales_fun=compute_scales_fun", "from typing import Union from .external import closedform_glm_mean, closedform_glm_scale logger = logging.getLogger(\"batchglm\") def", "(VGLMs). :param size_factors: size factors for X :return: tuple: (groupwise_means, mean, rmsd) \"\"\"", "GLMs. :param x: The sample data :param design_scale: design matrix for scale :param", "double-calculation :return: tuple (groupwise_scales, logsd, rmsd) \"\"\" def compute_scales_fun(variance, mean): groupwise_scales = np.sqrt(variance)", "rmsd) \"\"\" def compute_scales_fun(variance, mean): groupwise_scales = np.sqrt(variance) return groupwise_scales return closedform_glm_scale( x=x,", "used in vector generalized linear models (VGLMs). :param size_factors: size factors for X", ":param x: The sample data :param design_scale: design matrix for scale :param constraints:", "data :param design_loc: design matrix for location :param constraints_loc: tensor (all parameters x", "matrix for scale :param constraints: some design constraints :param size_factors: size factors for", "def closedform_norm_glm_logsd( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_scale: np.ndarray, constraints=None, size_factors=None, groupwise_means=None, link_fn=np.log ): r\"\"\"", "rmsd) \"\"\" return closedform_glm_mean( x=x, dmat=design_loc, constraints=constraints_loc, size_factors=size_factors, link_fn=link_fn, inv_link_fn=inv_link_fn ) def closedform_norm_glm_logsd(", "constraints is used in vector generalized linear models (VGLMs). :param size_factors: size factors", "import Union from .external import closedform_glm_mean, closedform_glm_scale logger = logging.getLogger(\"batchglm\") def closedform_norm_glm_mean( x:", "indep>. 
This form of constraints is used in vector generalized linear models (VGLMs).", "Union[np.ndarray, scipy.sparse.csr_matrix], design_scale: np.ndarray, constraints=None, size_factors=None, groupwise_means=None, link_fn=np.log ): r\"\"\" Calculates a closed-form", "the `mean` parameters of normal GLMs. :param x: The sample data :param design_loc:", "models (VGLMs). :param size_factors: size factors for X :return: tuple: (groupwise_means, mean, rmsd)", "closedform_glm_scale logger = logging.getLogger(\"batchglm\") def closedform_norm_glm_mean( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_loc: np.ndarray, constraints_loc, size_factors=None,", "to spare double-calculation :return: tuple (groupwise_scales, logsd, rmsd) \"\"\" def compute_scales_fun(variance, mean): groupwise_scales", "for location :param constraints_loc: tensor (all parameters x dependent parameters) Tensor that encodes", "factors for X :return: tuple: (groupwise_means, mean, rmsd) \"\"\" return closedform_glm_mean( x=x, dmat=design_loc,", "x: x ): r\"\"\" Calculates a closed-form solution for the `mean` parameters of", "design_loc: np.ndarray, constraints_loc, size_factors=None, link_fn=lambda x: x, inv_link_fn=lambda x: x ): r\"\"\" Calculates", "x: x, inv_link_fn=lambda x: x ): r\"\"\" Calculates a closed-form solution for the", "spare double-calculation :return: tuple (groupwise_scales, logsd, rmsd) \"\"\" def compute_scales_fun(variance, mean): groupwise_scales =", "how complete parameter set which includes dependent parameters arises from indepedent parameters: all", "optional, in case if already computed this can be specified to spare double-calculation", "design_scale: np.ndarray, constraints=None, size_factors=None, groupwise_means=None, link_fn=np.log ): r\"\"\" Calculates a closed-form solution for", ":param groupwise_means: optional, in case if already computed this can be specified to", "which includes dependent parameters arises from indepedent parameters: all = <constraints, 
indep>. This", "case if already computed this can be specified to spare double-calculation :return: tuple", "closed-form solution for the `mean` parameters of normal GLMs. :param x: The sample", "if already computed this can be specified to spare double-calculation :return: tuple (groupwise_scales,", "size_factors=None, link_fn=lambda x: x, inv_link_fn=lambda x: x ): r\"\"\" Calculates a closed-form solution", "normal GLMs. :param x: The sample data :param design_scale: design matrix for scale", "size_factors=size_factors, link_fn=link_fn, inv_link_fn=inv_link_fn ) def closedform_norm_glm_logsd( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_scale: np.ndarray, constraints=None, size_factors=None,", "): r\"\"\" Calculates a closed-form solution for the log-scale parameters of normal GLMs.", ":param design_scale: design matrix for scale :param constraints: some design constraints :param size_factors:", "scipy.sparse.csr_matrix], design_loc: np.ndarray, constraints_loc, size_factors=None, link_fn=lambda x: x, inv_link_fn=lambda x: x ): r\"\"\"", "Union from .external import closedform_glm_mean, closedform_glm_scale logger = logging.getLogger(\"batchglm\") def closedform_norm_glm_mean( x: Union[np.ndarray,", "of constraints is used in vector generalized linear models (VGLMs). :param size_factors: size", "compute_scales_fun(variance, mean): groupwise_scales = np.sqrt(variance) return groupwise_scales return closedform_glm_scale( x=x, design_scale=design_scale, constraints=constraints, size_factors=size_factors,", "sample data :param design_loc: design matrix for location :param constraints_loc: tensor (all parameters", "logger = logging.getLogger(\"batchglm\") def closedform_norm_glm_mean( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_loc: np.ndarray, constraints_loc, size_factors=None, link_fn=lambda", "for the `mean` parameters of normal GLMs. 
:param x: The sample data :param", "constraints: some design constraints :param size_factors: size factors for X :param groupwise_means: optional,", "parameters arises from indepedent parameters: all = <constraints, indep>. This form of constraints", "size_factors=None, groupwise_means=None, link_fn=np.log ): r\"\"\" Calculates a closed-form solution for the log-scale parameters", "The sample data :param design_loc: design matrix for location :param constraints_loc: tensor (all", "The sample data :param design_scale: design matrix for scale :param constraints: some design", "logsd, rmsd) \"\"\" def compute_scales_fun(variance, mean): groupwise_scales = np.sqrt(variance) return groupwise_scales return closedform_glm_scale(", "groupwise_scales = np.sqrt(variance) return groupwise_scales return closedform_glm_scale( x=x, design_scale=design_scale, constraints=constraints, size_factors=size_factors, groupwise_means=groupwise_means, link_fn=link_fn,", "dmat=design_loc, constraints=constraints_loc, size_factors=size_factors, link_fn=link_fn, inv_link_fn=inv_link_fn ) def closedform_norm_glm_logsd( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_scale: np.ndarray,", "design constraints :param size_factors: size factors for X :param groupwise_means: optional, in case", ":return: tuple (groupwise_scales, logsd, rmsd) \"\"\" def compute_scales_fun(variance, mean): groupwise_scales = np.sqrt(variance) return", "x dependent parameters) Tensor that encodes how complete parameter set which includes dependent", "import closedform_glm_mean, closedform_glm_scale logger = logging.getLogger(\"batchglm\") def closedform_norm_glm_mean( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_loc: np.ndarray,", "for scale :param constraints: some design constraints :param size_factors: size factors for X", ":param size_factors: size factors for X :return: tuple: (groupwise_means, mean, rmsd) \"\"\" return", "Calculates a closed-form solution for the log-scale parameters of normal 
GLMs. :param x:", "in vector generalized linear models (VGLMs). :param size_factors: size factors for X :return:", "scipy.sparse from typing import Union from .external import closedform_glm_mean, closedform_glm_scale logger = logging.getLogger(\"batchglm\")", "some design constraints :param size_factors: size factors for X :param groupwise_means: optional, in", "design_scale: design matrix for scale :param constraints: some design constraints :param size_factors: size", "r\"\"\" Calculates a closed-form solution for the log-scale parameters of normal GLMs. :param", "\"\"\" def compute_scales_fun(variance, mean): groupwise_scales = np.sqrt(variance) return groupwise_scales return closedform_glm_scale( x=x, design_scale=design_scale,", "solution for the `mean` parameters of normal GLMs. :param x: The sample data", "inv_link_fn=lambda x: x ): r\"\"\" Calculates a closed-form solution for the `mean` parameters", "Tensor that encodes how complete parameter set which includes dependent parameters arises from", "design matrix for scale :param constraints: some design constraints :param size_factors: size factors", "Union[np.ndarray, scipy.sparse.csr_matrix], design_loc: np.ndarray, constraints_loc, size_factors=None, link_fn=lambda x: x, inv_link_fn=lambda x: x ):", "tensor (all parameters x dependent parameters) Tensor that encodes how complete parameter set", "import scipy.sparse from typing import Union from .external import closedform_glm_mean, closedform_glm_scale logger =", "factors for X :param groupwise_means: optional, in case if already computed this can", "matrix for location :param constraints_loc: tensor (all parameters x dependent parameters) Tensor that", ":param constraints_loc: tensor (all parameters x dependent parameters) Tensor that encodes how complete", "parameters of normal GLMs. 
:param x: The sample data :param design_scale: design matrix", "complete parameter set which includes dependent parameters arises from indepedent parameters: all =", "closedform_glm_mean( x=x, dmat=design_loc, constraints=constraints_loc, size_factors=size_factors, link_fn=link_fn, inv_link_fn=inv_link_fn ) def closedform_norm_glm_logsd( x: Union[np.ndarray, scipy.sparse.csr_matrix],", "set which includes dependent parameters arises from indepedent parameters: all = <constraints, indep>.", "includes dependent parameters arises from indepedent parameters: all = <constraints, indep>. This form", "log-scale parameters of normal GLMs. :param x: The sample data :param design_scale: design", "constraints :param size_factors: size factors for X :param groupwise_means: optional, in case if", "all = <constraints, indep>. This form of constraints is used in vector generalized", "GLMs. :param x: The sample data :param design_loc: design matrix for location :param", "closedform_glm_mean, closedform_glm_scale logger = logging.getLogger(\"batchglm\") def closedform_norm_glm_mean( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_loc: np.ndarray, constraints_loc,", "This form of constraints is used in vector generalized linear models (VGLMs). :param", "arises from indepedent parameters: all = <constraints, indep>. This form of constraints is", "np.ndarray, constraints=None, size_factors=None, groupwise_means=None, link_fn=np.log ): r\"\"\" Calculates a closed-form solution for the", "typing import Union from .external import closedform_glm_mean, closedform_glm_scale logger = logging.getLogger(\"batchglm\") def closedform_norm_glm_mean(", "of normal GLMs. 
:param x: The sample data :param design_scale: design matrix for", "(groupwise_means, mean, rmsd) \"\"\" return closedform_glm_mean( x=x, dmat=design_loc, constraints=constraints_loc, size_factors=size_factors, link_fn=link_fn, inv_link_fn=inv_link_fn )", ":param size_factors: size factors for X :param groupwise_means: optional, in case if already", "inv_link_fn=inv_link_fn ) def closedform_norm_glm_logsd( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_scale: np.ndarray, constraints=None, size_factors=None, groupwise_means=None, link_fn=np.log", ".external import closedform_glm_mean, closedform_glm_scale logger = logging.getLogger(\"batchglm\") def closedform_norm_glm_mean( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_loc:", "the log-scale parameters of normal GLMs. :param x: The sample data :param design_scale:", "size_factors: size factors for X :return: tuple: (groupwise_means, mean, rmsd) \"\"\" return closedform_glm_mean(", "link_fn=lambda x: x, inv_link_fn=lambda x: x ): r\"\"\" Calculates a closed-form solution for", "x, inv_link_fn=lambda x: x ): r\"\"\" Calculates a closed-form solution for the `mean`", "tuple: (groupwise_means, mean, rmsd) \"\"\" return closedform_glm_mean( x=x, dmat=design_loc, constraints=constraints_loc, size_factors=size_factors, link_fn=link_fn, inv_link_fn=inv_link_fn", "data :param design_scale: design matrix for scale :param constraints: some design constraints :param", "r\"\"\" Calculates a closed-form solution for the `mean` parameters of normal GLMs. :param", "(all parameters x dependent parameters) Tensor that encodes how complete parameter set which", "numpy as np import scipy.sparse from typing import Union from .external import closedform_glm_mean,", "design matrix for location :param constraints_loc: tensor (all parameters x dependent parameters) Tensor", "<constraints, indep>. 
This form of constraints is used in vector generalized linear models", "return closedform_glm_mean( x=x, dmat=design_loc, constraints=constraints_loc, size_factors=size_factors, link_fn=link_fn, inv_link_fn=inv_link_fn ) def closedform_norm_glm_logsd( x: Union[np.ndarray,", "can be specified to spare double-calculation :return: tuple (groupwise_scales, logsd, rmsd) \"\"\" def", "be specified to spare double-calculation :return: tuple (groupwise_scales, logsd, rmsd) \"\"\" def compute_scales_fun(variance,", "\"\"\" return closedform_glm_mean( x=x, dmat=design_loc, constraints=constraints_loc, size_factors=size_factors, link_fn=link_fn, inv_link_fn=inv_link_fn ) def closedform_norm_glm_logsd( x:", "def closedform_norm_glm_mean( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_loc: np.ndarray, constraints_loc, size_factors=None, link_fn=lambda x: x, inv_link_fn=lambda", "np.ndarray, constraints_loc, size_factors=None, link_fn=lambda x: x, inv_link_fn=lambda x: x ): r\"\"\" Calculates a", "encodes how complete parameter set which includes dependent parameters arises from indepedent parameters:", "scipy.sparse.csr_matrix], design_scale: np.ndarray, constraints=None, size_factors=None, groupwise_means=None, link_fn=np.log ): r\"\"\" Calculates a closed-form solution", "logging.getLogger(\"batchglm\") def closedform_norm_glm_mean( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_loc: np.ndarray, constraints_loc, size_factors=None, link_fn=lambda x: x,", "): r\"\"\" Calculates a closed-form solution for the `mean` parameters of normal GLMs.", "closed-form solution for the log-scale parameters of normal GLMs. 
:param x: The sample", "link_fn=link_fn, inv_link_fn=inv_link_fn ) def closedform_norm_glm_logsd( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_scale: np.ndarray, constraints=None, size_factors=None, groupwise_means=None,", "X :return: tuple: (groupwise_means, mean, rmsd) \"\"\" return closedform_glm_mean( x=x, dmat=design_loc, constraints=constraints_loc, size_factors=size_factors,", "x ): r\"\"\" Calculates a closed-form solution for the `mean` parameters of normal", "computed this can be specified to spare double-calculation :return: tuple (groupwise_scales, logsd, rmsd)", "parameter set which includes dependent parameters arises from indepedent parameters: all = <constraints,", "parameters of normal GLMs. :param x: The sample data :param design_loc: design matrix", "solution for the log-scale parameters of normal GLMs. :param x: The sample data", "location :param constraints_loc: tensor (all parameters x dependent parameters) Tensor that encodes how", "constraints=None, size_factors=None, groupwise_means=None, link_fn=np.log ): r\"\"\" Calculates a closed-form solution for the log-scale", "(groupwise_scales, logsd, rmsd) \"\"\" def compute_scales_fun(variance, mean): groupwise_scales = np.sqrt(variance) return groupwise_scales return", "`mean` parameters of normal GLMs. :param x: The sample data :param design_loc: design", "mean): groupwise_scales = np.sqrt(variance) return groupwise_scales return closedform_glm_scale( x=x, design_scale=design_scale, constraints=constraints, size_factors=size_factors, groupwise_means=groupwise_means,", "Calculates a closed-form solution for the `mean` parameters of normal GLMs. :param x:", "= logging.getLogger(\"batchglm\") def closedform_norm_glm_mean( x: Union[np.ndarray, scipy.sparse.csr_matrix], design_loc: np.ndarray, constraints_loc, size_factors=None, link_fn=lambda x:", "parameters: all = <constraints, indep>. 
This form of constraints is used in vector", "in case if already computed this can be specified to spare double-calculation :return:", "as np import scipy.sparse from typing import Union from .external import closedform_glm_mean, closedform_glm_scale", "from .external import closedform_glm_mean, closedform_glm_scale logger = logging.getLogger(\"batchglm\") def closedform_norm_glm_mean( x: Union[np.ndarray, scipy.sparse.csr_matrix],", "constraints_loc: tensor (all parameters x dependent parameters) Tensor that encodes how complete parameter", "x: Union[np.ndarray, scipy.sparse.csr_matrix], design_loc: np.ndarray, constraints_loc, size_factors=None, link_fn=lambda x: x, inv_link_fn=lambda x: x" ]
[ "# -*- coding: utf-8 -*- # Copyright (c) 2019, 9T9IT and contributors #", "import unicode_literals import frappe def after_insert(doc, method): is_gift_card = frappe.db.get_value(\"Item\", doc.item_code, \"is_gift_card\") if", "\"Gift Card\", \"gift_card_no\": doc.serial_no, \"amount\": gift_card_value, } ).insert() def on_trash(doc, method): gift_card_no =", "-*- # Copyright (c) 2019, 9T9IT and contributors # For license information, please", "information, please see license.txt from __future__ import unicode_literals import frappe def after_insert(doc, method):", "contributors # For license information, please see license.txt from __future__ import unicode_literals import", "# For license information, please see license.txt from __future__ import unicode_literals import frappe", "= frappe.db.get_value(\"Item\", doc.item_code, \"gift_card_value\") frappe.get_doc( { \"doctype\": \"Gift Card\", \"gift_card_no\": doc.serial_no, \"amount\": gift_card_value,", "\"doctype\": \"Gift Card\", \"gift_card_no\": doc.serial_no, \"amount\": gift_card_value, } ).insert() def on_trash(doc, method): gift_card_no", "after_insert(doc, method): is_gift_card = frappe.db.get_value(\"Item\", doc.item_code, \"is_gift_card\") if is_gift_card: gift_card_value = frappe.db.get_value(\"Item\", doc.item_code,", "(c) 2019, 9T9IT and contributors # For license information, please see license.txt from", "please see license.txt from __future__ import unicode_literals import frappe def after_insert(doc, method): is_gift_card", "is_gift_card = frappe.db.get_value(\"Item\", doc.item_code, \"is_gift_card\") if is_gift_card: gift_card_value = frappe.db.get_value(\"Item\", doc.item_code, \"gift_card_value\") frappe.get_doc(", "Copyright (c) 2019, 9T9IT and contributors # For license information, please see license.txt", "license information, please see license.txt from __future__ import unicode_literals import frappe def after_insert(doc,", "coding: utf-8 -*- # Copyright (c) 2019, 9T9IT and 
contributors # For license", "frappe.db.get_value(\"Item\", doc.item_code, \"gift_card_value\") frappe.get_doc( { \"doctype\": \"Gift Card\", \"gift_card_no\": doc.serial_no, \"amount\": gift_card_value, }", "\"is_gift_card\") if is_gift_card: gift_card_value = frappe.db.get_value(\"Item\", doc.item_code, \"gift_card_value\") frappe.get_doc( { \"doctype\": \"Gift Card\",", "__future__ import unicode_literals import frappe def after_insert(doc, method): is_gift_card = frappe.db.get_value(\"Item\", doc.item_code, \"is_gift_card\")", "def after_insert(doc, method): is_gift_card = frappe.db.get_value(\"Item\", doc.item_code, \"is_gift_card\") if is_gift_card: gift_card_value = frappe.db.get_value(\"Item\",", "frappe.db.get_value(\"Item\", doc.item_code, \"is_gift_card\") if is_gift_card: gift_card_value = frappe.db.get_value(\"Item\", doc.item_code, \"gift_card_value\") frappe.get_doc( { \"doctype\":", "if is_gift_card: gift_card_value = frappe.db.get_value(\"Item\", doc.item_code, \"gift_card_value\") frappe.get_doc( { \"doctype\": \"Gift Card\", \"gift_card_no\":", "gift_card_value, } ).insert() def on_trash(doc, method): gift_card_no = frappe.db.exists(\"Gift Card\", {\"gift_card_no\": doc.serial_no}) if", "9T9IT and contributors # For license information, please see license.txt from __future__ import", "utf-8 -*- # Copyright (c) 2019, 9T9IT and contributors # For license information,", "<reponame>iptelephony/optic_store # -*- coding: utf-8 -*- # Copyright (c) 2019, 9T9IT and contributors", "from __future__ import unicode_literals import frappe def after_insert(doc, method): is_gift_card = frappe.db.get_value(\"Item\", doc.item_code,", "frappe.get_doc( { \"doctype\": \"Gift Card\", \"gift_card_no\": doc.serial_no, \"amount\": gift_card_value, } ).insert() def on_trash(doc,", "doc.serial_no, \"amount\": gift_card_value, } ).insert() def on_trash(doc, method): gift_card_no = frappe.db.exists(\"Gift Card\", {\"gift_card_no\":", "2019, 9T9IT and contributors # For 
license information, please see license.txt from __future__", "see license.txt from __future__ import unicode_literals import frappe def after_insert(doc, method): is_gift_card =", "doc.item_code, \"gift_card_value\") frappe.get_doc( { \"doctype\": \"Gift Card\", \"gift_card_no\": doc.serial_no, \"amount\": gift_card_value, } ).insert()", ").insert() def on_trash(doc, method): gift_card_no = frappe.db.exists(\"Gift Card\", {\"gift_card_no\": doc.serial_no}) if gift_card_no: frappe.delete_doc(\"Gift", "and contributors # For license information, please see license.txt from __future__ import unicode_literals", "import frappe def after_insert(doc, method): is_gift_card = frappe.db.get_value(\"Item\", doc.item_code, \"is_gift_card\") if is_gift_card: gift_card_value", "license.txt from __future__ import unicode_literals import frappe def after_insert(doc, method): is_gift_card = frappe.db.get_value(\"Item\",", "# Copyright (c) 2019, 9T9IT and contributors # For license information, please see", "\"amount\": gift_card_value, } ).insert() def on_trash(doc, method): gift_card_no = frappe.db.exists(\"Gift Card\", {\"gift_card_no\": doc.serial_no})", "frappe def after_insert(doc, method): is_gift_card = frappe.db.get_value(\"Item\", doc.item_code, \"is_gift_card\") if is_gift_card: gift_card_value =", "Card\", \"gift_card_no\": doc.serial_no, \"amount\": gift_card_value, } ).insert() def on_trash(doc, method): gift_card_no = frappe.db.exists(\"Gift", "-*- coding: utf-8 -*- # Copyright (c) 2019, 9T9IT and contributors # For", "is_gift_card: gift_card_value = frappe.db.get_value(\"Item\", doc.item_code, \"gift_card_value\") frappe.get_doc( { \"doctype\": \"Gift Card\", \"gift_card_no\": doc.serial_no,", "} ).insert() def on_trash(doc, method): gift_card_no = frappe.db.exists(\"Gift Card\", {\"gift_card_no\": doc.serial_no}) if gift_card_no:", "def on_trash(doc, method): gift_card_no = frappe.db.exists(\"Gift Card\", {\"gift_card_no\": doc.serial_no}) if gift_card_no: 
frappe.delete_doc(\"Gift Card\",", "on_trash(doc, method): gift_card_no = frappe.db.exists(\"Gift Card\", {\"gift_card_no\": doc.serial_no}) if gift_card_no: frappe.delete_doc(\"Gift Card\", gift_card_no)", "gift_card_value = frappe.db.get_value(\"Item\", doc.item_code, \"gift_card_value\") frappe.get_doc( { \"doctype\": \"Gift Card\", \"gift_card_no\": doc.serial_no, \"amount\":", "\"gift_card_value\") frappe.get_doc( { \"doctype\": \"Gift Card\", \"gift_card_no\": doc.serial_no, \"amount\": gift_card_value, } ).insert() def", "unicode_literals import frappe def after_insert(doc, method): is_gift_card = frappe.db.get_value(\"Item\", doc.item_code, \"is_gift_card\") if is_gift_card:", "{ \"doctype\": \"Gift Card\", \"gift_card_no\": doc.serial_no, \"amount\": gift_card_value, } ).insert() def on_trash(doc, method):", "doc.item_code, \"is_gift_card\") if is_gift_card: gift_card_value = frappe.db.get_value(\"Item\", doc.item_code, \"gift_card_value\") frappe.get_doc( { \"doctype\": \"Gift", "= frappe.db.get_value(\"Item\", doc.item_code, \"is_gift_card\") if is_gift_card: gift_card_value = frappe.db.get_value(\"Item\", doc.item_code, \"gift_card_value\") frappe.get_doc( {", "method): is_gift_card = frappe.db.get_value(\"Item\", doc.item_code, \"is_gift_card\") if is_gift_card: gift_card_value = frappe.db.get_value(\"Item\", doc.item_code, \"gift_card_value\")", "For license information, please see license.txt from __future__ import unicode_literals import frappe def", "\"gift_card_no\": doc.serial_no, \"amount\": gift_card_value, } ).insert() def on_trash(doc, method): gift_card_no = frappe.db.exists(\"Gift Card\"," ]
[ "decade: 5256000 # print(60*24*365*10) # #age in seconds:1135296000 # print(60*60*24*365*36) # #days 32-bit", "a person's first name, middle and last then greet with full name #", "soc-wk1-cert-Diana-Ilinca.py # # Day1 homework # #hours in a year:8760 # print(365*24) #", "print((2**63)/100/60/60/24) # Day3 homework # Program that asks for a person's first name,", "#AngryBoss # Boss= raw_input(\"What do you want this time?!?\".upper()) # print('SO YOU THINK", "IDEA??? FIRED!') #Table of Contents print('Chapter1 : Getting Started'+ ' page 10'.rjust(10)) print('Chapter", "Program that asks for a person's favourite number, add 1 and suggest the", "that asks for a person's favourite number, add 1 and suggest the result", "to timeout:497 # print((2**32-1)/100/60/60/24) # #days 64-bit system to timeout:1067519911673 # print((2**63)/100/60/60/24) #", "name?\") # middlename = raw_input(\"What is your middle name?\") # lastname = raw_input(\"What", "# Program that asks for a person's favourite number, add 1 and suggest", "first name, middle and last then greet with full name # firstname =", "# print((2**63)/100/60/60/24) # Day3 homework # Program that asks for a person's first", "for a person's favourite number, add 1 and suggest the result # fav_number", "to consider ' +str(suggestion)+ ' as a new fav. Just a thought..') #", "middle name?\") # lastname = raw_input(\"What is your last name?\") # print('Hello there", "# firstname = raw_input(\"What is your first name?\") # middlename = raw_input(\"What is", "# #age in seconds:1135296000 # print(60*60*24*365*36) # #days 32-bit system to timeout:497 #", "this time?!?\".upper()) # print('SO YOU THINK ' + str(Boss).upper()+' IS A GOOD IDEA???", "print('your number is nice. 
However you might want to consider ' +str(suggestion)+ '", "print(60*60*24*365*36) # #days 32-bit system to timeout:497 # print((2**32-1)/100/60/60/24) # #days 64-bit system", "Day1 homework # #hours in a year:8760 # print(365*24) # #minutes in a", ": Getting Started'+ ' page 10'.rjust(10)) print('Chapter 2: Numbers'+ ' page 9'.rjust(17)) print('Chapter", "# print('SO YOU THINK ' + str(Boss).upper()+' IS A GOOD IDEA??? FIRED!') #Table", "# #days 32-bit system to timeout:497 # print((2**32-1)/100/60/60/24) # #days 64-bit system to", "str(Boss).upper()+' IS A GOOD IDEA??? FIRED!') #Table of Contents print('Chapter1 : Getting Started'+", "homework # Program that asks for a person's first name, middle and last", "time?!?\".upper()) # print('SO YOU THINK ' + str(Boss).upper()+' IS A GOOD IDEA??? FIRED!')", "Getting Started'+ ' page 10'.rjust(10)) print('Chapter 2: Numbers'+ ' page 9'.rjust(17)) print('Chapter 3:", "64-bit system to timeout:1067519911673 # print((2**63)/100/60/60/24) # Day3 homework # Program that asks", "is your first name?\") # middlename = raw_input(\"What is your middle name?\") #", "+str(suggestion)+ ' as a new fav. Just a thought..') # #AngryBoss # Boss=", "homework # #hours in a year:8760 # print(365*24) # #minutes in a decade:", "nice. However you might want to consider ' +str(suggestion)+ ' as a new", "YOU THINK ' + str(Boss).upper()+' IS A GOOD IDEA??? FIRED!') #Table of Contents", "number is nice. However you might want to consider ' +str(suggestion)+ ' as", "# print(60*60*24*365*36) # #days 32-bit system to timeout:497 # print((2**32-1)/100/60/60/24) # #days 64-bit", "name, middle and last then greet with full name # firstname = raw_input(\"What", "greet with full name # firstname = raw_input(\"What is your first name?\") #", "# Boss= raw_input(\"What do you want this time?!?\".upper()) # print('SO YOU THINK '", "THINK ' + str(Boss).upper()+' IS A GOOD IDEA??? FIRED!') #Table of Contents print('Chapter1", "a new fav. 
Just a thought..') # #AngryBoss # Boss= raw_input(\"What do you", "print(60*24*365*10) # #age in seconds:1135296000 # print(60*60*24*365*36) # #days 32-bit system to timeout:497", "a decade: 5256000 # print(60*24*365*10) # #age in seconds:1135296000 # print(60*60*24*365*36) # #days", "your first name?\") # middlename = raw_input(\"What is your middle name?\") # lastname", "fav_number = int(raw_input(\"What is your favourite number?\")) # suggestion = fav_number+1 # print('your", "print('Chapter1 : Getting Started'+ ' page 10'.rjust(10)) print('Chapter 2: Numbers'+ ' page 9'.rjust(17))", "with full name # firstname = raw_input(\"What is your first name?\") # middlename", "print('SO YOU THINK ' + str(Boss).upper()+' IS A GOOD IDEA??? FIRED!') #Table of", "A GOOD IDEA??? FIRED!') #Table of Contents print('Chapter1 : Getting Started'+ ' page", "seconds:1135296000 # print(60*60*24*365*36) # #days 32-bit system to timeout:497 # print((2**32-1)/100/60/60/24) # #days", "there '+firstname+ middlename +lastname+ '!!') # # Program that asks for a person's", "# print(365*24) # #minutes in a decade: 5256000 # print(60*24*365*10) # #age in", "your favourite number?\")) # suggestion = fav_number+1 # print('your number is nice. However", "timeout:1067519911673 # print((2**63)/100/60/60/24) # Day3 homework # Program that asks for a person's", "last name?\") # print('Hello there '+firstname+ middlename +lastname+ '!!') # # Program that", "'!!') # # Program that asks for a person's favourite number, add 1", "is nice. However you might want to consider ' +str(suggestion)+ ' as a", "number, add 1 and suggest the result # fav_number = int(raw_input(\"What is your", "= fav_number+1 # print('your number is nice. 
However you might want to consider", "you want this time?!?\".upper()) # print('SO YOU THINK ' + str(Boss).upper()+' IS A", "Boss= raw_input(\"What do you want this time?!?\".upper()) # print('SO YOU THINK ' +", "the result # fav_number = int(raw_input(\"What is your favourite number?\")) # suggestion =", "name # firstname = raw_input(\"What is your first name?\") # middlename = raw_input(\"What", "print('Chapter 2: Numbers'+ ' page 9'.rjust(17)) print('Chapter 3: Letters page'+ ' page 13'.rjust(13))", "system to timeout:497 # print((2**32-1)/100/60/60/24) # #days 64-bit system to timeout:1067519911673 # print((2**63)/100/60/60/24)", "# #hours in a year:8760 # print(365*24) # #minutes in a decade: 5256000", "in seconds:1135296000 # print(60*60*24*365*36) # #days 32-bit system to timeout:497 # print((2**32-1)/100/60/60/24) #", "# print('Hello there '+firstname+ middlename +lastname+ '!!') # # Program that asks for", "consider ' +str(suggestion)+ ' as a new fav. Just a thought..') # #AngryBoss", "as a new fav. Just a thought..') # #AngryBoss # Boss= raw_input(\"What do", "a person's favourite number, add 1 and suggest the result # fav_number =", "lastname = raw_input(\"What is your last name?\") # print('Hello there '+firstname+ middlename +lastname+", "# middlename = raw_input(\"What is your middle name?\") # lastname = raw_input(\"What is", "#hours in a year:8760 # print(365*24) # #minutes in a decade: 5256000 #", "However you might want to consider ' +str(suggestion)+ ' as a new fav.", "want to consider ' +str(suggestion)+ ' as a new fav. Just a thought..')", "number?\")) # suggestion = fav_number+1 # print('your number is nice. 
However you might", "#age in seconds:1135296000 # print(60*60*24*365*36) # #days 32-bit system to timeout:497 # print((2**32-1)/100/60/60/24)", "add 1 and suggest the result # fav_number = int(raw_input(\"What is your favourite", "to timeout:1067519911673 # print((2**63)/100/60/60/24) # Day3 homework # Program that asks for a", "# # Program that asks for a person's favourite number, add 1 and", "raw_input(\"What is your middle name?\") # lastname = raw_input(\"What is your last name?\")", "'+firstname+ middlename +lastname+ '!!') # # Program that asks for a person's favourite", "print(365*24) # #minutes in a decade: 5256000 # print(60*24*365*10) # #age in seconds:1135296000", "#minutes in a decade: 5256000 # print(60*24*365*10) # #age in seconds:1135296000 # print(60*60*24*365*36)", "new fav. Just a thought..') # #AngryBoss # Boss= raw_input(\"What do you want", "your last name?\") # print('Hello there '+firstname+ middlename +lastname+ '!!') # # Program", "a year:8760 # print(365*24) # #minutes in a decade: 5256000 # print(60*24*365*10) #", "1 and suggest the result # fav_number = int(raw_input(\"What is your favourite number?\"))", "is your favourite number?\")) # suggestion = fav_number+1 # print('your number is nice.", "a thought..') # #AngryBoss # Boss= raw_input(\"What do you want this time?!?\".upper()) #", "' page 10'.rjust(10)) print('Chapter 2: Numbers'+ ' page 9'.rjust(17)) print('Chapter 3: Letters page'+", "you might want to consider ' +str(suggestion)+ ' as a new fav. Just", "Day3 homework # Program that asks for a person's first name, middle and", "person's first name, middle and last then greet with full name # firstname", "middlename +lastname+ '!!') # # Program that asks for a person's favourite number,", "5256000 # print(60*24*365*10) # #age in seconds:1135296000 # print(60*60*24*365*36) # #days 32-bit system", "# print('your number is nice. 
However you might want to consider ' +str(suggestion)+", "asks for a person's first name, middle and last then greet with full", "is your middle name?\") # lastname = raw_input(\"What is your last name?\") #", "# print((2**32-1)/100/60/60/24) # #days 64-bit system to timeout:1067519911673 # print((2**63)/100/60/60/24) # Day3 homework", "Just a thought..') # #AngryBoss # Boss= raw_input(\"What do you want this time?!?\".upper())", "# #minutes in a decade: 5256000 # print(60*24*365*10) # #age in seconds:1135296000 #", "' + str(Boss).upper()+' IS A GOOD IDEA??? FIRED!') #Table of Contents print('Chapter1 :", "in a decade: 5256000 # print(60*24*365*10) # #age in seconds:1135296000 # print(60*60*24*365*36) #", "= raw_input(\"What is your last name?\") # print('Hello there '+firstname+ middlename +lastname+ '!!')", "32-bit system to timeout:497 # print((2**32-1)/100/60/60/24) # #days 64-bit system to timeout:1067519911673 #", "Contents print('Chapter1 : Getting Started'+ ' page 10'.rjust(10)) print('Chapter 2: Numbers'+ ' page", "and last then greet with full name # firstname = raw_input(\"What is your", "print((2**32-1)/100/60/60/24) # #days 64-bit system to timeout:1067519911673 # print((2**63)/100/60/60/24) # Day3 homework #", "first name?\") # middlename = raw_input(\"What is your middle name?\") # lastname =", "raw_input(\"What is your last name?\") # print('Hello there '+firstname+ middlename +lastname+ '!!') #", "in a year:8760 # print(365*24) # #minutes in a decade: 5256000 # print(60*24*365*10)", "full name # firstname = raw_input(\"What is your first name?\") # middlename =", "' as a new fav. 
Just a thought..') # #AngryBoss # Boss= raw_input(\"What", "want this time?!?\".upper()) # print('SO YOU THINK ' + str(Boss).upper()+' IS A GOOD", "= int(raw_input(\"What is your favourite number?\")) # suggestion = fav_number+1 # print('your number", "Program that asks for a person's first name, middle and last then greet", "that asks for a person's first name, middle and last then greet with", "middle and last then greet with full name # firstname = raw_input(\"What is", "page 10'.rjust(10)) print('Chapter 2: Numbers'+ ' page 9'.rjust(17)) print('Chapter 3: Letters page'+ '", "asks for a person's favourite number, add 1 and suggest the result #", "+lastname+ '!!') # # Program that asks for a person's favourite number, add", "do you want this time?!?\".upper()) # print('SO YOU THINK ' + str(Boss).upper()+' IS", "name?\") # print('Hello there '+firstname+ middlename +lastname+ '!!') # # Program that asks", "of Contents print('Chapter1 : Getting Started'+ ' page 10'.rjust(10)) print('Chapter 2: Numbers'+ '", "fav_number+1 # print('your number is nice. However you might want to consider '", "your middle name?\") # lastname = raw_input(\"What is your last name?\") # print('Hello", "# # soc-wk1-cert-Diana-Ilinca.py # # Day1 homework # #hours in a year:8760 #", "#days 32-bit system to timeout:497 # print((2**32-1)/100/60/60/24) # #days 64-bit system to timeout:1067519911673", "suggest the result # fav_number = int(raw_input(\"What is your favourite number?\")) # suggestion", "= raw_input(\"What is your middle name?\") # lastname = raw_input(\"What is your last", "FIRED!') #Table of Contents print('Chapter1 : Getting Started'+ ' page 10'.rjust(10)) print('Chapter 2:", "# #days 64-bit system to timeout:1067519911673 # print((2**63)/100/60/60/24) # Day3 homework # Program", "GOOD IDEA??? 
FIRED!') #Table of Contents print('Chapter1 : Getting Started'+ ' page 10'.rjust(10))", "raw_input(\"What is your first name?\") # middlename = raw_input(\"What is your middle name?\")", "#days 64-bit system to timeout:1067519911673 # print((2**63)/100/60/60/24) # Day3 homework # Program that", "+ str(Boss).upper()+' IS A GOOD IDEA??? FIRED!') #Table of Contents print('Chapter1 : Getting", "#Table of Contents print('Chapter1 : Getting Started'+ ' page 10'.rjust(10)) print('Chapter 2: Numbers'+", "# suggestion = fav_number+1 # print('your number is nice. However you might want", "IS A GOOD IDEA??? FIRED!') #Table of Contents print('Chapter1 : Getting Started'+ '", "# lastname = raw_input(\"What is your last name?\") # print('Hello there '+firstname+ middlename", "fav. Just a thought..') # #AngryBoss # Boss= raw_input(\"What do you want this", "# Program that asks for a person's first name, middle and last then", "last then greet with full name # firstname = raw_input(\"What is your first", "and suggest the result # fav_number = int(raw_input(\"What is your favourite number?\")) #", "name?\") # lastname = raw_input(\"What is your last name?\") # print('Hello there '+firstname+", "int(raw_input(\"What is your favourite number?\")) # suggestion = fav_number+1 # print('your number is", "' +str(suggestion)+ ' as a new fav. 
Just a thought..') # #AngryBoss #", "then greet with full name # firstname = raw_input(\"What is your first name?\")", "# fav_number = int(raw_input(\"What is your favourite number?\")) # suggestion = fav_number+1 #", "10'.rjust(10)) print('Chapter 2: Numbers'+ ' page 9'.rjust(17)) print('Chapter 3: Letters page'+ ' page", "# soc-wk1-cert-Diana-Ilinca.py # # Day1 homework # #hours in a year:8760 # print(365*24)", "favourite number, add 1 and suggest the result # fav_number = int(raw_input(\"What is", "= raw_input(\"What is your first name?\") # middlename = raw_input(\"What is your middle", "# print(60*24*365*10) # #age in seconds:1135296000 # print(60*60*24*365*36) # #days 32-bit system to", "# # Day1 homework # #hours in a year:8760 # print(365*24) # #minutes", "year:8760 # print(365*24) # #minutes in a decade: 5256000 # print(60*24*365*10) # #age", "might want to consider ' +str(suggestion)+ ' as a new fav. Just a", "person's favourite number, add 1 and suggest the result # fav_number = int(raw_input(\"What", "thought..') # #AngryBoss # Boss= raw_input(\"What do you want this time?!?\".upper()) # print('SO", "result # fav_number = int(raw_input(\"What is your favourite number?\")) # suggestion = fav_number+1", "suggestion = fav_number+1 # print('your number is nice. 
However you might want to", "print('Hello there '+firstname+ middlename +lastname+ '!!') # # Program that asks for a", "for a person's first name, middle and last then greet with full name", "middlename = raw_input(\"What is your middle name?\") # lastname = raw_input(\"What is your", "raw_input(\"What do you want this time?!?\".upper()) # print('SO YOU THINK ' + str(Boss).upper()+'", "system to timeout:1067519911673 # print((2**63)/100/60/60/24) # Day3 homework # Program that asks for", "<reponame>dianaproca/toolkitten<filename>soc-wk1-cert-Diana-Ilinca.py # # soc-wk1-cert-Diana-Ilinca.py # # Day1 homework # #hours in a year:8760", "is your last name?\") # print('Hello there '+firstname+ middlename +lastname+ '!!') # #", "Started'+ ' page 10'.rjust(10)) print('Chapter 2: Numbers'+ ' page 9'.rjust(17)) print('Chapter 3: Letters", "# Day3 homework # Program that asks for a person's first name, middle", "favourite number?\")) # suggestion = fav_number+1 # print('your number is nice. However you", "firstname = raw_input(\"What is your first name?\") # middlename = raw_input(\"What is your", "# #AngryBoss # Boss= raw_input(\"What do you want this time?!?\".upper()) # print('SO YOU", "timeout:497 # print((2**32-1)/100/60/60/24) # #days 64-bit system to timeout:1067519911673 # print((2**63)/100/60/60/24) # Day3", "# Day1 homework # #hours in a year:8760 # print(365*24) # #minutes in" ]
[ "distributed in the hope that it will be useful, # but WITHOUT ANY", "the GNU General Public License # along with Diamond. If not, see <http://www.gnu.org/licenses/>.", "\"insert\", \"location\": path, \"index\": index, \"value\": tag + (\" \" + value if", "PURPOSE. See the # GNU General Public License for more details. # #", "either version 3 of the License, or # (at your option) any later", "[] def __str__(self): return etree.tostring(self.to_xml(), pretty_print = True) def __len__(self): return len(self.script) def", "userdata }) def to_xml(self): tree = etree.Element(\"xmldiff\") for edit in self.script: node =", "by # the Free Software Foundation, either version 3 of the License, or", "version 3 of the License, or # (at your option) any later version.", "\"value\": tag + (\" \" + value if value is not None else", "published by # the Free Software Foundation, either version 3 of the License,", "it and/or modify # it under the terms of the GNU General Public", "value if value is not None else \"\"), \"userdata\": userdata}) def delete(self, path,", "dxdiff. 
# # dxdiff is free software: you can redistribute it and/or modify", "pretty_print = True) def __len__(self): return len(self.script) def __getitem__(self, key): return self.script[key] def", "\"location\": path, \"value\": value, \"userdata\": userdata }) def insert(self, path, index, tag, value", "__str__(self): return etree.tostring(self.to_xml(), pretty_print = True) def __len__(self): return len(self.script) def __getitem__(self, key):", "\"index\" in edit: node.attrib[\"index\"] = edit[\"index\"] if edit[\"userdata\"] is not None: node.attrib[\"userdata\"] =", "def write(self, path): self.to_xml().write(path, pretty_print = True, xml_declaration = True, encoding = \"utf-8\")", "\"value\": value, \"userdata\": userdata }) def insert(self, path, index, tag, value = None,", "True) def __len__(self): return len(self.script) def __getitem__(self, key): return self.script[key] def __iter__(self): return", "# it under the terms of the GNU General Public License as published", "terms of the GNU General Public License as published by # the Free", "for edit in self.script: node = etree.Element(edit[\"type\"], location = edit[\"location\"]) if \"index\" in", "\"value\" in edit: node.text = edit[\"value\"] tree.append(node) return etree.ElementTree(tree) def write(self, path): self.to_xml().write(path,", "GNU General Public License as published by # the Free Software Foundation, either", "index, tag, value = None, userdata = None): self.script.append({ \"type\": \"insert\", \"location\": path,", "\"type\": \"delete\", \"location\": path, \"userdata\": userdata}) def move(self, path, destination, index, userdata =", "it under the terms of the GNU General Public License as published by", "etree.Element(\"xmldiff\") for edit in self.script: node = etree.Element(edit[\"type\"], location = edit[\"location\"]) if \"index\"", "#!/usr/bin/env python # This file is part of dxdiff. 
# # dxdiff is", "userdata = None): self.script.append({ \"type\": \"move\", \"location\": path, \"index\": index, \"value\": destination, \"userdata\":", "return len(self.script) def __getitem__(self, key): return self.script[key] def __iter__(self): return self.script.__iter__() def update(self,", "License for more details. # # You should have received a copy of", "edit[\"userdata\"] is not None: node.attrib[\"userdata\"] = edit[\"userdata\"] if \"value\" in edit: node.text =", "free software: you can redistribute it and/or modify # it under the terms", "self.script: node = etree.Element(edit[\"type\"], location = edit[\"location\"]) if \"index\" in edit: node.attrib[\"index\"] =", "edit: node.attrib[\"index\"] = edit[\"index\"] if edit[\"userdata\"] is not None: node.attrib[\"userdata\"] = edit[\"userdata\"] if", "you can redistribute it and/or modify # it under the terms of the", "userdata = None): self.script.append({ \"type\": \"update\", \"location\": path, \"value\": value, \"userdata\": userdata })", "see <http://www.gnu.org/licenses/>. from lxml import etree class EditScript: def __init__(self): self.script = []", "= None): self.script.append({ \"type\": \"move\", \"location\": path, \"index\": index, \"value\": destination, \"userdata\": userdata", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General", "<http://www.gnu.org/licenses/>. 
class EditScript:
  """An ordered list of XML edit operations (update / insert / delete / move).

  Each recorded edit is a dict with at least "type", "location" and
  "userdata" keys; "index" and "value" are present only for the
  operations that need them.  The script behaves like a read-only
  sequence of those dicts and can be serialised to an <xmldiff> XML
  document via to_xml() / write().
  """

  def __init__(self):
    # Recorded edits, in application order.
    self.script = []

  def __str__(self):
    # imported lazily so the sequence API works without lxml installed
    from lxml import etree
    xml = etree.tostring(self.to_xml(), pretty_print = True)
    # lxml's tostring() returns bytes on Python 3, but __str__ must
    # return text (the original returned the bytes object directly,
    # which raises TypeError under Python 3).
    return xml.decode("utf-8") if isinstance(xml, bytes) else xml

  def __len__(self):
    return len(self.script)

  def __getitem__(self, key):
    return self.script[key]

  def __iter__(self):
    return self.script.__iter__()

  def update(self, path, value, userdata = None):
    """Record an update of the node at *path* to *value*."""
    self.script.append({ "type": "update", "location": path, "value": value, "userdata": userdata })

  def insert(self, path, index, tag, value = None, userdata = None):
    """Record an insert of a new *tag* under *path* at child position *index*.

    *value*, if given, is appended after the tag name separated by a
    single space (the combined string is stored in the edit's "value").
    """
    self.script.append({ "type": "insert", "location": path, "index": index,
                         "value": tag + (" " + value if value is not None else ""),
                         "userdata": userdata})

  def delete(self, path, userdata = None):
    """Record a deletion of the node at *path*."""
    self.script.append({ "type": "delete", "location": path, "userdata": userdata})

  def move(self, path, destination, index, userdata = None):
    """Record a move of the node at *path* to child position *index* of *destination*."""
    self.script.append({ "type": "move", "location": path, "index": index, "value": destination, "userdata": userdata })

  def to_xml(self):
    """Serialise the script to an lxml ElementTree rooted at <xmldiff>.

    One child element per edit: the tag is the edit type, "location"
    (and optionally "index" / "userdata") become attributes, and the
    edit's "value" becomes the element text.
    """
    # imported lazily so the sequence API works without lxml installed
    from lxml import etree
    tree = etree.Element("xmldiff")
    for edit in self.script:
      node = etree.Element(edit["type"], location = edit["location"])
      if "index" in edit:
        # lxml attribute values must be text; insert()/move() indexes
        # are commonly ints, which lxml rejects with TypeError, so
        # coerce explicitly (str() is a no-op for strings).
        node.attrib["index"] = str(edit["index"])
      if edit["userdata"] is not None:
        node.attrib["userdata"] = str(edit["userdata"])
      if "value" in edit:
        node.text = edit["value"]
      tree.append(node)
    return etree.ElementTree(tree)

  def write(self, path):
    """Write the script to *path* as a pretty-printed UTF-8 XML file."""
    self.to_xml().write(path, pretty_print = True, xml_declaration = True, encoding = "utf-8")
# # dxdiff is distributed in the hope that it will", "path, value, userdata = None): self.script.append({ \"type\": \"update\", \"location\": path, \"value\": value, \"userdata\":", "None): self.script.append({ \"type\": \"update\", \"location\": path, \"value\": value, \"userdata\": userdata }) def insert(self,", "more details. # # You should have received a copy of the GNU", "\"location\": path, \"index\": index, \"value\": tag + (\" \" + value if value", "def __getitem__(self, key): return self.script[key] def __iter__(self): return self.script.__iter__() def update(self, path, value,", "# # dxdiff is free software: you can redistribute it and/or modify #", "of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU", "value, userdata = None): self.script.append({ \"type\": \"update\", \"location\": path, \"value\": value, \"userdata\": userdata", "GNU General Public License for more details. # # You should have received", "None else \"\"), \"userdata\": userdata}) def delete(self, path, userdata = None): self.script.append({ \"type\":", "not None: node.attrib[\"userdata\"] = edit[\"userdata\"] if \"value\" in edit: node.text = edit[\"value\"] tree.append(node)", "under the terms of the GNU General Public License as published by #", "# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY", "# GNU General Public License for more details. # # You should have", "path, \"index\": index, \"value\": tag + (\" \" + value if value is", "}) def insert(self, path, index, tag, value = None, userdata = None): self.script.append({", "# (at your option) any later version. 
# # dxdiff is distributed in", "can redistribute it and/or modify # it under the terms of the GNU", "Free Software Foundation, either version 3 of the License, or # (at your", "Software Foundation, either version 3 of the License, or # (at your option)", "= True) def __len__(self): return len(self.script) def __getitem__(self, key): return self.script[key] def __iter__(self):", "return etree.ElementTree(tree) def write(self, path): self.to_xml().write(path, pretty_print = True, xml_declaration = True, encoding", "but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or" ]
[ "= logging.getLogger('chatserver') app = Flask(__name__) messages = [] @app.route('/post/<who>/<message>') def post_message(who, message): messages.append((time(),", "%(threadName)s %(message)s') log = logging.getLogger('chatserver') app = Flask(__name__) messages = [] @app.route('/post/<who>/<message>') def", "format='%(asctime)s %(levelname)s %(threadName)s %(message)s') log = logging.getLogger('chatserver') app = Flask(__name__) messages = []", "messages = [] @app.route('/post/<who>/<message>') def post_message(who, message): messages.append((time(), request.remote_addr, who, message)) print(messages) return", "request PLAIN_HEADER = {'Content-Type': 'text/plain; charset=utf-8'} logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(threadName)s %(message)s') log =", "logging from time import time from flask import Flask, request PLAIN_HEADER = {'Content-Type':", "time import time from flask import Flask, request PLAIN_HEADER = {'Content-Type': 'text/plain; charset=utf-8'}", "message): messages.append((time(), request.remote_addr, who, message)) print(messages) return \"Message saved.\\n\" + str(messages), 200, PLAIN_HEADER", "who, message)) print(messages) return \"Message saved.\\n\" + str(messages), 200, PLAIN_HEADER app.run(host='localhost', debug=True, threaded=True)", "import logging from time import time from flask import Flask, request PLAIN_HEADER =", "[] @app.route('/post/<who>/<message>') def post_message(who, message): messages.append((time(), request.remote_addr, who, message)) print(messages) return \"Message saved.\\n\"", "%(message)s') log = logging.getLogger('chatserver') app = Flask(__name__) messages = [] @app.route('/post/<who>/<message>') def post_message(who,", "import Flask, request PLAIN_HEADER = {'Content-Type': 'text/plain; charset=utf-8'} logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(threadName)s %(message)s')", "log = logging.getLogger('chatserver') app = Flask(__name__) messages 
= [] @app.route('/post/<who>/<message>') def post_message(who, message):", "= {'Content-Type': 'text/plain; charset=utf-8'} logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(threadName)s %(message)s') log = logging.getLogger('chatserver') app", "= Flask(__name__) messages = [] @app.route('/post/<who>/<message>') def post_message(who, message): messages.append((time(), request.remote_addr, who, message))", "%(levelname)s %(threadName)s %(message)s') log = logging.getLogger('chatserver') app = Flask(__name__) messages = [] @app.route('/post/<who>/<message>')", "request.remote_addr, who, message)) print(messages) return \"Message saved.\\n\" + str(messages), 200, PLAIN_HEADER app.run(host='localhost', debug=True,", "time from flask import Flask, request PLAIN_HEADER = {'Content-Type': 'text/plain; charset=utf-8'} logging.basicConfig(level=logging.DEBUG, format='%(asctime)s", "flask import Flask, request PLAIN_HEADER = {'Content-Type': 'text/plain; charset=utf-8'} logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(threadName)s", "logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(threadName)s %(message)s') log = logging.getLogger('chatserver') app = Flask(__name__) messages =", "from time import time from flask import Flask, request PLAIN_HEADER = {'Content-Type': 'text/plain;", "= [] @app.route('/post/<who>/<message>') def post_message(who, message): messages.append((time(), request.remote_addr, who, message)) print(messages) return \"Message", "PLAIN_HEADER = {'Content-Type': 'text/plain; charset=utf-8'} logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(threadName)s %(message)s') log = logging.getLogger('chatserver')", "charset=utf-8'} logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(threadName)s %(message)s') log = logging.getLogger('chatserver') app = Flask(__name__) messages", "post_message(who, message): messages.append((time(), 
request.remote_addr, who, message)) print(messages) return \"Message saved.\\n\" + str(messages), 200,", "'text/plain; charset=utf-8'} logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(threadName)s %(message)s') log = logging.getLogger('chatserver') app = Flask(__name__)", "logging.getLogger('chatserver') app = Flask(__name__) messages = [] @app.route('/post/<who>/<message>') def post_message(who, message): messages.append((time(), request.remote_addr,", "Flask, request PLAIN_HEADER = {'Content-Type': 'text/plain; charset=utf-8'} logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(threadName)s %(message)s') log", "Flask(__name__) messages = [] @app.route('/post/<who>/<message>') def post_message(who, message): messages.append((time(), request.remote_addr, who, message)) print(messages)", "@app.route('/post/<who>/<message>') def post_message(who, message): messages.append((time(), request.remote_addr, who, message)) print(messages) return \"Message saved.\\n\" +", "app = Flask(__name__) messages = [] @app.route('/post/<who>/<message>') def post_message(who, message): messages.append((time(), request.remote_addr, who,", "import time from flask import Flask, request PLAIN_HEADER = {'Content-Type': 'text/plain; charset=utf-8'} logging.basicConfig(level=logging.DEBUG,", "from flask import Flask, request PLAIN_HEADER = {'Content-Type': 'text/plain; charset=utf-8'} logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s", "{'Content-Type': 'text/plain; charset=utf-8'} logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s %(threadName)s %(message)s') log = logging.getLogger('chatserver') app =", "def post_message(who, message): messages.append((time(), request.remote_addr, who, message)) print(messages) return \"Message saved.\\n\" + str(messages),", "messages.append((time(), request.remote_addr, who, message)) print(messages) return \"Message saved.\\n\" + str(messages), 200, 
PLAIN_HEADER app.run(host='localhost'," ]
[ "total -= node.val doCount(total) path.append(node.val) if node.left is not None: dfs(node.left) if node.right", ":type total: int :rtype: int \"\"\" if root is None: return 0 path", "Definition for a binary tree node. # class TreeNode: # def __init__(self, x):", "None: return 0 path = [] count = 0 def doCount(partial): nonlocal count", "def doCount(partial): nonlocal count if partial == 0: count += 1 for val", "count += 1 for val in path: partial += val if partial ==", "Solution: def pathSum(self, root, total): \"\"\" :type root: TreeNode :type total: int :rtype:", "class TreeNode: # def __init__(self, x): # self.val = x # self.left =", "# self.right = None class Solution: def pathSum(self, root, total): \"\"\" :type root:", "if root is None: return 0 path = [] count = 0 def", "count = 0 def doCount(partial): nonlocal count if partial == 0: count +=", "__init__(self, x): # self.val = x # self.left = None # self.right =", "self.val = x # self.left = None # self.right = None class Solution:", "0: count += 1 def dfs(node): nonlocal total total -= node.val doCount(total) path.append(node.val)", "[] count = 0 def doCount(partial): nonlocal count if partial == 0: count", ":type root: TreeNode :type total: int :rtype: int \"\"\" if root is None:", "int :rtype: int \"\"\" if root is None: return 0 path = []", "self.left = None # self.right = None class Solution: def pathSum(self, root, total):", "int \"\"\" if root is None: return 0 path = [] count =", "def dfs(node): nonlocal total total -= node.val doCount(total) path.append(node.val) if node.left is not", "None # self.right = None class Solution: def pathSum(self, root, total): \"\"\" :type", "count += 1 def dfs(node): nonlocal total total -= node.val doCount(total) path.append(node.val) if", "root: TreeNode :type total: int :rtype: int \"\"\" if root is None: return", "for a binary tree node. 
# class TreeNode: # def __init__(self, x): #", "is None: return 0 path = [] count = 0 def doCount(partial): nonlocal", "+= 1 def dfs(node): nonlocal total total -= node.val doCount(total) path.append(node.val) if node.left", "node.val doCount(total) path.append(node.val) if node.left is not None: dfs(node.left) if node.right is not", "0 def doCount(partial): nonlocal count if partial == 0: count += 1 for", "self.right = None class Solution: def pathSum(self, root, total): \"\"\" :type root: TreeNode", "\"\"\" if root is None: return 0 path = [] count = 0", "# Definition for a binary tree node. # class TreeNode: # def __init__(self,", "1 for val in path: partial += val if partial == 0: count", "+= 1 for val in path: partial += val if partial == 0:", "# class TreeNode: # def __init__(self, x): # self.val = x # self.left", "a binary tree node. # class TreeNode: # def __init__(self, x): # self.val", "0: count += 1 for val in path: partial += val if partial", "1 def dfs(node): nonlocal total total -= node.val doCount(total) path.append(node.val) if node.left is", "partial == 0: count += 1 def dfs(node): nonlocal total total -= node.val", "def pathSum(self, root, total): \"\"\" :type root: TreeNode :type total: int :rtype: int", "is not None: dfs(node.left) if node.right is not None: dfs(node.right) total += path.pop()", "return 0 path = [] count = 0 def doCount(partial): nonlocal count if", "if partial == 0: count += 1 for val in path: partial +=", "node. 
# class TreeNode: # def __init__(self, x): # self.val = x #", "= x # self.left = None # self.right = None class Solution: def", "in path: partial += val if partial == 0: count += 1 def", "-= node.val doCount(total) path.append(node.val) if node.left is not None: dfs(node.left) if node.right is", "if node.left is not None: dfs(node.left) if node.right is not None: dfs(node.right) total", "path: partial += val if partial == 0: count += 1 def dfs(node):", "TreeNode: # def __init__(self, x): # self.val = x # self.left = None", "x # self.left = None # self.right = None class Solution: def pathSum(self,", "nonlocal count if partial == 0: count += 1 for val in path:", "path.append(node.val) if node.left is not None: dfs(node.left) if node.right is not None: dfs(node.right)", "partial == 0: count += 1 for val in path: partial += val", "total): \"\"\" :type root: TreeNode :type total: int :rtype: int \"\"\" if root", "0 path = [] count = 0 def doCount(partial): nonlocal count if partial", "# self.left = None # self.right = None class Solution: def pathSum(self, root,", "binary tree node. # class TreeNode: # def __init__(self, x): # self.val =", "# self.val = x # self.left = None # self.right = None class", "tree node. 
# class TreeNode: # def __init__(self, x): # self.val = x", "TreeNode :type total: int :rtype: int \"\"\" if root is None: return 0", "total: int :rtype: int \"\"\" if root is None: return 0 path =", "= None # self.right = None class Solution: def pathSum(self, root, total): \"\"\"", "# def __init__(self, x): # self.val = x # self.left = None #", "val in path: partial += val if partial == 0: count += 1", "not None: dfs(node.left) if node.right is not None: dfs(node.right) total += path.pop() dfs(root)", "pathSum(self, root, total): \"\"\" :type root: TreeNode :type total: int :rtype: int \"\"\"", "= None class Solution: def pathSum(self, root, total): \"\"\" :type root: TreeNode :type", "root is None: return 0 path = [] count = 0 def doCount(partial):", "path = [] count = 0 def doCount(partial): nonlocal count if partial ==", "for val in path: partial += val if partial == 0: count +=", "\"\"\" :type root: TreeNode :type total: int :rtype: int \"\"\" if root is", "count if partial == 0: count += 1 for val in path: partial", "dfs(node): nonlocal total total -= node.val doCount(total) path.append(node.val) if node.left is not None:", "nonlocal total total -= node.val doCount(total) path.append(node.val) if node.left is not None: dfs(node.left)", "total total -= node.val doCount(total) path.append(node.val) if node.left is not None: dfs(node.left) if", "doCount(partial): nonlocal count if partial == 0: count += 1 for val in", "root, total): \"\"\" :type root: TreeNode :type total: int :rtype: int \"\"\" if", "x): # self.val = x # self.left = None # self.right = None", "== 0: count += 1 for val in path: partial += val if", "val if partial == 0: count += 1 def dfs(node): nonlocal total total", "None class Solution: def pathSum(self, root, total): \"\"\" :type root: TreeNode :type total:", "None: dfs(node.left) if node.right is not None: dfs(node.right) total += path.pop() dfs(root) return", "+= val if partial == 0: count += 1 def dfs(node): nonlocal total", "if partial 
== 0: count += 1 def dfs(node): nonlocal total total -=", "class Solution: def pathSum(self, root, total): \"\"\" :type root: TreeNode :type total: int", "dfs(node.left) if node.right is not None: dfs(node.right) total += path.pop() dfs(root) return count", "doCount(total) path.append(node.val) if node.left is not None: dfs(node.left) if node.right is not None:", ":rtype: int \"\"\" if root is None: return 0 path = [] count", "partial += val if partial == 0: count += 1 def dfs(node): nonlocal", "= 0 def doCount(partial): nonlocal count if partial == 0: count += 1", "= [] count = 0 def doCount(partial): nonlocal count if partial == 0:", "node.left is not None: dfs(node.left) if node.right is not None: dfs(node.right) total +=", "def __init__(self, x): # self.val = x # self.left = None # self.right", "== 0: count += 1 def dfs(node): nonlocal total total -= node.val doCount(total)" ]
[ "print(\"Received event: \" + json.dumps(event, indent=4)) for record in event['Records']: # Kinesis data", "json.dumps(event, indent=4)) for record in event['Records']: # Kinesis data is base64 encoded so", "OUTBOUND_EVENT = 'FileFiltered' LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def reword(word): ''' Removes non-letters from word", "''' reworded = '' for letter in word: if letter not in LETTERS:", "for record in event['Records']: # Kinesis data is base64 encoded so decode here", "LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def reword(word): ''' Removes non-letters from word ''' reworded =", "boto3 import base64 OUTBOUND_EVENT = 'FileFiltered' LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def reword(word): ''' Removes", "return True def filter_out_non_words(event, context): kinesis = boto3.client('kinesis') print(\"Received event: \" + json.dumps(event,", "try: payload = json.loads(payload) execution_id = payload.get('execution_id') words_arr = payload['words_arr'] # Filter non", "False return True def filter_out_non_words(event, context): kinesis = boto3.client('kinesis') print(\"Received event: \" +", "letter in word: if letter not in LETTERS: continue reworded = reworded +", "non words words_filtered = [] for w in words_arr: reworded = reword(w) if", "is_word(reworded): continue words_filtered.append(reworded) data = json.dumps({ 'execution_id': execution_id, 'words_filtered': words_filtered }) kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data,", "= payload.get('execution_id') words_arr = payload['words_arr'] # Filter non words words_filtered = [] for", "filter_out_non_words(event, context): kinesis = boto3.client('kinesis') print(\"Received event: \" + json.dumps(event, indent=4)) for record", "= json.dumps({ 'execution_id': execution_id, 'words_filtered': words_filtered }) kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data) except Exception as", "reword(word): ''' 
Removes non-letters from word ''' reworded = '' for letter in", "indent=4)) for record in event['Records']: # Kinesis data is base64 encoded so decode", "payload: \" + payload) try: payload = json.loads(payload) execution_id = payload.get('execution_id') words_arr =", "'' for letter in word: if letter not in LETTERS: continue reworded =", "Data=data, PartitionKey=data) except Exception as ex: print \"Error processing record, error=%s\" % str(object=ex)", "payload.get('execution_id') words_arr = payload['words_arr'] # Filter non words words_filtered = [] for w", "is_word(word): if len(word) <= 1: return False return True def filter_out_non_words(event, context): kinesis", "}) kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data) except Exception as ex: print \"Error processing record, error=%s\"", "event: \" + json.dumps(event, indent=4)) for record in event['Records']: # Kinesis data is", "json.loads(payload) execution_id = payload.get('execution_id') words_arr = payload['words_arr'] # Filter non words words_filtered =", "execution_id, 'words_filtered': words_filtered }) kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data) except Exception as ex: print \"Error", "if letter not in LETTERS: continue reworded = reworded + letter return reworded", "data = json.dumps({ 'execution_id': execution_id, 'words_filtered': words_filtered }) kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data) except Exception", "+ json.dumps(event, indent=4)) for record in event['Records']: # Kinesis data is base64 encoded", "payload = base64.b64decode(record['kinesis']['data']) print(\"Decoded payload: \" + payload) try: payload = json.loads(payload) execution_id", "\" + payload) try: payload = json.loads(payload) execution_id = payload.get('execution_id') words_arr = payload['words_arr']", "reworded: continue if not is_word(reworded): continue words_filtered.append(reworded) data = json.dumps({ 'execution_id': execution_id, 
'words_filtered':", "kinesis = boto3.client('kinesis') print(\"Received event: \" + json.dumps(event, indent=4)) for record in event['Records']:", "for letter in word: if letter not in LETTERS: continue reworded = reworded", "payload) try: payload = json.loads(payload) execution_id = payload.get('execution_id') words_arr = payload['words_arr'] # Filter", "print(\"Decoded payload: \" + payload) try: payload = json.loads(payload) execution_id = payload.get('execution_id') words_arr", "# Filter non words words_filtered = [] for w in words_arr: reworded =", "base64 encoded so decode here payload = base64.b64decode(record['kinesis']['data']) print(\"Decoded payload: \" + payload)", "reword(w) if not reworded: continue if not is_word(reworded): continue words_filtered.append(reworded) data = json.dumps({", "def is_word(word): if len(word) <= 1: return False return True def filter_out_non_words(event, context):", "= payload['words_arr'] # Filter non words words_filtered = [] for w in words_arr:", "reworded = reword(w) if not reworded: continue if not is_word(reworded): continue words_filtered.append(reworded) data", "'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def reword(word): ''' Removes non-letters from word ''' reworded = '' for", "import json import boto3 import base64 OUTBOUND_EVENT = 'FileFiltered' LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def", "event['Records']: # Kinesis data is base64 encoded so decode here payload = base64.b64decode(record['kinesis']['data'])", "if len(word) <= 1: return False return True def filter_out_non_words(event, context): kinesis =", "words words_filtered = [] for w in words_arr: reworded = reword(w) if not", "data is base64 encoded so decode here payload = base64.b64decode(record['kinesis']['data']) print(\"Decoded payload: \"", "words_arr = payload['words_arr'] # Filter non words words_filtered = [] for w in", "in words_arr: reworded = reword(w) if not reworded: continue if not is_word(reworded): 
continue", "reworded def is_word(word): if len(word) <= 1: return False return True def filter_out_non_words(event,", "= [] for w in words_arr: reworded = reword(w) if not reworded: continue", "\" + json.dumps(event, indent=4)) for record in event['Records']: # Kinesis data is base64", "'words_filtered': words_filtered }) kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data) except Exception as ex: print \"Error processing", "+ letter return reworded def is_word(word): if len(word) <= 1: return False return", "from word ''' reworded = '' for letter in word: if letter not", "w in words_arr: reworded = reword(w) if not reworded: continue if not is_word(reworded):", "in event['Records']: # Kinesis data is base64 encoded so decode here payload =", "= json.loads(payload) execution_id = payload.get('execution_id') words_arr = payload['words_arr'] # Filter non words words_filtered", "words_filtered.append(reworded) data = json.dumps({ 'execution_id': execution_id, 'words_filtered': words_filtered }) kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data) except", "import base64 OUTBOUND_EVENT = 'FileFiltered' LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def reword(word): ''' Removes non-letters", "# Kinesis data is base64 encoded so decode here payload = base64.b64decode(record['kinesis']['data']) print(\"Decoded", "record in event['Records']: # Kinesis data is base64 encoded so decode here payload", "words_filtered }) kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data) except Exception as ex: print \"Error processing record,", "Exception as ex: print \"Error processing record, error=%s\" % str(object=ex) return 'Processed all", "return False return True def filter_out_non_words(event, context): kinesis = boto3.client('kinesis') print(\"Received event: \"", "if not reworded: continue if not is_word(reworded): continue words_filtered.append(reworded) data = json.dumps({ 'execution_id':", 
"= reworded + letter return reworded def is_word(word): if len(word) <= 1: return", "letter return reworded def is_word(word): if len(word) <= 1: return False return True", "Kinesis data is base64 encoded so decode here payload = base64.b64decode(record['kinesis']['data']) print(\"Decoded payload:", "letter not in LETTERS: continue reworded = reworded + letter return reworded def", "kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data) except Exception as ex: print \"Error processing record, error=%s\" %", "len(word) <= 1: return False return True def filter_out_non_words(event, context): kinesis = boto3.client('kinesis')", "= reword(w) if not reworded: continue if not is_word(reworded): continue words_filtered.append(reworded) data =", "json.dumps({ 'execution_id': execution_id, 'words_filtered': words_filtered }) kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data) except Exception as ex:", "= 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def reword(word): ''' Removes non-letters from word ''' reworded = ''", "reworded = reworded + letter return reworded def is_word(word): if len(word) <= 1:", "continue words_filtered.append(reworded) data = json.dumps({ 'execution_id': execution_id, 'words_filtered': words_filtered }) kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data)", "not is_word(reworded): continue words_filtered.append(reworded) data = json.dumps({ 'execution_id': execution_id, 'words_filtered': words_filtered }) kinesis.put_record(StreamName=OUTBOUND_EVENT,", "'FileFiltered' LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def reword(word): ''' Removes non-letters from word ''' reworded", "in LETTERS: continue reworded = reworded + letter return reworded def is_word(word): if", "<= 1: return False return True def filter_out_non_words(event, context): kinesis = boto3.client('kinesis') print(\"Received", "json import boto3 import base64 OUTBOUND_EVENT = 
'FileFiltered' LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def reword(word):", "continue if not is_word(reworded): continue words_filtered.append(reworded) data = json.dumps({ 'execution_id': execution_id, 'words_filtered': words_filtered", "= base64.b64decode(record['kinesis']['data']) print(\"Decoded payload: \" + payload) try: payload = json.loads(payload) execution_id =", "in word: if letter not in LETTERS: continue reworded = reworded + letter", "<filename>examples/wordcount/lambda_filter.py import json import boto3 import base64 OUTBOUND_EVENT = 'FileFiltered' LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'", "context): kinesis = boto3.client('kinesis') print(\"Received event: \" + json.dumps(event, indent=4)) for record in", "so decode here payload = base64.b64decode(record['kinesis']['data']) print(\"Decoded payload: \" + payload) try: payload", "not reworded: continue if not is_word(reworded): continue words_filtered.append(reworded) data = json.dumps({ 'execution_id': execution_id,", "base64 OUTBOUND_EVENT = 'FileFiltered' LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def reword(word): ''' Removes non-letters from", "not in LETTERS: continue reworded = reworded + letter return reworded def is_word(word):", "base64.b64decode(record['kinesis']['data']) print(\"Decoded payload: \" + payload) try: payload = json.loads(payload) execution_id = payload.get('execution_id')", "def filter_out_non_words(event, context): kinesis = boto3.client('kinesis') print(\"Received event: \" + json.dumps(event, indent=4)) for", "= boto3.client('kinesis') print(\"Received event: \" + json.dumps(event, indent=4)) for record in event['Records']: #", "= 'FileFiltered' LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def reword(word): ''' Removes non-letters from word '''", "return reworded def is_word(word): if len(word) <= 1: return False return True def", "non-letters from word ''' reworded = '' for letter 
in word: if letter", "words_filtered = [] for w in words_arr: reworded = reword(w) if not reworded:", "[] for w in words_arr: reworded = reword(w) if not reworded: continue if", "PartitionKey=data) except Exception as ex: print \"Error processing record, error=%s\" % str(object=ex) return", "reworded = '' for letter in word: if letter not in LETTERS: continue", "for w in words_arr: reworded = reword(w) if not reworded: continue if not", "def reword(word): ''' Removes non-letters from word ''' reworded = '' for letter", "import boto3 import base64 OUTBOUND_EVENT = 'FileFiltered' LETTERS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' def reword(word): '''", "word: if letter not in LETTERS: continue reworded = reworded + letter return", "decode here payload = base64.b64decode(record['kinesis']['data']) print(\"Decoded payload: \" + payload) try: payload =", "payload = json.loads(payload) execution_id = payload.get('execution_id') words_arr = payload['words_arr'] # Filter non words", "+ payload) try: payload = json.loads(payload) execution_id = payload.get('execution_id') words_arr = payload['words_arr'] #", "Filter non words words_filtered = [] for w in words_arr: reworded = reword(w)", "LETTERS: continue reworded = reworded + letter return reworded def is_word(word): if len(word)", "Removes non-letters from word ''' reworded = '' for letter in word: if", "except Exception as ex: print \"Error processing record, error=%s\" % str(object=ex) return 'Processed", "boto3.client('kinesis') print(\"Received event: \" + json.dumps(event, indent=4)) for record in event['Records']: # Kinesis", "'execution_id': execution_id, 'words_filtered': words_filtered }) kinesis.put_record(StreamName=OUTBOUND_EVENT, Data=data, PartitionKey=data) except Exception as ex: print", "encoded so decode here payload = base64.b64decode(record['kinesis']['data']) print(\"Decoded payload: \" + payload) try:", "''' Removes non-letters from word ''' reworded = '' for letter in word:", 
"payload['words_arr'] # Filter non words words_filtered = [] for w in words_arr: reworded", "execution_id = payload.get('execution_id') words_arr = payload['words_arr'] # Filter non words words_filtered = []", "if not is_word(reworded): continue words_filtered.append(reworded) data = json.dumps({ 'execution_id': execution_id, 'words_filtered': words_filtered })", "words_arr: reworded = reword(w) if not reworded: continue if not is_word(reworded): continue words_filtered.append(reworded)", "continue reworded = reworded + letter return reworded def is_word(word): if len(word) <=", "True def filter_out_non_words(event, context): kinesis = boto3.client('kinesis') print(\"Received event: \" + json.dumps(event, indent=4))", "word ''' reworded = '' for letter in word: if letter not in", "here payload = base64.b64decode(record['kinesis']['data']) print(\"Decoded payload: \" + payload) try: payload = json.loads(payload)", "reworded + letter return reworded def is_word(word): if len(word) <= 1: return False", "1: return False return True def filter_out_non_words(event, context): kinesis = boto3.client('kinesis') print(\"Received event:", "as ex: print \"Error processing record, error=%s\" % str(object=ex) return 'Processed all records.'", "= '' for letter in word: if letter not in LETTERS: continue reworded", "is base64 encoded so decode here payload = base64.b64decode(record['kinesis']['data']) print(\"Decoded payload: \" +" ]
[ "<gh_stars>1-10 file = input('Enter the file name: ') file_extsn= file.split(\".\") print(f\"The file extension", "= input('Enter the file name: ') file_extsn= file.split(\".\") print(f\"The file extension is {file_extsn[1]}\")", "file = input('Enter the file name: ') file_extsn= file.split(\".\") print(f\"The file extension is" ]
[ "'slower': p[0] = p[1] < p[5] def p_error(p): print(f\"Synax error at {p.value!r}\") #set", "\".\" + 'osxc', \"r\") for line in f: file_str = '' file_str +=", "'''statement : IFA VAR IFB statement''' if p[4]: p[6] def p_statement_file_in(p): 'statement :", "VAR IFB statement''' if p[4]: p[6] def p_statement_file_in(p): 'statement : FILEIN VAR' file_str", "'statement : OUTPUT expression' print(\"Coach says \" + str(p[2]) + \"!\") def p_statement_if(p):", "p_statement_expr(p): 'statement : expression' def p_statement_output(p): 'statement : OUTPUT expression' print(\"Coach says \"", "ADD expression | expression SUBA SUBB expression | expression MULT expression | expression", "LESSTHAN GREATLESSTHANB expression''' if p[4] == 'same': p[0] = p[1] == p[6] elif", "elif p[1] == 'finished' and p[2] == 'of': p[0] = p[0] - p[3]", "yacc from CoachLex import tokens #enviromental variables enviro_vars = {} def p_statement_assign(p): 'statement", "VARINT VAR expression' enviro_vars[p[2]] = p[3] def p_statement_expr(p): 'statement : expression' def p_statement_output(p):", "statement''' if p[4]: p[6] def p_statement_file_in(p): 'statement : FILEIN VAR' file_str = \"\"", "import ply.yacc as yacc from CoachLex import tokens #enviromental variables enviro_vars = {}", "p[1] def p_expression_var(p): 'expression : VAR' try: p[0] = enviro_vars[p[1]] except LookupError: print(\"undefined", "'split': p[0] = p[1] / p[3] def p_expression_number(p): 'expression : NUMBER' p[0] =", "elif p[3] == 'faster': p[0] = p[1] > p[5] elif p[3] == 'slower':", "line.rstrip('\\n') yaccer.parse(file_str) #Basic Math def p_expression_basicop(p): '''expression : expression ADD expression | expression", ": VARINT VAR expression' enviro_vars[p[2]] = p[3] def p_statement_expr(p): 'statement : expression' def", "p_statement_file_in(p): 'statement : FILEIN VAR' file_str = \"\" f = open(p[2] + \".\"", "import tokens #enviromental variables enviro_vars = {} def p_statement_assign(p): 'statement : VARINT VAR", 
"str(p[2]) + \"!\") def p_statement_if(p): '''statement : IFA VAR IFB statement''' if p[4]:", "#Basic Math def p_expression_basicop(p): '''expression : expression ADD expression | expression SUBA SUBB", "> p[5] elif p[3] == 'slower': p[0] = p[1] < p[5] def p_error(p):", "'finished' and p[2] == 'of': p[0] = p[0] - p[3] elif p[2] ==", "== p[6] elif p[3] == 'faster': p[0] = p[1] > p[5] elif p[3]", "p[0] elif p[1] == 'finished' and p[2] == 'of': p[0] = p[0] -", ": VAR' try: p[0] = enviro_vars[p[1]] except LookupError: print(\"undefined var, resorting to 0\")", "expression''' if p[4] == 'same': p[0] = p[1] == p[6] elif p[3] ==", "p[3] elif p[2] == 'split': p[0] = p[1] / p[3] def p_expression_number(p): 'expression", "enviro_vars[p[1]] except LookupError: print(\"undefined var, resorting to 0\") p[0] = 0 def p_comparison_binop(p):", "yaccer.parse(file_str) #Basic Math def p_expression_basicop(p): '''expression : expression ADD expression | expression SUBA", "= p[1] < p[5] def p_error(p): print(f\"Synax error at {p.value!r}\") #set up yacc", ": expression ADD expression | expression SUBA SUBB expression | expression MULT expression", "+ str(p[2]) + \"!\") def p_statement_if(p): '''statement : IFA VAR IFB statement''' if", "tokens #enviromental variables enviro_vars = {} def p_statement_assign(p): 'statement : VARINT VAR expression'", "def p_statement_file_in(p): 'statement : FILEIN VAR' file_str = \"\" f = open(p[2] +", "SUBA SUBB expression | expression MULT expression | expression DIV expression''' if p[1]", "- p[3] elif p[2] == 'by': p[0] = p[1] * p[3] elif p[2]", "file_str = \"\" f = open(p[2] + \".\" + 'osxc', \"r\") for line", "{} def p_statement_assign(p): 'statement : VARINT VAR expression' enviro_vars[p[2]] = p[3] def p_statement_expr(p):", "| expression MULT expression | expression DIV expression''' if p[1] == \"add\": p[0]", "from CoachLex import tokens #enviromental variables enviro_vars = {} def p_statement_assign(p): 'statement :", "'statement : VARINT VAR 
expression' enviro_vars[p[2]] = p[3] def p_statement_expr(p): 'statement : expression'", "elif p[3] == 'slower': p[0] = p[1] < p[5] def p_error(p): print(f\"Synax error", "p[0] = p[1] == p[6] elif p[3] == 'faster': p[0] = p[1] >", "| expression SUBA SUBB expression | expression MULT expression | expression DIV expression'''", "expression''' if p[1] == \"add\": p[0] = p[2] + p[0] elif p[1] ==", "/ p[3] def p_expression_number(p): 'expression : NUMBER' p[0] = p[1] def p_expression_var(p): 'expression", "| expression DIV expression''' if p[1] == \"add\": p[0] = p[2] + p[0]", "p[4]: p[6] def p_statement_file_in(p): 'statement : FILEIN VAR' file_str = \"\" f =", "'''expression : expression ADD expression | expression SUBA SUBB expression | expression MULT", "at {p.value!r}\") #set up yacc yaccer = yacc.yacc() while True: try: s =", "+ \"!\") def p_statement_if(p): '''statement : IFA VAR IFB statement''' if p[4]: p[6]", "p[6] def p_statement_file_in(p): 'statement : FILEIN VAR' file_str = \"\" f = open(p[2]", "def p_statement_assign(p): 'statement : VARINT VAR expression' enviro_vars[p[2]] = p[3] def p_statement_expr(p): 'statement", "+ 'osxc', \"r\") for line in f: file_str = '' file_str += line.rstrip('\\n')", "= p[0] - p[3] elif p[2] == 'by': p[0] = p[1] * p[3]", "0\") p[0] = 0 def p_comparison_binop(p): '''comparison : expression GREATLESSTHANA EQUALTOA EQUALTOB expression", "p[1] < p[5] def p_error(p): print(f\"Synax error at {p.value!r}\") #set up yacc yaccer", "def p_expression_basicop(p): '''expression : expression ADD expression | expression SUBA SUBB expression |", "p[3] == 'slower': p[0] = p[1] < p[5] def p_error(p): print(f\"Synax error at", "p_error(p): print(f\"Synax error at {p.value!r}\") #set up yacc yaccer = yacc.yacc() while True:", "enviro_vars[p[2]] = p[3] def p_statement_expr(p): 'statement : expression' def p_statement_output(p): 'statement : OUTPUT", "FILEIN VAR' file_str = \"\" f = open(p[2] + \".\" + 'osxc', \"r\")", "| expression GREATLESSTHANA 
LESSTHAN GREATLESSTHANB expression''' if p[4] == 'same': p[0] = p[1]", "p[2] == 'of': p[0] = p[0] - p[3] elif p[2] == 'by': p[0]", "file_str += line.rstrip('\\n') yaccer.parse(file_str) #Basic Math def p_expression_basicop(p): '''expression : expression ADD expression", "expression DIV expression''' if p[1] == \"add\": p[0] = p[2] + p[0] elif", "= yacc.yacc() while True: try: s = input('> ') except EOFError: break yaccer.parse(s)", "expression | expression DIV expression''' if p[1] == \"add\": p[0] = p[2] +", "= {} def p_statement_assign(p): 'statement : VARINT VAR expression' enviro_vars[p[2]] = p[3] def", "p[1] > p[5] elif p[3] == 'slower': p[0] = p[1] < p[5] def", "if p[1] == \"add\": p[0] = p[2] + p[0] elif p[1] == 'finished'", "resorting to 0\") p[0] = 0 def p_comparison_binop(p): '''comparison : expression GREATLESSTHANA EQUALTOA", "DIV expression''' if p[1] == \"add\": p[0] = p[2] + p[0] elif p[1]", "def p_statement_output(p): 'statement : OUTPUT expression' print(\"Coach says \" + str(p[2]) + \"!\")", ": IFA VAR IFB statement''' if p[4]: p[6] def p_statement_file_in(p): 'statement : FILEIN", "def p_statement_if(p): '''statement : IFA VAR IFB statement''' if p[4]: p[6] def p_statement_file_in(p):", "== \"add\": p[0] = p[2] + p[0] elif p[1] == 'finished' and p[2]", "p[3] def p_expression_number(p): 'expression : NUMBER' p[0] = p[1] def p_expression_var(p): 'expression :", "== 'faster': p[0] = p[1] > p[5] elif p[3] == 'slower': p[0] =", "= '' file_str += line.rstrip('\\n') yaccer.parse(file_str) #Basic Math def p_expression_basicop(p): '''expression : expression", "#enviromental variables enviro_vars = {} def p_statement_assign(p): 'statement : VARINT VAR expression' enviro_vars[p[2]]", "EQUALTOA EQUALTOB expression | expression GREATLESSTHANA GREATERTHAN GREATLESSTHANB expression | expression GREATLESSTHANA LESSTHAN", "= enviro_vars[p[1]] except LookupError: print(\"undefined var, resorting to 0\") p[0] = 0 def", "= p[3] def p_statement_expr(p): 'statement : 
expression' def p_statement_output(p): 'statement : OUTPUT expression'", "p[1] / p[3] def p_expression_number(p): 'expression : NUMBER' p[0] = p[1] def p_expression_var(p):", "| expression GREATLESSTHANA GREATERTHAN GREATLESSTHANB expression | expression GREATLESSTHANA LESSTHAN GREATLESSTHANB expression''' if", "file_str = '' file_str += line.rstrip('\\n') yaccer.parse(file_str) #Basic Math def p_expression_basicop(p): '''expression :", "GREATLESSTHANB expression''' if p[4] == 'same': p[0] = p[1] == p[6] elif p[3]", "expression GREATLESSTHANA EQUALTOA EQUALTOB expression | expression GREATLESSTHANA GREATERTHAN GREATLESSTHANB expression | expression", "if p[4]: p[6] def p_statement_file_in(p): 'statement : FILEIN VAR' file_str = \"\" f", "GREATLESSTHANA GREATERTHAN GREATLESSTHANB expression | expression GREATLESSTHANA LESSTHAN GREATLESSTHANB expression''' if p[4] ==", "+= line.rstrip('\\n') yaccer.parse(file_str) #Basic Math def p_expression_basicop(p): '''expression : expression ADD expression |", "p[0] = enviro_vars[p[1]] except LookupError: print(\"undefined var, resorting to 0\") p[0] = 0", "\" + str(p[2]) + \"!\") def p_statement_if(p): '''statement : IFA VAR IFB statement'''", "ply.yacc as yacc from CoachLex import tokens #enviromental variables enviro_vars = {} def", "+ p[0] elif p[1] == 'finished' and p[2] == 'of': p[0] = p[0]", "in f: file_str = '' file_str += line.rstrip('\\n') yaccer.parse(file_str) #Basic Math def p_expression_basicop(p):", "'expression : VAR' try: p[0] = enviro_vars[p[1]] except LookupError: print(\"undefined var, resorting to", "'expression : NUMBER' p[0] = p[1] def p_expression_var(p): 'expression : VAR' try: p[0]", ": expression GREATLESSTHANA EQUALTOA EQUALTOB expression | expression GREATLESSTHANA GREATERTHAN GREATLESSTHANB expression |", "IFA VAR IFB statement''' if p[4]: p[6] def p_statement_file_in(p): 'statement : FILEIN VAR'", "= open(p[2] + \".\" + 'osxc', \"r\") for line in f: file_str =", "* p[3] elif p[2] == 'split': p[0] 
= p[1] / p[3] def p_expression_number(p):", "= p[1] def p_expression_var(p): 'expression : VAR' try: p[0] = enviro_vars[p[1]] except LookupError:", "expression | expression MULT expression | expression DIV expression''' if p[1] == \"add\":", "p[1] == \"add\": p[0] = p[2] + p[0] elif p[1] == 'finished' and", "= p[2] + p[0] elif p[1] == 'finished' and p[2] == 'of': p[0]", "LookupError: print(\"undefined var, resorting to 0\") p[0] = 0 def p_comparison_binop(p): '''comparison :", "print(f\"Synax error at {p.value!r}\") #set up yacc yaccer = yacc.yacc() while True: try:", "p[1] == p[6] elif p[3] == 'faster': p[0] = p[1] > p[5] elif", "p[0] = p[1] def p_expression_var(p): 'expression : VAR' try: p[0] = enviro_vars[p[1]] except", "f = open(p[2] + \".\" + 'osxc', \"r\") for line in f: file_str", "+ \".\" + 'osxc', \"r\") for line in f: file_str = '' file_str", "def p_statement_expr(p): 'statement : expression' def p_statement_output(p): 'statement : OUTPUT expression' print(\"Coach says", "p[2] == 'by': p[0] = p[1] * p[3] elif p[2] == 'split': p[0]", "expression | expression SUBA SUBB expression | expression MULT expression | expression DIV", "\"\" f = open(p[2] + \".\" + 'osxc', \"r\") for line in f:", "def p_expression_number(p): 'expression : NUMBER' p[0] = p[1] def p_expression_var(p): 'expression : VAR'", "= 0 def p_comparison_binop(p): '''comparison : expression GREATLESSTHANA EQUALTOA EQUALTOB expression | expression", "MULT expression | expression DIV expression''' if p[1] == \"add\": p[0] = p[2]", "== 'slower': p[0] = p[1] < p[5] def p_error(p): print(f\"Synax error at {p.value!r}\")", "yaccer = yacc.yacc() while True: try: s = input('> ') except EOFError: break", "OUTPUT expression' print(\"Coach says \" + str(p[2]) + \"!\") def p_statement_if(p): '''statement :", "0 def p_comparison_binop(p): '''comparison : expression GREATLESSTHANA EQUALTOA EQUALTOB expression | expression GREATLESSTHANA", "GREATLESSTHANA LESSTHAN GREATLESSTHANB expression''' if p[4] == 
'same': p[0] = p[1] == p[6]", "\"r\") for line in f: file_str = '' file_str += line.rstrip('\\n') yaccer.parse(file_str) #Basic", "expression SUBA SUBB expression | expression MULT expression | expression DIV expression''' if", "= \"\" f = open(p[2] + \".\" + 'osxc', \"r\") for line in", "VAR' try: p[0] = enviro_vars[p[1]] except LookupError: print(\"undefined var, resorting to 0\") p[0]", "p_expression_number(p): 'expression : NUMBER' p[0] = p[1] def p_expression_var(p): 'expression : VAR' try:", "GREATLESSTHANA EQUALTOA EQUALTOB expression | expression GREATLESSTHANA GREATERTHAN GREATLESSTHANB expression | expression GREATLESSTHANA", ": FILEIN VAR' file_str = \"\" f = open(p[2] + \".\" + 'osxc',", "elif p[2] == 'by': p[0] = p[1] * p[3] elif p[2] == 'split':", "p[0] - p[3] elif p[2] == 'by': p[0] = p[1] * p[3] elif", "p_expression_basicop(p): '''expression : expression ADD expression | expression SUBA SUBB expression | expression", "p[1] == 'finished' and p[2] == 'of': p[0] = p[0] - p[3] elif", "p[4] == 'same': p[0] = p[1] == p[6] elif p[3] == 'faster': p[0]", "expression ADD expression | expression SUBA SUBB expression | expression MULT expression |", "== 'split': p[0] = p[1] / p[3] def p_expression_number(p): 'expression : NUMBER' p[0]", "CoachLex import tokens #enviromental variables enviro_vars = {} def p_statement_assign(p): 'statement : VARINT", "p[6] elif p[3] == 'faster': p[0] = p[1] > p[5] elif p[3] ==", "'same': p[0] = p[1] == p[6] elif p[3] == 'faster': p[0] = p[1]", "VAR' file_str = \"\" f = open(p[2] + \".\" + 'osxc', \"r\") for", "'statement : FILEIN VAR' file_str = \"\" f = open(p[2] + \".\" +", "expression' print(\"Coach says \" + str(p[2]) + \"!\") def p_statement_if(p): '''statement : IFA", "== 'same': p[0] = p[1] == p[6] elif p[3] == 'faster': p[0] =", "def p_expression_var(p): 'expression : VAR' try: p[0] = enviro_vars[p[1]] except LookupError: print(\"undefined var,", "as yacc from CoachLex import tokens #enviromental variables enviro_vars = 
{} def p_statement_assign(p):", "'by': p[0] = p[1] * p[3] elif p[2] == 'split': p[0] = p[1]", "expression GREATLESSTHANA GREATERTHAN GREATLESSTHANB expression | expression GREATLESSTHANA LESSTHAN GREATLESSTHANB expression''' if p[4]", "expression | expression GREATLESSTHANA GREATERTHAN GREATLESSTHANB expression | expression GREATLESSTHANA LESSTHAN GREATLESSTHANB expression'''", "\"!\") def p_statement_if(p): '''statement : IFA VAR IFB statement''' if p[4]: p[6] def", "'' file_str += line.rstrip('\\n') yaccer.parse(file_str) #Basic Math def p_expression_basicop(p): '''expression : expression ADD", "expression | expression GREATLESSTHANA LESSTHAN GREATLESSTHANB expression''' if p[4] == 'same': p[0] =", "up yacc yaccer = yacc.yacc() while True: try: s = input('> ') except", "for line in f: file_str = '' file_str += line.rstrip('\\n') yaccer.parse(file_str) #Basic Math", "says \" + str(p[2]) + \"!\") def p_statement_if(p): '''statement : IFA VAR IFB", "p[0] = p[1] > p[5] elif p[3] == 'slower': p[0] = p[1] <", "'of': p[0] = p[0] - p[3] elif p[2] == 'by': p[0] = p[1]", "'''comparison : expression GREATLESSTHANA EQUALTOA EQUALTOB expression | expression GREATLESSTHANA GREATERTHAN GREATLESSTHANB expression", "p_statement_output(p): 'statement : OUTPUT expression' print(\"Coach says \" + str(p[2]) + \"!\") def", "variables enviro_vars = {} def p_statement_assign(p): 'statement : VARINT VAR expression' enviro_vars[p[2]] =", "'faster': p[0] = p[1] > p[5] elif p[3] == 'slower': p[0] = p[1]", "f: file_str = '' file_str += line.rstrip('\\n') yaccer.parse(file_str) #Basic Math def p_expression_basicop(p): '''expression", "p[3] elif p[2] == 'by': p[0] = p[1] * p[3] elif p[2] ==", "p[1] * p[3] elif p[2] == 'split': p[0] = p[1] / p[3] def", "IFB statement''' if p[4]: p[6] def p_statement_file_in(p): 'statement : FILEIN VAR' file_str =", "except LookupError: print(\"undefined var, resorting to 0\") p[0] = 0 def p_comparison_binop(p): '''comparison", "p[5] def p_error(p): 
print(f\"Synax error at {p.value!r}\") #set up yacc yaccer = yacc.yacc()", "\"add\": p[0] = p[2] + p[0] elif p[1] == 'finished' and p[2] ==", "p[0] = p[1] < p[5] def p_error(p): print(f\"Synax error at {p.value!r}\") #set up", "error at {p.value!r}\") #set up yacc yaccer = yacc.yacc() while True: try: s", "< p[5] def p_error(p): print(f\"Synax error at {p.value!r}\") #set up yacc yaccer =", "if p[4] == 'same': p[0] = p[1] == p[6] elif p[3] == 'faster':", "'osxc', \"r\") for line in f: file_str = '' file_str += line.rstrip('\\n') yaccer.parse(file_str)", "p[0] = p[1] * p[3] elif p[2] == 'split': p[0] = p[1] /", "= p[1] / p[3] def p_expression_number(p): 'expression : NUMBER' p[0] = p[1] def", "= p[1] > p[5] elif p[3] == 'slower': p[0] = p[1] < p[5]", "and p[2] == 'of': p[0] = p[0] - p[3] elif p[2] == 'by':", "elif p[2] == 'split': p[0] = p[1] / p[3] def p_expression_number(p): 'expression :", "p[5] elif p[3] == 'slower': p[0] = p[1] < p[5] def p_error(p): print(f\"Synax", "def p_comparison_binop(p): '''comparison : expression GREATLESSTHANA EQUALTOA EQUALTOB expression | expression GREATLESSTHANA GREATERTHAN", "'statement : expression' def p_statement_output(p): 'statement : OUTPUT expression' print(\"Coach says \" +", "expression GREATLESSTHANA LESSTHAN GREATLESSTHANB expression''' if p[4] == 'same': p[0] = p[1] ==", "{p.value!r}\") #set up yacc yaccer = yacc.yacc() while True: try: s = input('>", "p_comparison_binop(p): '''comparison : expression GREATLESSTHANA EQUALTOA EQUALTOB expression | expression GREATLESSTHANA GREATERTHAN GREATLESSTHANB", "print(\"undefined var, resorting to 0\") p[0] = 0 def p_comparison_binop(p): '''comparison : expression", "== 'finished' and p[2] == 'of': p[0] = p[0] - p[3] elif p[2]", "VAR expression' enviro_vars[p[2]] = p[3] def p_statement_expr(p): 'statement : expression' def p_statement_output(p): 'statement", "#set up yacc yaccer = yacc.yacc() while True: try: s = input('> ')", "expression' enviro_vars[p[2]] = p[3] def 
p_statement_expr(p): 'statement : expression' def p_statement_output(p): 'statement :", "print(\"Coach says \" + str(p[2]) + \"!\") def p_statement_if(p): '''statement : IFA VAR", "open(p[2] + \".\" + 'osxc', \"r\") for line in f: file_str = ''", "p[3] def p_statement_expr(p): 'statement : expression' def p_statement_output(p): 'statement : OUTPUT expression' print(\"Coach", "p[2] == 'split': p[0] = p[1] / p[3] def p_expression_number(p): 'expression : NUMBER'", "yacc yaccer = yacc.yacc() while True: try: s = input('> ') except EOFError:", "enviro_vars = {} def p_statement_assign(p): 'statement : VARINT VAR expression' enviro_vars[p[2]] = p[3]", "p[0] = 0 def p_comparison_binop(p): '''comparison : expression GREATLESSTHANA EQUALTOA EQUALTOB expression |", "p[0] = p[2] + p[0] elif p[1] == 'finished' and p[2] == 'of':", "SUBB expression | expression MULT expression | expression DIV expression''' if p[1] ==", "== 'of': p[0] = p[0] - p[3] elif p[2] == 'by': p[0] =", ": NUMBER' p[0] = p[1] def p_expression_var(p): 'expression : VAR' try: p[0] =", ": expression' def p_statement_output(p): 'statement : OUTPUT expression' print(\"Coach says \" + str(p[2])", "p[2] + p[0] elif p[1] == 'finished' and p[2] == 'of': p[0] =", "GREATERTHAN GREATLESSTHANB expression | expression GREATLESSTHANA LESSTHAN GREATLESSTHANB expression''' if p[4] == 'same':", "p[0] = p[1] / p[3] def p_expression_number(p): 'expression : NUMBER' p[0] = p[1]", "= p[1] * p[3] elif p[2] == 'split': p[0] = p[1] / p[3]", "== 'by': p[0] = p[1] * p[3] elif p[2] == 'split': p[0] =", "try: p[0] = enviro_vars[p[1]] except LookupError: print(\"undefined var, resorting to 0\") p[0] =", "expression' def p_statement_output(p): 'statement : OUTPUT expression' print(\"Coach says \" + str(p[2]) +", "NUMBER' p[0] = p[1] def p_expression_var(p): 'expression : VAR' try: p[0] = enviro_vars[p[1]]", ": OUTPUT expression' print(\"Coach says \" + str(p[2]) + \"!\") def p_statement_if(p): '''statement", "p[3] == 'faster': p[0] = 
p[1] > p[5] elif p[3] == 'slower': p[0]", "p_statement_if(p): '''statement : IFA VAR IFB statement''' if p[4]: p[6] def p_statement_file_in(p): 'statement", "p[0] = p[0] - p[3] elif p[2] == 'by': p[0] = p[1] *", "p_statement_assign(p): 'statement : VARINT VAR expression' enviro_vars[p[2]] = p[3] def p_statement_expr(p): 'statement :", "Math def p_expression_basicop(p): '''expression : expression ADD expression | expression SUBA SUBB expression", "= p[1] == p[6] elif p[3] == 'faster': p[0] = p[1] > p[5]", "var, resorting to 0\") p[0] = 0 def p_comparison_binop(p): '''comparison : expression GREATLESSTHANA", "to 0\") p[0] = 0 def p_comparison_binop(p): '''comparison : expression GREATLESSTHANA EQUALTOA EQUALTOB", "p_expression_var(p): 'expression : VAR' try: p[0] = enviro_vars[p[1]] except LookupError: print(\"undefined var, resorting", "line in f: file_str = '' file_str += line.rstrip('\\n') yaccer.parse(file_str) #Basic Math def", "def p_error(p): print(f\"Synax error at {p.value!r}\") #set up yacc yaccer = yacc.yacc() while", "GREATLESSTHANB expression | expression GREATLESSTHANA LESSTHAN GREATLESSTHANB expression''' if p[4] == 'same': p[0]", "expression MULT expression | expression DIV expression''' if p[1] == \"add\": p[0] =", "EQUALTOB expression | expression GREATLESSTHANA GREATERTHAN GREATLESSTHANB expression | expression GREATLESSTHANA LESSTHAN GREATLESSTHANB" ]
[ "( 'name', ) search_fields = ( 'name', ) @transaction.atomic def merge_ingredients(self, request, queryset):", "'At least two ingredients need to be selected!', messages.WARNING) return main = queryset.first()", "import Prefetch from recipe.models import Ingredient, Recipe, RecipeIngredient, RecipeInstance, \\ RecipeInstanceImage, Tag admin.site.register(Tag)", "django.db import transaction from django.db.models import Prefetch from recipe.models import Ingredient, Recipe, RecipeIngredient,", "request, 'At least two ingredients need to be selected!', messages.WARNING) return main =", "autocomplete_fields = ( 'ingredient', ) class RecipeInstanceImageInline(admin.TabularInline): model = RecipeInstanceImage @admin.register(RecipeInstance) class RecipeInstanceAdmin(admin.ModelAdmin):", "RecipeInstanceImageInline, ) @admin.register(Recipe) class RecipeAdmin(admin.ModelAdmin): list_display = ( 'name', 'tag_str', 'view_count', ) list_filter", "( 'ingredient', ) class RecipeInstanceImageInline(admin.TabularInline): model = RecipeInstanceImage @admin.register(RecipeInstance) class RecipeInstanceAdmin(admin.ModelAdmin): list_display =", "= ( RecipeIngredientInline, ) def get_queryset(self, request): qs = super().get_queryset(request) return qs.prefetch_related(Prefetch('tags', Tag.objects.order_by('name')))", "= RecipeInstanceImage @admin.register(RecipeInstance) class RecipeInstanceAdmin(admin.ModelAdmin): list_display = ( 'day', 'recipe', ) inlines =", "@admin.register(Ingredient) class IngredientAdmin(admin.ModelAdmin): list_display = ( 'name', ) search_fields = ( 'name', )", "= ( RecipeInstanceImageInline, ) @admin.register(Recipe) class RecipeAdmin(admin.ModelAdmin): list_display = ( 'name', 'tag_str', 'view_count',", "= ( 'tags', ) search_fields = ( 'name', ) inlines = ( RecipeIngredientInline,", "'name', 'tag_str', 'view_count', ) list_filter = ( 'tags', ) search_fields = ( 'name',", "from django.db.models import Prefetch from recipe.models import Ingredient, 
Recipe, RecipeIngredient, RecipeInstance, \\ RecipeInstanceImage,", "< 2: self.message_user( request, 'At least two ingredients need to be selected!', messages.WARNING)", "admin.site.register(Tag) @admin.register(Ingredient) class IngredientAdmin(admin.ModelAdmin): list_display = ( 'name', ) search_fields = ( 'name',", "def merge_ingredients(self, request, queryset): if len(queryset) < 2: self.message_user( request, 'At least two", "search_fields = ( 'name', ) inlines = ( RecipeIngredientInline, ) def get_queryset(self, request):", "inlines = ( RecipeInstanceImageInline, ) @admin.register(Recipe) class RecipeAdmin(admin.ModelAdmin): list_display = ( 'name', 'tag_str',", "RecipeInstanceImage @admin.register(RecipeInstance) class RecipeInstanceAdmin(admin.ModelAdmin): list_display = ( 'day', 'recipe', ) inlines = (", "( RecipeInstanceImageInline, ) @admin.register(Recipe) class RecipeAdmin(admin.ModelAdmin): list_display = ( 'name', 'tag_str', 'view_count', )", "= ( 'name', 'tag_str', 'view_count', ) list_filter = ( 'tags', ) search_fields =", "others = queryset[1:] len_others = len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk for i in others]).delete() self.message_user(", "import Ingredient, Recipe, RecipeIngredient, RecipeInstance, \\ RecipeInstanceImage, Tag admin.site.register(Tag) @admin.register(Ingredient) class IngredientAdmin(admin.ModelAdmin): list_display", "model = RecipeInstanceImage @admin.register(RecipeInstance) class RecipeInstanceAdmin(admin.ModelAdmin): list_display = ( 'day', 'recipe', ) inlines", "= 'Merge selected ingredients' actions = ( merge_ingredients, ) class RecipeIngredientInline(admin.TabularInline): model =", "= ( merge_ingredients, ) class RecipeIngredientInline(admin.TabularInline): model = RecipeIngredient autocomplete_fields = ( 'ingredient',", "( 'name', ) @transaction.atomic def merge_ingredients(self, request, queryset): if len(queryset) < 2: 
self.message_user(", "into {}'.format(len_others, main), messages.SUCCESS) merge_ingredients.short_description = 'Merge selected ingredients' actions = ( merge_ingredients,", "messages.SUCCESS) merge_ingredients.short_description = 'Merge selected ingredients' actions = ( merge_ingredients, ) class RecipeIngredientInline(admin.TabularInline):", "RecipeAdmin(admin.ModelAdmin): list_display = ( 'name', 'tag_str', 'view_count', ) list_filter = ( 'tags', )", "self.message_user( request, 'At least two ingredients need to be selected!', messages.WARNING) return main", "two ingredients need to be selected!', messages.WARNING) return main = queryset.first() others =", "merge_ingredients(self, request, queryset): if len(queryset) < 2: self.message_user( request, 'At least two ingredients", "model = RecipeIngredient autocomplete_fields = ( 'ingredient', ) class RecipeInstanceImageInline(admin.TabularInline): model = RecipeInstanceImage", "selected ingredients' actions = ( merge_ingredients, ) class RecipeIngredientInline(admin.TabularInline): model = RecipeIngredient autocomplete_fields", "messages.WARNING) return main = queryset.first() others = queryset[1:] len_others = len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk", "= ( 'name', ) inlines = ( RecipeIngredientInline, ) def get_queryset(self, request): qs", "messages from django.db import transaction from django.db.models import Prefetch from recipe.models import Ingredient,", "request, '{} ingredients were merged into {}'.format(len_others, main), messages.SUCCESS) merge_ingredients.short_description = 'Merge selected", ") class RecipeIngredientInline(admin.TabularInline): model = RecipeIngredient autocomplete_fields = ( 'ingredient', ) class RecipeInstanceImageInline(admin.TabularInline):", "'name', ) @transaction.atomic def merge_ingredients(self, request, queryset): if len(queryset) < 2: self.message_user( request,", "len(queryset) < 2: 
self.message_user( request, 'At least two ingredients need to be selected!',", "queryset[1:] len_others = len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk for i in others]).delete() self.message_user( request, '{}", "class RecipeAdmin(admin.ModelAdmin): list_display = ( 'name', 'tag_str', 'view_count', ) list_filter = ( 'tags',", "len_others = len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk for i in others]).delete() self.message_user( request, '{} ingredients", "@transaction.atomic def merge_ingredients(self, request, queryset): if len(queryset) < 2: self.message_user( request, 'At least", "= ( 'name', ) @transaction.atomic def merge_ingredients(self, request, queryset): if len(queryset) < 2:", "merged into {}'.format(len_others, main), messages.SUCCESS) merge_ingredients.short_description = 'Merge selected ingredients' actions = (", "list_display = ( 'day', 'recipe', ) inlines = ( RecipeInstanceImageInline, ) @admin.register(Recipe) class", ") class RecipeInstanceImageInline(admin.TabularInline): model = RecipeInstanceImage @admin.register(RecipeInstance) class RecipeInstanceAdmin(admin.ModelAdmin): list_display = ( 'day',", "'name', ) search_fields = ( 'name', ) @transaction.atomic def merge_ingredients(self, request, queryset): if", "IngredientAdmin(admin.ModelAdmin): list_display = ( 'name', ) search_fields = ( 'name', ) @transaction.atomic def", ") search_fields = ( 'name', ) @transaction.atomic def merge_ingredients(self, request, queryset): if len(queryset)", "= len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk for i in others]).delete() self.message_user( request, '{} ingredients were", "RecipeInstanceImageInline(admin.TabularInline): model = RecipeInstanceImage @admin.register(RecipeInstance) class 
RecipeInstanceAdmin(admin.ModelAdmin): list_display = ( 'day', 'recipe', )", "ingredients were merged into {}'.format(len_others, main), messages.SUCCESS) merge_ingredients.short_description = 'Merge selected ingredients' actions", ") inlines = ( RecipeInstanceImageInline, ) @admin.register(Recipe) class RecipeAdmin(admin.ModelAdmin): list_display = ( 'name',", "main = queryset.first() others = queryset[1:] len_others = len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk for i", "from recipe.models import Ingredient, Recipe, RecipeIngredient, RecipeInstance, \\ RecipeInstanceImage, Tag admin.site.register(Tag) @admin.register(Ingredient) class", "request, queryset): if len(queryset) < 2: self.message_user( request, 'At least two ingredients need", "= RecipeIngredient autocomplete_fields = ( 'ingredient', ) class RecipeInstanceImageInline(admin.TabularInline): model = RecipeInstanceImage @admin.register(RecipeInstance)", "from django.contrib import admin, messages from django.db import transaction from django.db.models import Prefetch", "( 'day', 'recipe', ) inlines = ( RecipeInstanceImageInline, ) @admin.register(Recipe) class RecipeAdmin(admin.ModelAdmin): list_display", "'view_count', ) list_filter = ( 'tags', ) search_fields = ( 'name', ) inlines", ") search_fields = ( 'name', ) inlines = ( RecipeIngredientInline, ) def get_queryset(self,", "RecipeInstanceImage, Tag admin.site.register(Tag) @admin.register(Ingredient) class IngredientAdmin(admin.ModelAdmin): list_display = ( 'name', ) search_fields =", "'tag_str', 'view_count', ) list_filter = ( 'tags', ) search_fields = ( 'name', )", ") inlines = ( RecipeIngredientInline, ) def get_queryset(self, request): qs = super().get_queryset(request) return", "ingredients need to be selected!', messages.WARNING) return main = queryset.first() others = queryset[1:]", "least two ingredients need to be selected!', messages.WARNING) return main = 
queryset.first() others", "inlines = ( RecipeIngredientInline, ) def get_queryset(self, request): qs = super().get_queryset(request) return qs.prefetch_related(Prefetch('tags',", "search_fields = ( 'name', ) @transaction.atomic def merge_ingredients(self, request, queryset): if len(queryset) <", "RecipeIngredient autocomplete_fields = ( 'ingredient', ) class RecipeInstanceImageInline(admin.TabularInline): model = RecipeInstanceImage @admin.register(RecipeInstance) class", "( 'tags', ) search_fields = ( 'name', ) inlines = ( RecipeIngredientInline, )", "@admin.register(RecipeInstance) class RecipeInstanceAdmin(admin.ModelAdmin): list_display = ( 'day', 'recipe', ) inlines = ( RecipeInstanceImageInline,", "{}'.format(len_others, main), messages.SUCCESS) merge_ingredients.short_description = 'Merge selected ingredients' actions = ( merge_ingredients, )", "i in others]).delete() self.message_user( request, '{} ingredients were merged into {}'.format(len_others, main), messages.SUCCESS)", "admin, messages from django.db import transaction from django.db.models import Prefetch from recipe.models import", "recipe.models import Ingredient, Recipe, RecipeIngredient, RecipeInstance, \\ RecipeInstanceImage, Tag admin.site.register(Tag) @admin.register(Ingredient) class IngredientAdmin(admin.ModelAdmin):", "main), messages.SUCCESS) merge_ingredients.short_description = 'Merge selected ingredients' actions = ( merge_ingredients, ) class", "RecipeIngredientInline(admin.TabularInline): model = RecipeIngredient autocomplete_fields = ( 'ingredient', ) class RecipeInstanceImageInline(admin.TabularInline): model =", "queryset): if len(queryset) < 2: self.message_user( request, 'At least two ingredients need to", "if len(queryset) < 2: self.message_user( request, 'At least two ingredients need to be", "queryset.first() others = queryset[1:] len_others = len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk for 
i in others]).delete()", "= ( 'day', 'recipe', ) inlines = ( RecipeInstanceImageInline, ) @admin.register(Recipe) class RecipeAdmin(admin.ModelAdmin):", "for i in others]).delete() self.message_user( request, '{} ingredients were merged into {}'.format(len_others, main),", "RecipeInstanceAdmin(admin.ModelAdmin): list_display = ( 'day', 'recipe', ) inlines = ( RecipeInstanceImageInline, ) @admin.register(Recipe)", "Prefetch from recipe.models import Ingredient, Recipe, RecipeIngredient, RecipeInstance, \\ RecipeInstanceImage, Tag admin.site.register(Tag) @admin.register(Ingredient)", "django.db.models import Prefetch from recipe.models import Ingredient, Recipe, RecipeIngredient, RecipeInstance, \\ RecipeInstanceImage, Tag", "import admin, messages from django.db import transaction from django.db.models import Prefetch from recipe.models", "= ( 'name', ) search_fields = ( 'name', ) @transaction.atomic def merge_ingredients(self, request,", "class RecipeInstanceAdmin(admin.ModelAdmin): list_display = ( 'day', 'recipe', ) inlines = ( RecipeInstanceImageInline, )", "( 'name', 'tag_str', 'view_count', ) list_filter = ( 'tags', ) search_fields = (", "'recipe', ) inlines = ( RecipeInstanceImageInline, ) @admin.register(Recipe) class RecipeAdmin(admin.ModelAdmin): list_display = (", "RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk for i in others]).delete() self.message_user( request, '{} ingredients were merged into", "from django.db import transaction from django.db.models import Prefetch from recipe.models import Ingredient, Recipe,", "= queryset.first() others = queryset[1:] len_others = len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk for i in", "ingredients' actions = ( merge_ingredients, ) class RecipeIngredientInline(admin.TabularInline): model = RecipeIngredient autocomplete_fields =", ") list_filter = ( 'tags', ) 
search_fields = ( 'name', ) inlines =", "list_display = ( 'name', 'tag_str', 'view_count', ) list_filter = ( 'tags', ) search_fields", "selected!', messages.WARNING) return main = queryset.first() others = queryset[1:] len_others = len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main)", "need to be selected!', messages.WARNING) return main = queryset.first() others = queryset[1:] len_others", "'ingredient', ) class RecipeInstanceImageInline(admin.TabularInline): model = RecipeInstanceImage @admin.register(RecipeInstance) class RecipeInstanceAdmin(admin.ModelAdmin): list_display = (", "actions = ( merge_ingredients, ) class RecipeIngredientInline(admin.TabularInline): model = RecipeIngredient autocomplete_fields = (", "'day', 'recipe', ) inlines = ( RecipeInstanceImageInline, ) @admin.register(Recipe) class RecipeAdmin(admin.ModelAdmin): list_display =", "RecipeInstance, \\ RecipeInstanceImage, Tag admin.site.register(Tag) @admin.register(Ingredient) class IngredientAdmin(admin.ModelAdmin): list_display = ( 'name', )", "list_display = ( 'name', ) search_fields = ( 'name', ) @transaction.atomic def merge_ingredients(self,", ") @transaction.atomic def merge_ingredients(self, request, queryset): if len(queryset) < 2: self.message_user( request, 'At", "( merge_ingredients, ) class RecipeIngredientInline(admin.TabularInline): model = RecipeIngredient autocomplete_fields = ( 'ingredient', )", "Tag admin.site.register(Tag) @admin.register(Ingredient) class IngredientAdmin(admin.ModelAdmin): list_display = ( 'name', ) search_fields = (", "others]).delete() self.message_user( request, '{} ingredients were merged into {}'.format(len_others, main), messages.SUCCESS) merge_ingredients.short_description =", "merge_ingredients, ) class RecipeIngredientInline(admin.TabularInline): model = RecipeIngredient autocomplete_fields = ( 'ingredient', ) class", "Ingredient.objects.filter(pk__in=[i.pk for i in others]).delete() self.message_user( 
request, '{} ingredients were merged into {}'.format(len_others,", "len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk for i in others]).delete() self.message_user( request, '{} ingredients were merged", "transaction from django.db.models import Prefetch from recipe.models import Ingredient, Recipe, RecipeIngredient, RecipeInstance, \\", "'{} ingredients were merged into {}'.format(len_others, main), messages.SUCCESS) merge_ingredients.short_description = 'Merge selected ingredients'", ") @admin.register(Recipe) class RecipeAdmin(admin.ModelAdmin): list_display = ( 'name', 'tag_str', 'view_count', ) list_filter =", "2: self.message_user( request, 'At least two ingredients need to be selected!', messages.WARNING) return", "Ingredient, Recipe, RecipeIngredient, RecipeInstance, \\ RecipeInstanceImage, Tag admin.site.register(Tag) @admin.register(Ingredient) class IngredientAdmin(admin.ModelAdmin): list_display =", "list_filter = ( 'tags', ) search_fields = ( 'name', ) inlines = (", "= queryset[1:] len_others = len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk for i in others]).delete() self.message_user( request,", "to be selected!', messages.WARNING) return main = queryset.first() others = queryset[1:] len_others =", "were merged into {}'.format(len_others, main), messages.SUCCESS) merge_ingredients.short_description = 'Merge selected ingredients' actions =", "class RecipeIngredientInline(admin.TabularInline): model = RecipeIngredient autocomplete_fields = ( 'ingredient', ) class RecipeInstanceImageInline(admin.TabularInline): model", "return main = queryset.first() others = queryset[1:] len_others = len(others) RecipeIngredient.objects.filter(ingredient__in=others).update(ingredient=main) Ingredient.objects.filter(pk__in=[i.pk for", "be selected!', messages.WARNING) return main = queryset.first() others = 
queryset[1:] len_others = len(others)", "RecipeIngredient, RecipeInstance, \\ RecipeInstanceImage, Tag admin.site.register(Tag) @admin.register(Ingredient) class IngredientAdmin(admin.ModelAdmin): list_display = ( 'name',", "merge_ingredients.short_description = 'Merge selected ingredients' actions = ( merge_ingredients, ) class RecipeIngredientInline(admin.TabularInline): model", "'name', ) inlines = ( RecipeIngredientInline, ) def get_queryset(self, request): qs = super().get_queryset(request)", "django.contrib import admin, messages from django.db import transaction from django.db.models import Prefetch from", "class IngredientAdmin(admin.ModelAdmin): list_display = ( 'name', ) search_fields = ( 'name', ) @transaction.atomic", "Recipe, RecipeIngredient, RecipeInstance, \\ RecipeInstanceImage, Tag admin.site.register(Tag) @admin.register(Ingredient) class IngredientAdmin(admin.ModelAdmin): list_display = (", "( 'name', ) inlines = ( RecipeIngredientInline, ) def get_queryset(self, request): qs =", "= ( 'ingredient', ) class RecipeInstanceImageInline(admin.TabularInline): model = RecipeInstanceImage @admin.register(RecipeInstance) class RecipeInstanceAdmin(admin.ModelAdmin): list_display", "@admin.register(Recipe) class RecipeAdmin(admin.ModelAdmin): list_display = ( 'name', 'tag_str', 'view_count', ) list_filter = (", "'Merge selected ingredients' actions = ( merge_ingredients, ) class RecipeIngredientInline(admin.TabularInline): model = RecipeIngredient", "class RecipeInstanceImageInline(admin.TabularInline): model = RecipeInstanceImage @admin.register(RecipeInstance) class RecipeInstanceAdmin(admin.ModelAdmin): list_display = ( 'day', 'recipe',", "'tags', ) search_fields = ( 'name', ) inlines = ( RecipeIngredientInline, ) def", "self.message_user( request, '{} ingredients were merged into {}'.format(len_others, main), messages.SUCCESS) merge_ingredients.short_description = 'Merge", "import transaction from django.db.models import Prefetch from recipe.models 
import Ingredient, Recipe, RecipeIngredient, RecipeInstance,", "\\ RecipeInstanceImage, Tag admin.site.register(Tag) @admin.register(Ingredient) class IngredientAdmin(admin.ModelAdmin): list_display = ( 'name', ) search_fields", "in others]).delete() self.message_user( request, '{} ingredients were merged into {}'.format(len_others, main), messages.SUCCESS) merge_ingredients.short_description" ]
[ "under slightly different names. try: LSIDPerson.objects.get( lsid=person_ls_id ) except(LSIDPerson.DoesNotExist): person_object, person_created = Person.objects.get_or_create(", "# under slightly different names. try: LSIDPerson.objects.get( lsid=person_ls_id ) except(LSIDPerson.DoesNotExist): person_object, person_created =", "import json import os from tqdm import tqdm class Command(BaseCommand): \"\"\" Import a", "person JSON from Legiscan to the database. \"\"\" help = 'Import a folder", "\"\"\" from django.core.management.base import BaseCommand from general.models import Person from ls_importer.models import LSIDPerson", "= person_json['person'] person_ls_id = pj_unfold['people_id'] # person_ls_role_id = pj_unfold['role_id'] # person_role = pj_unfold['role']", "def handle(self, *args, **options): \"\"\" Make it happen. \"\"\" def json_to_person(json_path): json_data =", "to prevent edge cases where # a person may be stored twice in", "where # a person may be stored twice in Legiscan # under slightly", "full of person JSON from Legiscan to the database.' def handle(self, *args, **options):", "BaseCommand from general.models import Person from ls_importer.models import LSIDPerson import json import os", "# a person may be stored twice in Legiscan # under slightly different", "json.load(json_data) pj_unfold = person_json['person'] person_ls_id = pj_unfold['people_id'] # person_ls_role_id = pj_unfold['role_id'] # person_role", "import os from tqdm import tqdm class Command(BaseCommand): \"\"\" Import a folder (~/people)", "json_to_person(json_path): json_data = open(json_path) person_json = json.load(json_data) pj_unfold = person_json['person'] person_ls_id = pj_unfold['people_id']", "utf-8 -*- \"\"\" Import a folder (~/people) full of person JSON from Legiscan", "Person from ls_importer.models import LSIDPerson import json import os from tqdm import tqdm", "in Legiscan # under slightly different names. 
try: LSIDPerson.objects.get( lsid=person_ls_id ) except(LSIDPerson.DoesNotExist): person_object,", "tqdm class Command(BaseCommand): \"\"\" Import a folder (~/people) full of person JSON from", "person_json = json.load(json_data) pj_unfold = person_json['person'] person_ls_id = pj_unfold['people_id'] # person_ls_role_id = pj_unfold['role_id']", "LSIDPerson.objects.get_or_create( lsid=person_ls_id, person=person_object, ) target_directory = os.path.join(os.path.expanduser(\"~\"), 'people') for file in tqdm(os.listdir(target_directory)): if", "\"\"\" help = 'Import a folder full of person JSON from Legiscan to", "it happen. \"\"\" def json_to_person(json_path): json_data = open(json_path) person_json = json.load(json_data) pj_unfold =", "(~/people) full of person JSON from Legiscan to the database. \"\"\" from django.core.management.base", "a folder full of person JSON from Legiscan to the database.' def handle(self,", "coding: utf-8 -*- \"\"\" Import a folder (~/people) full of person JSON from", "open(json_path) person_json = json.load(json_data) pj_unfold = person_json['person'] person_ls_id = pj_unfold['people_id'] # person_ls_role_id =", "middle_name=person_middle_name, last_name=person_last_name, suffix=person_suffix, defaults={ 'nickname': person_nickname, } ) link_object, link_created = LSIDPerson.objects.get_or_create( lsid=person_ls_id,", "= pj_unfold['name'] person_first_name = pj_unfold['first_name'] person_middle_name = pj_unfold['middle_name'] person_last_name = pj_unfold['last_name'] person_suffix =", "os from tqdm import tqdm class Command(BaseCommand): \"\"\" Import a folder (~/people) full", "person JSON from Legiscan to the database. \"\"\" from django.core.management.base import BaseCommand from", "the database.' def handle(self, *args, **options): \"\"\" Make it happen. 
\"\"\" def json_to_person(json_path):", "json_data = open(json_path) person_json = json.load(json_data) pj_unfold = person_json['person'] person_ls_id = pj_unfold['people_id'] #", "from Legiscan to the database. \"\"\" from django.core.management.base import BaseCommand from general.models import", "full of person JSON from Legiscan to the database. \"\"\" from django.core.management.base import", "python # -*- coding: utf-8 -*- \"\"\" Import a folder (~/people) full of", "JSON from Legiscan to the database. \"\"\" help = 'Import a folder full", "import LSIDPerson import json import os from tqdm import tqdm class Command(BaseCommand): \"\"\"", "happen. \"\"\" def json_to_person(json_path): json_data = open(json_path) person_json = json.load(json_data) pj_unfold = person_json['person']", "import Person from ls_importer.models import LSIDPerson import json import os from tqdm import", ") link_object, link_created = LSIDPerson.objects.get_or_create( lsid=person_ls_id, person=person_object, ) target_directory = os.path.join(os.path.expanduser(\"~\"), 'people') for", "person_json['person'] person_ls_id = pj_unfold['people_id'] # person_ls_role_id = pj_unfold['role_id'] # person_role = pj_unfold['role'] #", "# person_name = pj_unfold['name'] person_first_name = pj_unfold['first_name'] person_middle_name = pj_unfold['middle_name'] person_last_name = pj_unfold['last_name']", "import BaseCommand from general.models import Person from ls_importer.models import LSIDPerson import json import", "} ) link_object, link_created = LSIDPerson.objects.get_or_create( lsid=person_ls_id, person=person_object, ) target_directory = os.path.join(os.path.expanduser(\"~\"), 'people')", "person_last_name = pj_unfold['last_name'] person_suffix = pj_unfold['suffix'] person_nickname = pj_unfold['nickname'] # This try/catch structure", "database.' def handle(self, *args, **options): \"\"\" Make it happen. \"\"\" def json_to_person(json_path): json_data", "to the database.' 
def handle(self, *args, **options): \"\"\" Make it happen. \"\"\" def", "first_name=person_first_name, middle_name=person_middle_name, last_name=person_last_name, suffix=person_suffix, defaults={ 'nickname': person_nickname, } ) link_object, link_created = LSIDPerson.objects.get_or_create(", "-*- coding: utf-8 -*- \"\"\" Import a folder (~/people) full of person JSON", "Import a folder (~/people) full of person JSON from Legiscan to the database.", "prevent edge cases where # a person may be stored twice in Legiscan", "= pj_unfold['party_id'] # person_name = pj_unfold['name'] person_first_name = pj_unfold['first_name'] person_middle_name = pj_unfold['middle_name'] person_last_name", "from tqdm import tqdm class Command(BaseCommand): \"\"\" Import a folder (~/people) full of", "Legiscan to the database.' def handle(self, *args, **options): \"\"\" Make it happen. \"\"\"", "**options): \"\"\" Make it happen. \"\"\" def json_to_person(json_path): json_data = open(json_path) person_json =", "'Import a folder full of person JSON from Legiscan to the database.' def", "person may be stored twice in Legiscan # under slightly different names. 
try:", "Command(BaseCommand): \"\"\" Import a folder (~/people) full of person JSON from Legiscan to", "person_ls_role_id = pj_unfold['role_id'] # person_role = pj_unfold['role'] # person_ls_party_id = pj_unfold['party_id'] # person_name", "pj_unfold['last_name'] person_suffix = pj_unfold['suffix'] person_nickname = pj_unfold['nickname'] # This try/catch structure exists #", "def json_to_person(json_path): json_data = open(json_path) person_json = json.load(json_data) pj_unfold = person_json['person'] person_ls_id =", "pj_unfold['people_id'] # person_ls_role_id = pj_unfold['role_id'] # person_role = pj_unfold['role'] # person_ls_party_id = pj_unfold['party_id']", "person=person_object, ) target_directory = os.path.join(os.path.expanduser(\"~\"), 'people') for file in tqdm(os.listdir(target_directory)): if file.endswith(\".json\"): json_to_person(os.path.join(target_directory,", "pj_unfold['nickname'] # This try/catch structure exists # to prevent edge cases where #", "be stored twice in Legiscan # under slightly different names. try: LSIDPerson.objects.get( lsid=person_ls_id", "stored twice in Legiscan # under slightly different names. try: LSIDPerson.objects.get( lsid=person_ls_id )", "from ls_importer.models import LSIDPerson import json import os from tqdm import tqdm class", "person_middle_name = pj_unfold['middle_name'] person_last_name = pj_unfold['last_name'] person_suffix = pj_unfold['suffix'] person_nickname = pj_unfold['nickname'] #", "# person_ls_party_id = pj_unfold['party_id'] # person_name = pj_unfold['name'] person_first_name = pj_unfold['first_name'] person_middle_name =", "defaults={ 'nickname': person_nickname, } ) link_object, link_created = LSIDPerson.objects.get_or_create( lsid=person_ls_id, person=person_object, ) target_directory", "Make it happen. 
\"\"\" def json_to_person(json_path): json_data = open(json_path) person_json = json.load(json_data) pj_unfold", "person_first_name = pj_unfold['first_name'] person_middle_name = pj_unfold['middle_name'] person_last_name = pj_unfold['last_name'] person_suffix = pj_unfold['suffix'] person_nickname", "to the database. \"\"\" from django.core.management.base import BaseCommand from general.models import Person from", "folder (~/people) full of person JSON from Legiscan to the database. \"\"\" help", "from general.models import Person from ls_importer.models import LSIDPerson import json import os from", "LSIDPerson import json import os from tqdm import tqdm class Command(BaseCommand): \"\"\" Import", "JSON from Legiscan to the database. \"\"\" from django.core.management.base import BaseCommand from general.models", "last_name=person_last_name, suffix=person_suffix, defaults={ 'nickname': person_nickname, } ) link_object, link_created = LSIDPerson.objects.get_or_create( lsid=person_ls_id, person=person_object,", "suffix=person_suffix, defaults={ 'nickname': person_nickname, } ) link_object, link_created = LSIDPerson.objects.get_or_create( lsid=person_ls_id, person=person_object, )", "different names. try: LSIDPerson.objects.get( lsid=person_ls_id ) except(LSIDPerson.DoesNotExist): person_object, person_created = Person.objects.get_or_create( first_name=person_first_name, middle_name=person_middle_name,", "JSON from Legiscan to the database.' def handle(self, *args, **options): \"\"\" Make it", "the database. 
\"\"\" from django.core.management.base import BaseCommand from general.models import Person from ls_importer.models", "= pj_unfold['people_id'] # person_ls_role_id = pj_unfold['role_id'] # person_role = pj_unfold['role'] # person_ls_party_id =", "# person_ls_role_id = pj_unfold['role_id'] # person_role = pj_unfold['role'] # person_ls_party_id = pj_unfold['party_id'] #", "This try/catch structure exists # to prevent edge cases where # a person", "import tqdm class Command(BaseCommand): \"\"\" Import a folder (~/people) full of person JSON", "class Command(BaseCommand): \"\"\" Import a folder (~/people) full of person JSON from Legiscan", "= pj_unfold['nickname'] # This try/catch structure exists # to prevent edge cases where", "= json.load(json_data) pj_unfold = person_json['person'] person_ls_id = pj_unfold['people_id'] # person_ls_role_id = pj_unfold['role_id'] #", "pj_unfold['first_name'] person_middle_name = pj_unfold['middle_name'] person_last_name = pj_unfold['last_name'] person_suffix = pj_unfold['suffix'] person_nickname = pj_unfold['nickname']", "django.core.management.base import BaseCommand from general.models import Person from ls_importer.models import LSIDPerson import json", "Person.objects.get_or_create( first_name=person_first_name, middle_name=person_middle_name, last_name=person_last_name, suffix=person_suffix, defaults={ 'nickname': person_nickname, } ) link_object, link_created =", "a folder (~/people) full of person JSON from Legiscan to the database. 
\"\"\"", "exists # to prevent edge cases where # a person may be stored", "= pj_unfold['last_name'] person_suffix = pj_unfold['suffix'] person_nickname = pj_unfold['nickname'] # This try/catch structure exists", "= pj_unfold['role_id'] # person_role = pj_unfold['role'] # person_ls_party_id = pj_unfold['party_id'] # person_name =", "# This try/catch structure exists # to prevent edge cases where # a", "pj_unfold['role'] # person_ls_party_id = pj_unfold['party_id'] # person_name = pj_unfold['name'] person_first_name = pj_unfold['first_name'] person_middle_name", "pj_unfold['name'] person_first_name = pj_unfold['first_name'] person_middle_name = pj_unfold['middle_name'] person_last_name = pj_unfold['last_name'] person_suffix = pj_unfold['suffix']", "edge cases where # a person may be stored twice in Legiscan #", "= Person.objects.get_or_create( first_name=person_first_name, middle_name=person_middle_name, last_name=person_last_name, suffix=person_suffix, defaults={ 'nickname': person_nickname, } ) link_object, link_created", "= pj_unfold['middle_name'] person_last_name = pj_unfold['last_name'] person_suffix = pj_unfold['suffix'] person_nickname = pj_unfold['nickname'] # This", "lsid=person_ls_id ) except(LSIDPerson.DoesNotExist): person_object, person_created = Person.objects.get_or_create( first_name=person_first_name, middle_name=person_middle_name, last_name=person_last_name, suffix=person_suffix, defaults={ 'nickname':", "handle(self, *args, **options): \"\"\" Make it happen. 
\"\"\" def json_to_person(json_path): json_data = open(json_path)", "a person may be stored twice in Legiscan # under slightly different names.", "person_ls_party_id = pj_unfold['party_id'] # person_name = pj_unfold['name'] person_first_name = pj_unfold['first_name'] person_middle_name = pj_unfold['middle_name']", ") except(LSIDPerson.DoesNotExist): person_object, person_created = Person.objects.get_or_create( first_name=person_first_name, middle_name=person_middle_name, last_name=person_last_name, suffix=person_suffix, defaults={ 'nickname': person_nickname,", "tqdm import tqdm class Command(BaseCommand): \"\"\" Import a folder (~/people) full of person", "the database. \"\"\" help = 'Import a folder full of person JSON from", "folder (~/people) full of person JSON from Legiscan to the database. \"\"\" from", "person_ls_id = pj_unfold['people_id'] # person_ls_role_id = pj_unfold['role_id'] # person_role = pj_unfold['role'] # person_ls_party_id", "person_nickname = pj_unfold['nickname'] # This try/catch structure exists # to prevent edge cases", "full of person JSON from Legiscan to the database. 
\"\"\" help = 'Import", "cases where # a person may be stored twice in Legiscan # under", "try: LSIDPerson.objects.get( lsid=person_ls_id ) except(LSIDPerson.DoesNotExist): person_object, person_created = Person.objects.get_or_create( first_name=person_first_name, middle_name=person_middle_name, last_name=person_last_name, suffix=person_suffix,", "#!/usr/bin/env python # -*- coding: utf-8 -*- \"\"\" Import a folder (~/people) full", "lsid=person_ls_id, person=person_object, ) target_directory = os.path.join(os.path.expanduser(\"~\"), 'people') for file in tqdm(os.listdir(target_directory)): if file.endswith(\".json\"):", "pj_unfold = person_json['person'] person_ls_id = pj_unfold['people_id'] # person_ls_role_id = pj_unfold['role_id'] # person_role =", "'nickname': person_nickname, } ) link_object, link_created = LSIDPerson.objects.get_or_create( lsid=person_ls_id, person=person_object, ) target_directory =", "person_object, person_created = Person.objects.get_or_create( first_name=person_first_name, middle_name=person_middle_name, last_name=person_last_name, suffix=person_suffix, defaults={ 'nickname': person_nickname, } )", "may be stored twice in Legiscan # under slightly different names. try: LSIDPerson.objects.get(", "Legiscan to the database. \"\"\" help = 'Import a folder full of person", "\"\"\" Make it happen. 
\"\"\" def json_to_person(json_path): json_data = open(json_path) person_json = json.load(json_data)", "= pj_unfold['first_name'] person_middle_name = pj_unfold['middle_name'] person_last_name = pj_unfold['last_name'] person_suffix = pj_unfold['suffix'] person_nickname =", "link_object, link_created = LSIDPerson.objects.get_or_create( lsid=person_ls_id, person=person_object, ) target_directory = os.path.join(os.path.expanduser(\"~\"), 'people') for file", "person_name = pj_unfold['name'] person_first_name = pj_unfold['first_name'] person_middle_name = pj_unfold['middle_name'] person_last_name = pj_unfold['last_name'] person_suffix", "person_suffix = pj_unfold['suffix'] person_nickname = pj_unfold['nickname'] # This try/catch structure exists # to", "ls_importer.models import LSIDPerson import json import os from tqdm import tqdm class Command(BaseCommand):", "# to prevent edge cases where # a person may be stored twice", "(~/people) full of person JSON from Legiscan to the database. \"\"\" help =", "of person JSON from Legiscan to the database.' def handle(self, *args, **options): \"\"\"", "of person JSON from Legiscan to the database. 
\"\"\" help = 'Import a", "= 'Import a folder full of person JSON from Legiscan to the database.'", "structure exists # to prevent edge cases where # a person may be", "-*- \"\"\" Import a folder (~/people) full of person JSON from Legiscan to", "from django.core.management.base import BaseCommand from general.models import Person from ls_importer.models import LSIDPerson import", "\"\"\" def json_to_person(json_path): json_data = open(json_path) person_json = json.load(json_data) pj_unfold = person_json['person'] person_ls_id", "try/catch structure exists # to prevent edge cases where # a person may", "LSIDPerson.objects.get( lsid=person_ls_id ) except(LSIDPerson.DoesNotExist): person_object, person_created = Person.objects.get_or_create( first_name=person_first_name, middle_name=person_middle_name, last_name=person_last_name, suffix=person_suffix, defaults={", "names. try: LSIDPerson.objects.get( lsid=person_ls_id ) except(LSIDPerson.DoesNotExist): person_object, person_created = Person.objects.get_or_create( first_name=person_first_name, middle_name=person_middle_name, last_name=person_last_name,", "pj_unfold['middle_name'] person_last_name = pj_unfold['last_name'] person_suffix = pj_unfold['suffix'] person_nickname = pj_unfold['nickname'] # This try/catch", "= pj_unfold['role'] # person_ls_party_id = pj_unfold['party_id'] # person_name = pj_unfold['name'] person_first_name = pj_unfold['first_name']", "Legiscan to the database. \"\"\" from django.core.management.base import BaseCommand from general.models import Person", "pj_unfold['role_id'] # person_role = pj_unfold['role'] # person_ls_party_id = pj_unfold['party_id'] # person_name = pj_unfold['name']", "general.models import Person from ls_importer.models import LSIDPerson import json import os from tqdm", "\"\"\" Import a folder (~/people) full of person JSON from Legiscan to the", "from Legiscan to the database.' 
def handle(self, *args, **options): \"\"\" Make it happen.", "*args, **options): \"\"\" Make it happen. \"\"\" def json_to_person(json_path): json_data = open(json_path) person_json", "link_created = LSIDPerson.objects.get_or_create( lsid=person_ls_id, person=person_object, ) target_directory = os.path.join(os.path.expanduser(\"~\"), 'people') for file in", "= pj_unfold['suffix'] person_nickname = pj_unfold['nickname'] # This try/catch structure exists # to prevent", "pj_unfold['party_id'] # person_name = pj_unfold['name'] person_first_name = pj_unfold['first_name'] person_middle_name = pj_unfold['middle_name'] person_last_name =", ") target_directory = os.path.join(os.path.expanduser(\"~\"), 'people') for file in tqdm(os.listdir(target_directory)): if file.endswith(\".json\"): json_to_person(os.path.join(target_directory, file))", "# person_role = pj_unfold['role'] # person_ls_party_id = pj_unfold['party_id'] # person_name = pj_unfold['name'] person_first_name", "Legiscan # under slightly different names. try: LSIDPerson.objects.get( lsid=person_ls_id ) except(LSIDPerson.DoesNotExist): person_object, person_created", "= open(json_path) person_json = json.load(json_data) pj_unfold = person_json['person'] person_ls_id = pj_unfold['people_id'] # person_ls_role_id", "of person JSON from Legiscan to the database. \"\"\" from django.core.management.base import BaseCommand", "json import os from tqdm import tqdm class Command(BaseCommand): \"\"\" Import a folder", "pj_unfold['suffix'] person_nickname = pj_unfold['nickname'] # This try/catch structure exists # to prevent edge", "from Legiscan to the database. \"\"\" help = 'Import a folder full of", "person_created = Person.objects.get_or_create( first_name=person_first_name, middle_name=person_middle_name, last_name=person_last_name, suffix=person_suffix, defaults={ 'nickname': person_nickname, } ) link_object,", "help = 'Import a folder full of person JSON from Legiscan to the", "database. 
\"\"\" help = 'Import a folder full of person JSON from Legiscan", "except(LSIDPerson.DoesNotExist): person_object, person_created = Person.objects.get_or_create( first_name=person_first_name, middle_name=person_middle_name, last_name=person_last_name, suffix=person_suffix, defaults={ 'nickname': person_nickname, }", "person_nickname, } ) link_object, link_created = LSIDPerson.objects.get_or_create( lsid=person_ls_id, person=person_object, ) target_directory = os.path.join(os.path.expanduser(\"~\"),", "folder full of person JSON from Legiscan to the database.' def handle(self, *args,", "person_role = pj_unfold['role'] # person_ls_party_id = pj_unfold['party_id'] # person_name = pj_unfold['name'] person_first_name =", "# -*- coding: utf-8 -*- \"\"\" Import a folder (~/people) full of person", "person JSON from Legiscan to the database.' def handle(self, *args, **options): \"\"\" Make", "twice in Legiscan # under slightly different names. try: LSIDPerson.objects.get( lsid=person_ls_id ) except(LSIDPerson.DoesNotExist):", "to the database. \"\"\" help = 'Import a folder full of person JSON", "= LSIDPerson.objects.get_or_create( lsid=person_ls_id, person=person_object, ) target_directory = os.path.join(os.path.expanduser(\"~\"), 'people') for file in tqdm(os.listdir(target_directory)):", "slightly different names. try: LSIDPerson.objects.get( lsid=person_ls_id ) except(LSIDPerson.DoesNotExist): person_object, person_created = Person.objects.get_or_create( first_name=person_first_name,", "database. \"\"\" from django.core.management.base import BaseCommand from general.models import Person from ls_importer.models import" ]